From 535480ebe6ef7ac3c081a1d6b0c9519404d70e58 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Fri, 11 Aug 2023 16:25:08 -0700 Subject: [PATCH 01/68] deps: npm-registry-fetch@15.0.0 --- node_modules/.gitignore | 11 + node_modules/@npmcli/agent/lib/dns.js | 51 + node_modules/@npmcli/agent/lib/errors.js | 71 ++ node_modules/@npmcli/agent/lib/http.js | 33 + node_modules/@npmcli/agent/lib/https.js | 33 + node_modules/@npmcli/agent/lib/index.js | 135 +++ node_modules/@npmcli/agent/lib/proxy/http.js | 146 +++ node_modules/@npmcli/agent/lib/proxy/index.js | 25 + node_modules/@npmcli/agent/lib/proxy/null.js | 97 ++ node_modules/@npmcli/agent/lib/proxy/socks.js | 153 +++ node_modules/@npmcli/agent/lib/util.js | 33 + node_modules/@npmcli/agent/package.json | 56 + .../npm-registry-fetch/LICENSE.md | 20 + .../npm-registry-fetch/lib/auth.js | 145 +++ .../npm-registry-fetch/lib/check-response.js | 100 ++ .../npm-registry-fetch/lib/clean-url.js | 27 + .../npm-registry-fetch/lib/default-opts.js | 19 + .../npm-registry-fetch/lib/errors.js | 80 ++ .../npm-registry-fetch/lib/index.js | 247 ++++ .../npm-registry-fetch/package.json | 67 ++ .../node_modules/make-fetch-happen/LICENSE | 16 + .../make-fetch-happen/lib/cache/entry.js | 469 ++++++++ .../make-fetch-happen/lib/cache/errors.js | 11 + .../make-fetch-happen/lib/cache/index.js | 49 + .../make-fetch-happen/lib/cache/key.js | 17 + .../make-fetch-happen/lib/cache/policy.js | 161 +++ .../make-fetch-happen/lib/fetch.js | 118 ++ .../make-fetch-happen/lib/index.js | 41 + .../make-fetch-happen/lib/options.js | 54 + .../make-fetch-happen/lib/pipeline.js | 41 + .../make-fetch-happen/lib/remote.js | 127 ++ .../make-fetch-happen/package.json | 80 ++ .../node_modules/minipass/LICENSE | 15 + .../node_modules/minipass/dist/cjs/index.js | 1028 +++++++++++++++++ .../minipass/dist/cjs/package.json | 3 + .../node_modules/minipass/dist/mjs/index.js | 1018 ++++++++++++++++ .../minipass/dist/mjs/package.json | 3 + .../node_modules/minipass/package.json | 82 ++ node_modules/npm-registry-fetch/package.json | 18 +- .../npm-registry-fetch/LICENSE.md | 20 + .../npm-registry-fetch/lib/auth.js | 145 +++ .../npm-registry-fetch/lib/check-response.js | 100 ++ .../npm-registry-fetch/lib/clean-url.js | 27 + .../npm-registry-fetch/lib/default-opts.js | 19 + .../npm-registry-fetch/lib/errors.js | 80 ++ .../npm-registry-fetch/lib/index.js | 247 ++++ .../npm-registry-fetch/package.json | 67 ++ package-lock.json | 98 +- package.json | 2 +- workspaces/arborist/package.json | 2 +- workspaces/libnpmaccess/package.json | 2 +- workspaces/libnpmhook/package.json | 2 +- workspaces/libnpmorg/package.json | 2 +- workspaces/libnpmpublish/package.json | 2 +- workspaces/libnpmsearch/package.json | 2 +- workspaces/libnpmteam/package.json | 2 +- 56 files changed, 5696 insertions(+), 23 deletions(-) create mode 100644 node_modules/@npmcli/agent/lib/dns.js create mode 100644 node_modules/@npmcli/agent/lib/errors.js create mode 100644 node_modules/@npmcli/agent/lib/http.js create mode 100644 node_modules/@npmcli/agent/lib/https.js create mode 100644 node_modules/@npmcli/agent/lib/index.js create mode 100644 node_modules/@npmcli/agent/lib/proxy/http.js create mode 100644 node_modules/@npmcli/agent/lib/proxy/index.js create mode 100644 node_modules/@npmcli/agent/lib/proxy/null.js create mode 100644 node_modules/@npmcli/agent/lib/proxy/socks.js create mode 100644 node_modules/@npmcli/agent/lib/util.js create mode 100644 node_modules/@npmcli/agent/package.json create mode 100644 
node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/clean-url.js create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/package.json create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json create mode 100644 node_modules/npm-registry-fetch/node_modules/minipass/LICENSE create mode 100644 node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/index.js create mode 100644 node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/package.json create mode 100644 node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/index.js create mode 100644 node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/package.json create mode 100644 node_modules/npm-registry-fetch/node_modules/minipass/package.json create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/clean-url.js create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/package.json diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 2ae24fcf8a16f..851e54092990b 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -18,6 +18,7 @@ !/@isaacs/string-locale-compare !/@npmcli/ /@npmcli/* +!/@npmcli/agent !/@npmcli/disparity-colors !/@npmcli/fs !/@npmcli/git @@ -193,12 +194,22 @@ !/npm-packlist !/npm-pick-manifest !/npm-profile +!/npm-profile/node_modules/ +/npm-profile/node_modules/* 
+!/npm-profile/node_modules/npm-registry-fetch !/npm-registry-fetch +!/npm-registry-fetch/node_modules/ +/npm-registry-fetch/node_modules/* +!/npm-registry-fetch/node_modules/make-fetch-happen +!/npm-registry-fetch/node_modules/minipass !/npm-user-validate !/npmlog !/once !/p-map !/pacote +!/pacote/node_modules/ +/pacote/node_modules/* +!/pacote/node_modules/npm-registry-fetch !/parse-conflict-json !/path-is-absolute !/path-key diff --git a/node_modules/@npmcli/agent/lib/dns.js b/node_modules/@npmcli/agent/lib/dns.js new file mode 100644 index 0000000000000..10dcb8d471d10 --- /dev/null +++ b/node_modules/@npmcli/agent/lib/dns.js @@ -0,0 +1,51 @@ +'use strict' + +const LRUCache = require('lru-cache') +const dns = require('dns') + +const defaultOptions = exports.defaultOptions = { + family: undefined, + hints: dns.ADDRCONFIG, + all: false, + verbatim: undefined, +} + +const lookupCache = exports.lookupCache = new LRUCache({ max: 50 }) + +// this is a factory so that each request can have its own opts (i.e. ttl) +// while still sharing the cache across all requests +exports.getLookup = (dnsOptions) => { + return (hostname, options, callback) => { + if (typeof options === 'function') { + callback = options + options = null + } else if (typeof options === 'number') { + options = { family: options } + } + + options = { ...defaultOptions, ...options } + + const key = JSON.stringify({ + hostname, + family: options.family, + hints: options.hints, + all: options.all, + verbatim: options.verbatim, + }) + + if (lookupCache.has(key)) { + const [address, family] = lookupCache.get(key) + process.nextTick(callback, null, address, family) + return + } + + dnsOptions.lookup(hostname, options, (err, address, family) => { + if (err) { + return callback(err) + } + + lookupCache.set(key, [address, family], { ttl: dnsOptions.ttl }) + return callback(null, address, family) + }) + } +} diff --git a/node_modules/@npmcli/agent/lib/errors.js b/node_modules/@npmcli/agent/lib/errors.js new file mode 100644 index 0000000000000..9c664aeb39757 --- /dev/null +++ b/node_modules/@npmcli/agent/lib/errors.js @@ -0,0 +1,71 @@ +'use strict' + +class InvalidProxyProtocolError extends Error { + constructor (url) { + super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``) + this.code = 'EINVALIDPROXY' + this.proxy = url + } +} + +class InvalidProxyResponseError extends Error { + constructor (url, status) { + super(`Invalid status code \`${status}\` connecting to proxy \`${url.host}\``) + this.code = 'EINVALIDRESPONSE' + this.proxy = url + this.status = status + } +} + +class ConnectionTimeoutError extends Error { + constructor (host) { + super(`Timeout connecting to host \`${host}\``) + this.code = 'ECONNECTIONTIMEOUT' + this.host = host + } +} + +class IdleTimeoutError extends Error { + constructor (host) { + super(`Idle timeout reached for host \`${host}\``) + this.code = 'EIDLETIMEOUT' + this.host = host + } +} + +class ResponseTimeoutError extends Error { + constructor (proxy, request) { + let msg = 'Response timeout ' + if (proxy.url) { + msg += `from proxy \`${proxy.url.host}\` ` + } + msg += `connecting to host \`${request.host}\`` + super(msg) + this.code = 'ERESPONSETIMEOUT' + this.proxy = proxy.url + this.request = request + } +} + +class TransferTimeoutError extends Error { + constructor (proxy, request) { + let msg = 'Transfer timeout ' + if (proxy.url) { + msg += `from proxy \`${proxy.url.host}\` ` + } + msg += `for \`${request.host}\`` + super(msg) + this.code = 'ETRANSFERTIMEOUT' + 
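For reference, the `getLookup` factory in dns.js above returns a drop-in replacement for `dns.lookup`, so it can be handed straight to `net.connect` or `tls.connect`. A minimal usage sketch (the require path and host are illustrative; the `ttl`/`lookup` shape mirrors the defaults applied in lib/util.js):

```js
const dns = require('dns')
const net = require('net')
const { getLookup } = require('@npmcli/agent/lib/dns.js')

// one cache shared across requests, per-factory ttl
const lookup = getLookup({ ttl: 5 * 60 * 1000, lookup: dns.lookup })

// repeated connections to the same host within the TTL skip the resolver
const socket = net.connect({ host: 'registry.npmjs.org', port: 443, lookup })
```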
this.proxy = proxy.url + this.request = request + } +} + +module.exports = { + InvalidProxyProtocolError, + InvalidProxyResponseError, + ConnectionTimeoutError, + IdleTimeoutError, + ResponseTimeoutError, + TransferTimeoutError, +} diff --git a/node_modules/@npmcli/agent/lib/http.js b/node_modules/@npmcli/agent/lib/http.js new file mode 100644 index 0000000000000..23512393caf3f --- /dev/null +++ b/node_modules/@npmcli/agent/lib/http.js @@ -0,0 +1,33 @@ +'use strict' + +const http = require('http') + +const { getLookup } = require('./dns.js') +const { normalizeOptions } = require('./util.js') +const createProxy = require('./proxy/index.js') + +class HttpAgent extends http.Agent { + constructor (_options = {}) { + const options = normalizeOptions(_options) + super(options) + this.proxy = createProxy({ + agent: this, + lookup: getLookup(options.dns), + proxy: options.proxy, + secure: false, + }) + } + + createConnection (_options, callback) { + const options = normalizeOptions(_options) + return this.proxy.createConnection(options, callback) + } + + addRequest (request, _options) { + const options = normalizeOptions(_options) + super.addRequest(request, _options) + return this.proxy.addRequest(request, options) + } +} + +module.exports = HttpAgent diff --git a/node_modules/@npmcli/agent/lib/https.js b/node_modules/@npmcli/agent/lib/https.js new file mode 100644 index 0000000000000..b544614d7f47f --- /dev/null +++ b/node_modules/@npmcli/agent/lib/https.js @@ -0,0 +1,33 @@ +'use strict' + +const https = require('https') + +const { getLookup } = require('./dns.js') +const { normalizeOptions } = require('./util.js') +const createProxy = require('./proxy/index.js') + +class HttpsAgent extends https.Agent { + constructor (_options) { + const options = normalizeOptions(_options) + super(options) + this.proxy = createProxy({ + agent: this, + lookup: getLookup(options.dns), + proxy: options.proxy, + secure: true, + }) + } + + createConnection (_options, callback) { + const options = normalizeOptions(_options) + return this.proxy.createConnection(options, callback) + } + + addRequest (request, _options) { + const options = normalizeOptions(_options) + super.addRequest(request, options) + return this.proxy.addRequest(request, options) + } +} + +module.exports = HttpsAgent diff --git a/node_modules/@npmcli/agent/lib/index.js b/node_modules/@npmcli/agent/lib/index.js new file mode 100644 index 0000000000000..a6f556964d86d --- /dev/null +++ b/node_modules/@npmcli/agent/lib/index.js @@ -0,0 +1,135 @@ +'use strict' + +const { normalizeOptions } = require('./util.js') +const HttpAgent = require('./http.js') +const HttpsAgent = require('./https.js') + +const AgentCache = new Map() + +const proxyEnv = {} +for (const [key, value] of Object.entries(process.env)) { + const lowerKey = key.toLowerCase() + if (['https_proxy', 'http_proxy', 'proxy', 'no_proxy'].includes(lowerKey)) { + proxyEnv[lowerKey] = value + } +} + +const getAgent = (url, options) => { + url = new URL(url) + options = normalizeOptions(options) + + // false has meaning so this can't be a simple truthiness check + if (options.agent != null) { + return options.agent + } + + const isHttps = url.protocol === 'https:' + + let proxy = options.proxy + if (!proxy) { + proxy = isHttps + ? 
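Each error class in errors.js above sets a stable `code`, so consumers can branch on codes rather than matching message text. A small sketch:

```js
const { ConnectionTimeoutError } = require('@npmcli/agent/lib/errors.js')

const err = new ConnectionTimeoutError('registry.npmjs.org')
if (err.code === 'ECONNECTIONTIMEOUT') {
  // the offending host is carried on the error itself
  console.error(`connection to ${err.host} timed out`)
}
```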
proxyEnv.https_proxy + : (proxyEnv.https_proxy || proxyEnv.http_proxy || proxyEnv.proxy) + } + + if (proxy) { + proxy = new URL(proxy) + let noProxy = options.noProxy || proxyEnv.no_proxy + if (typeof noProxy === 'string') { + noProxy = noProxy.split(',').map((p) => p.trim()) + } + + if (noProxy) { + const hostSegments = url.hostname.split('.').reverse() + const matches = noProxy.some((no) => { + const noSegments = no.split('.').filter(Boolean).reverse() + if (!noSegments.length) { + return false + } + + for (let i = 0; i < noSegments.length; ++i) { + if (hostSegments[i] !== noSegments[i]) { + return false + } + } + + return true + }) + + if (matches) { + proxy = '' + } + } + } + + const timeouts = [ + options.timeouts.connection || 0, + options.timeouts.idle || 0, + options.timeouts.response || 0, + options.timeouts.transfer || 0, + ].join('.') + + const maxSockets = options.maxSockets || 15 + + let proxyDescriptor = 'proxy:' + if (!proxy) { + proxyDescriptor += 'null' + } else { + proxyDescriptor += `${proxy.protocol}//` + let auth = '' + + if (proxy.username) { + auth += proxy.username + } + + if (proxy.password) { + auth += `:${proxy.password}` + } + + if (auth) { + proxyDescriptor += `${auth}@` + } + + proxyDescriptor += proxy.host + } + + const key = [ + `https:${isHttps}`, + proxyDescriptor, + `local-address:${options.localAddress || 'null'}`, + `strict-ssl:${isHttps ? options.rejectUnauthorized : 'false'}`, + `ca:${isHttps && options.ca || 'null'}`, + `cert:${isHttps && options.cert || 'null'}`, + `key:${isHttps && options.key || 'null'}`, + `timeouts:${timeouts}`, + `maxSockets:${maxSockets}`, + ].join(':') + + if (AgentCache.has(key)) { + return AgentCache.get(key) + } + + const agentOptions = { + ca: options.ca, + cert: options.cert, + key: options.key, + rejectUnauthorized: options.rejectUnauthorized, + maxSockets, + timeouts: options.timeouts, + localAddress: options.localAddress, + proxy, + } + + const agent = isHttps + ? 
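The no-proxy check above compares hostname segments right-to-left, so a bare domain rule also covers its subdomains. The same logic, extracted as a standalone sketch:

```js
const matchesNoProxy = (hostname, noProxy) => {
  const hostSegments = hostname.split('.').reverse()
  return noProxy.some((no) => {
    const noSegments = no.split('.').filter(Boolean).reverse()
    // every rule segment must match the host from the right
    return noSegments.length > 0 &&
      noSegments.every((seg, i) => hostSegments[i] === seg)
  })
}

matchesNoProxy('registry.example.com', ['example.com']) // true
matchesNoProxy('example.org', ['example.com'])          // false
```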
new HttpsAgent(agentOptions) + : new HttpAgent(agentOptions) + + AgentCache.set(key, agent) + return agent +} + +module.exports = { + getAgent, + HttpAgent, + HttpsAgent, +} diff --git a/node_modules/@npmcli/agent/lib/proxy/http.js b/node_modules/@npmcli/agent/lib/proxy/http.js new file mode 100644 index 0000000000000..8d092e963c084 --- /dev/null +++ b/node_modules/@npmcli/agent/lib/proxy/http.js @@ -0,0 +1,146 @@ +'use strict' + +const http = require('http') +const https = require('https') +const net = require('net') +const tls = require('tls') + +const { + ConnectionTimeoutError, + IdleTimeoutError, + InvalidProxyResponseError, + ResponseTimeoutError, + TransferTimeoutError, +} = require('../errors.js') + +// this proxy class uses the http CONNECT method +class HttpProxy { + constructor ({ agent, lookup, url, secure }) { + this.agent = agent + this.lookup = lookup + this.url = url + this.secure = secure + } + + createConnection (options, callback) { + const requestOptions = { + // pass createConnection so this request doesn't go through an agent + createConnection: (opts, cb) => { + // delete the path first, otherwise (net|tls).connect will try to open a unix socket + delete opts.path + // we also delete the timeout since we control it ourselves + delete opts.timeout + opts.family = this.agent.options.family + opts.lookup = this.lookup + + if (this.url.protocol === 'https:') { + return tls.connect(opts, cb) + } + + return net.connect(opts, cb) + }, + method: 'CONNECT', + host: this.url.hostname, + port: this.url.port, + servername: this.url.hostname, + path: `${options.host}:${options.port}`, + setHost: false, + timeout: options.timeout, + headers: { + connection: this.agent.keepAlive ? 'keep-alive' : 'close', + host: `${options.host}:${options.port}`, + }, + rejectUnauthorized: options.rejectUnauthorized, + } + + if (this.url.username || this.url.password) { + const username = decodeURIComponent(this.url.username) + const password = decodeURIComponent(this.url.password) + requestOptions.headers['proxy-authentication'] = + Buffer.from(`${username}:${password}`).toString('base64') + } + + let connectionTimeout + + const onConnect = (res, socket) => { + clearTimeout(connectionTimeout) + req.removeListener('error', onError) + + if (res.statusCode !== 200) { + return callback(new InvalidProxyResponseError(this.url, res.statusCode)) + } + + if (this.secure) { + socket = tls.connect({ ...options, socket }) + } + + socket.setKeepAlive(this.agent.keepAlive, this.agent.keepAliveMsecs) + socket.setNoDelay(this.agent.keepAlive) + + if (options.timeouts.idle) { + socket.setTimeout(options.timeouts.idle) + socket.once('timeout', () => { + socket.destroy(new IdleTimeoutError(this.url.host)) + }) + } + + return callback(null, socket) + } + + const onError = (err) => { + req.removeListener('connect', onConnect) + return callback(err) + } + + const req = this.secure + ? 
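`getAgent` memoizes agents on a key built from the protocol, proxy descriptor, TLS material, timeouts, and socket limits, so requests with identical settings share sockets. A sketch of the observable effect:

```js
const { getAgent } = require('@npmcli/agent')

const a = getAgent('https://registry.npmjs.org/npm')
const b = getAgent('https://registry.npmjs.org/lodash')
// same protocol, proxy, and TLS options -> the same cached agent instance
console.log(a === b) // true
```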
https.request(requestOptions) + : http.request(requestOptions) + + req.once('connect', onConnect) + req.once('error', onError) + req.end() + + if (options.timeouts.connection) { + connectionTimeout = setTimeout(() => { + return callback(new ConnectionTimeoutError(this.url.host)) + }, options.timeouts.connection) + } + } + + addRequest (request, options) { + if (this.agent.options.timeouts.response) { + let responseTimeout + + const onFinish = () => { + responseTimeout = setTimeout(() => { + request.destroy(new ResponseTimeoutError(this, request)) + }, this.agent.options.timeouts.response) + } + + const onResponse = () => { + clearTimeout(responseTimeout) + } + + request.once('finish', onFinish) + request.once('response', onResponse) + } + + if (this.agent.options.timeouts.transfer) { + let transferTimeout + + const onResponse = (res) => { + transferTimeout = setTimeout(() => { + res.destroy(new TransferTimeoutError(this, request)) + }, this.agent.options.timeouts.transfer) + + res.once('close', () => { + clearTimeout(transferTimeout) + }) + } + + request.once('response', onResponse) + } + } +} + +module.exports = HttpProxy diff --git a/node_modules/@npmcli/agent/lib/proxy/index.js b/node_modules/@npmcli/agent/lib/proxy/index.js new file mode 100644 index 0000000000000..87f628c5bbf94 --- /dev/null +++ b/node_modules/@npmcli/agent/lib/proxy/index.js @@ -0,0 +1,25 @@ +'use strict' + +const { InvalidProxyProtocolError } = require('../errors.js') +const HttpProxy = require('./http.js') +const NullProxy = require('./null.js') +const SocksProxy = require('./socks.js') + +const createProxy = ({ agent, lookup, proxy, secure }) => { + if (!proxy) { + return new NullProxy({ agent, lookup, secure }) + } + + const parsed = new URL(proxy) + if (parsed.protocol === 'http:' || parsed.protocol === 'https:') { + return new HttpProxy({ agent, lookup, url: parsed, secure }) + } + + if (parsed.protocol.startsWith('socks')) { + return new SocksProxy({ agent, lookup, url: parsed, secure }) + } + + throw new InvalidProxyProtocolError(parsed) +} + +module.exports = createProxy diff --git a/node_modules/@npmcli/agent/lib/proxy/null.js b/node_modules/@npmcli/agent/lib/proxy/null.js new file mode 100644 index 0000000000000..d2b2f6f777e92 --- /dev/null +++ b/node_modules/@npmcli/agent/lib/proxy/null.js @@ -0,0 +1,97 @@ +'use strict' + +const net = require('net') +const tls = require('tls') + +const { + ConnectionTimeoutError, + IdleTimeoutError, + ResponseTimeoutError, + TransferTimeoutError, +} = require('../errors.js') + +class NullProxy { + constructor ({ agent, lookup, secure }) { + this.agent = agent + this.lookup = lookup + this.secure = secure + } + + createConnection (options, callback) { + const socket = this.secure + ? 
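`createProxy` above is a small factory keyed on the proxy URL's protocol; anything unrecognized throws with code `EINVALIDPROXY`. For example:

```js
const createProxy = require('@npmcli/agent/lib/proxy/index.js')

// http:/https: -> HttpProxy (CONNECT tunnel), socks*: -> SocksProxy,
// no proxy at all -> NullProxy (direct connections)
try {
  createProxy({ agent: null, lookup: null, proxy: 'ftp://localhost', secure: false })
} catch (err) {
  console.error(err.code) // 'EINVALIDPROXY'
}
```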
tls.connect({ ...options, family: this.agent.options.family, lookup: this.lookup }) + : net.connect({ ...options, family: this.agent.options.family, lookup: this.lookup }) + + socket.setKeepAlive(this.agent.keepAlive, this.agent.keepAliveMsecs) + socket.setNoDelay(this.agent.keepAlive) + + let connectionTimeout + + if (options.timeouts.connection) { + connectionTimeout = setTimeout(() => { + callback(new ConnectionTimeoutError(options.host)) + }, options.timeouts.connection) + } + + if (options.timeouts.idle) { + socket.setTimeout(options.timeouts.idle) + socket.once('timeout', () => { + socket.destroy(new IdleTimeoutError(options.host)) + }) + } + + const onConnect = () => { + clearTimeout(connectionTimeout) + socket.removeListener('error', onError) + callback(null, socket) + } + + const onError = (err) => { + socket.removeListener('connect', onConnect) + callback(err) + } + + socket.once('error', onError) + socket.once(this.secure ? 'secureConnect' : 'connect', onConnect) + } + + addRequest (request, options) { + if (this.agent.options.timeouts.response) { + let responseTimeout + + const onFinish = () => { + responseTimeout = setTimeout(() => { + request.destroy(new ResponseTimeoutError(this, request)) + }, this.agent.options.timeouts.response) + } + + const onResponse = () => { + clearTimeout(responseTimeout) + } + + request.once('finish', onFinish) + request.once('response', onResponse) + } + + if (this.agent.options.timeouts.transfer) { + let transferTimeout + + const onResponse = (res) => { + transferTimeout = setTimeout(() => { + // swallow the error event on the request, this allows the one on the response + // to make it to the end user + request.once('error', () => {}) + res.destroy(new TransferTimeoutError(this, request)) + }, this.agent.options.timeouts.transfer) + + res.once('close', () => { + clearTimeout(transferTimeout) + }) + } + + request.once('response', onResponse) + } + } +} + +module.exports = NullProxy diff --git a/node_modules/@npmcli/agent/lib/proxy/socks.js b/node_modules/@npmcli/agent/lib/proxy/socks.js new file mode 100644 index 0000000000000..8cad7148e9227 --- /dev/null +++ b/node_modules/@npmcli/agent/lib/proxy/socks.js @@ -0,0 +1,153 @@ +'use strict' + +const { SocksClient } = require('socks') +const tls = require('tls') + +const { + ConnectionTimeoutError, + IdleTimeoutError, + InvalidProxyProtocolError, + ResponseTimeoutError, + TransferTimeoutError, +} = require('../errors.js') + +class SocksProxy { + constructor ({ agent, lookup, secure, url }) { + this.agent = agent + this.lookup = lookup + this.secure = secure + this.url = url + if (!this.url.port) { + this.url.port = 1080 + } + + if (this.url.protocol === 'socks4:') { + this.shouldLookup = true + this.type = 4 + } else if (this.url.protocol === 'socks4a:') { + this.shouldLookup = false + this.type = 4 + } else if (this.url.protocol === 'socks5:') { + this.shouldLookup = true + this.type = 5 + } else if (this.url.protocol === 'socks5h:' || this.url.protocol === 'socks:') { + this.shouldLookup = false + this.type = 5 + } else { + throw new InvalidProxyProtocolError(this.url) + } + } + + createConnection (options, callback) { + const socksOptions = { + proxy: { + host: this.url.hostname, + port: parseInt(this.url.port, 10), + type: this.type, + userId: this.url.username, + password: this.url.password, + }, + destination: { + host: options.host, + port: parseInt(options.port, 10), + }, + command: 'connect', + socket_options: { + family: this.agent.options.family, + lookup: this.lookup, + }, + } + + const 
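The SocksProxy constructor above encodes the SOCKS variants: the `4`/`5` suffix selects the protocol version, while the `a`/`h` variants (and bare `socks:`) defer DNS resolution to the proxy instead of resolving locally. A sketch, using a hypothetical minimal agent stub:

```js
const SocksProxy = require('@npmcli/agent/lib/proxy/socks.js')

const p = new SocksProxy({
  agent: { options: {} }, // stand-in; the real agent is wired by createProxy
  url: new URL('socks5h://localhost'),
})
console.log(p.type, p.shouldLookup, p.url.port) // -> 5 false 1080
```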
connect = () => { + let connectionTimeout + const socksClient = new SocksClient(socksOptions) + + const onError = (err) => { + socksClient.removeListener('established', onEstablished) + return callback(err) + } + + const onEstablished = (connection) => { + clearTimeout(connectionTimeout) + socksClient.removeListener('error', onError) + + if (this.secure) { + connection.socket = tls.connect({ ...options, socket: connection.socket }) + } + + connection.socket.setKeepAlive(this.agent.keepAlive, this.agent.keepAliveMsecs) + connection.socket.setNoDelay(this.agent.keepAlive) + + if (options.timeouts.idle) { + connection.socket.setTimeout(options.timeouts.idle) + connection.socket.once('timeout', () => { + connection.socket.destroy(new IdleTimeoutError(this.url.host)) + }) + } + + return callback(null, connection.socket) + } + + socksClient.once('error', onError) + socksClient.once('established', onEstablished) + + if (options.timeouts.connection) { + connectionTimeout = setTimeout(() => { + return callback(new ConnectionTimeoutError(this.url.host)) + }, options.timeouts.connection) + } + + socksClient.connect() + } + + if (!this.shouldLookup) { + return connect() + } + + this.lookup(options.host, (err, result) => { + if (err) { + return callback(err) + } + + socksOptions.destination.host = result + connect() + }) + } + + addRequest (request, options) { + if (this.agent.options.timeouts.response) { + let responseTimeout + + const onFinish = () => { + responseTimeout = setTimeout(() => { + request.destroy(new ResponseTimeoutError(this, request)) + }, this.agent.options.timeouts.response) + } + + const onResponse = () => { + clearTimeout(responseTimeout) + } + + request.once('finish', onFinish) + request.once('response', onResponse) + } + + if (this.agent.options.timeouts.transfer) { + let transferTimeout + + const onResponse = (res) => { + transferTimeout = setTimeout(() => { + res.destroy(new TransferTimeoutError(this, request)) + }, this.agent.options.timeouts.transfer) + + res.once('close', () => { + clearTimeout(transferTimeout) + }) + } + + request.once('response', onResponse) + } + } +} + +module.exports = SocksProxy diff --git a/node_modules/@npmcli/agent/lib/util.js b/node_modules/@npmcli/agent/lib/util.js new file mode 100644 index 0000000000000..512207084d23e --- /dev/null +++ b/node_modules/@npmcli/agent/lib/util.js @@ -0,0 +1,33 @@ +'use strict' + +const dns = require('dns') + +const normalizeOptions = (_options) => { + const options = { ..._options } + + if (typeof options.keepAlive === 'undefined') { + options.keepAlive = true + } + + if (!options.timeouts) { + options.timeouts = {} + } + + if (options.timeout) { + options.timeouts.idle = options.timeout + delete options.timeout + } + + options.family = !isNaN(+options.family) ? 
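All three proxy classes implement the same `addRequest` pattern seen above: a response timer armed when the request finishes writing, a transfer timer armed once the response starts, plus the connection and idle timers on the socket itself. They map onto the options like so (values illustrative):

```js
const { getAgent } = require('@npmcli/agent')

const agent = getAgent('https://registry.npmjs.org', {
  timeouts: {
    connection: 30 * 1000, // dial -> socket connected
    idle: 60 * 1000,       // inactivity on an established socket
    response: 30 * 1000,   // request fully sent -> first response byte
    transfer: 300 * 1000,  // response started -> response closed
  },
})
```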
+options.family : 0 + options.dns = { + ttl: 5 * 60 * 1000, + lookup: dns.lookup, + ...options.dns, + } + + return options +} + +module.exports = { + normalizeOptions, +} diff --git a/node_modules/@npmcli/agent/package.json b/node_modules/@npmcli/agent/package.json new file mode 100644 index 0000000000000..a3fb4262b9c86 --- /dev/null +++ b/node_modules/@npmcli/agent/package.json @@ -0,0 +1,56 @@ +{ + "name": "@npmcli/agent", + "version": "1.1.0", + "description": "the http/https agent used by the npm cli", + "main": "lib/index.js", + "scripts": { + "gencerts": "bash scripts/create-cert.sh", + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/agent/issues" + }, + "homepage": "https://github.com/npm/agent#readme", + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.15.1", + "publish": "true" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.15.1", + "minipass-fetch": "^3.0.3", + "nock": "^13.2.7", + "simple-socks": "^2.2.2", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/agent.git" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "dependencies": { + "lru-cache": "^7.18.3", + "socks": "^2.7.1" + } +} diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md new file mode 100644 index 0000000000000..5fc208ff122e0 --- /dev/null +++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js new file mode 100644 index 0000000000000..870ce0d923cd0 --- /dev/null +++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js @@ -0,0 +1,145 @@ +'use strict' +const fs = require('fs') +const npa = require('npm-package-arg') +const { URL } = require('url') + +// Find the longest registry key that is used for some kind of auth +// in the options. 
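`normalizeOptions` above is what lets the rest of the package accept loose input: `keepAlive` defaults on, a bare `timeout` becomes `timeouts.idle`, `family` is coerced to a number, and DNS caching defaults are filled in. A quick sketch:

```js
const { normalizeOptions } = require('@npmcli/agent/lib/util.js')

const opts = normalizeOptions({ timeout: 1000, family: '6' })
console.log(opts.keepAlive)     // true
console.log(opts.timeouts.idle) // 1000
console.log(opts.family)        // 6
console.log(opts.dns.ttl)       // 300000 (5 minutes)
```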
+const regKeyFromURI = (uri, opts) => { + const parsed = new URL(uri) + // try to find a config key indicating we have auth for this registry + // can be one of :_authToken, :_auth, :_password and :username, or + // :certfile and :keyfile + // We walk up the "path" until we're left with just //[:], + // stopping when we reach '//'. + let regKey = `//${parsed.host}${parsed.pathname}` + while (regKey.length > '//'.length) { + // got some auth for this URI + if (hasAuth(regKey, opts)) { + return regKey + } + + // can be either //host/some/path/:_auth or //host/some/path:_auth + // walk up by removing EITHER what's after the slash OR the slash itself + regKey = regKey.replace(/([^/]+|\/)$/, '') + } +} + +const hasAuth = (regKey, opts) => ( + opts[`${regKey}:_authToken`] || + opts[`${regKey}:_auth`] || + opts[`${regKey}:username`] && opts[`${regKey}:_password`] || + opts[`${regKey}:certfile`] && opts[`${regKey}:keyfile`] +) + +const sameHost = (a, b) => { + const parsedA = new URL(a) + const parsedB = new URL(b) + return parsedA.host === parsedB.host +} + +const getRegistry = opts => { + const { spec } = opts + const { scope: specScope, subSpec } = spec ? npa(spec) : {} + const subSpecScope = subSpec && subSpec.scope + const scope = subSpec ? subSpecScope : specScope + const scopeReg = scope && opts[`${scope}:registry`] + return scopeReg || opts.registry +} + +const maybeReadFile = file => { + try { + return fs.readFileSync(file, 'utf8') + } catch (er) { + if (er.code !== 'ENOENT') { + throw er + } + return null + } +} + +const getAuth = (uri, opts = {}) => { + const { forceAuth } = opts + if (!uri) { + throw new Error('URI is required') + } + const regKey = regKeyFromURI(uri, forceAuth || opts) + + // we are only allowed to use what's in forceAuth if specified + if (forceAuth && !regKey) { + return new Auth({ + scopeAuthKey: null, + token: forceAuth._authToken || forceAuth.token, + username: forceAuth.username, + password: forceAuth._password || forceAuth.password, + auth: forceAuth._auth || forceAuth.auth, + certfile: forceAuth.certfile, + keyfile: forceAuth.keyfile, + }) + } + + // no auth for this URI, but might have it for the registry + if (!regKey) { + const registry = getRegistry(opts) + if (registry && uri !== registry && sameHost(uri, registry)) { + return getAuth(registry, opts) + } else if (registry !== opts.registry) { + // If making a tarball request to a different base URI than the + // registry where we logged in, but the same auth SHOULD be sent + // to that artifact host, then we track where it was coming in from, + // and warn the user if we get a 4xx error on it. 
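The walk-up in `regKeyFromURI` means credentials configured for a registry path prefix apply to anything beneath it. A sketch with hypothetical config keys (the same `//host/path:_authToken` shape npm reads from .npmrc):

```js
const getAuth = require('npm-registry-fetch/lib/auth.js')

const auth = getAuth('https://registry.example.com/sub/path/pkg.tgz', {
  '//registry.example.com/sub/:_authToken': 'hypothetical-token',
})
// the walk-up strips pkg.tgz, then the slash, then path, and stops at
// //registry.example.com/sub/ where auth is configured
console.log(auth.token) // 'hypothetical-token'
```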
+ const scopeAuthKey = regKeyFromURI(registry, opts) + return new Auth({ scopeAuthKey }) + } + } + + const { + [`${regKey}:_authToken`]: token, + [`${regKey}:username`]: username, + [`${regKey}:_password`]: password, + [`${regKey}:_auth`]: auth, + [`${regKey}:certfile`]: certfile, + [`${regKey}:keyfile`]: keyfile, + } = opts + + return new Auth({ + scopeAuthKey: null, + token, + auth, + username, + password, + certfile, + keyfile, + }) +} + +class Auth { + constructor ({ token, auth, username, password, scopeAuthKey, certfile, keyfile }) { + this.scopeAuthKey = scopeAuthKey + this.token = null + this.auth = null + this.isBasicAuth = false + this.cert = null + this.key = null + if (token) { + this.token = token + } else if (auth) { + this.auth = auth + } else if (username && password) { + const p = Buffer.from(password, 'base64').toString('utf8') + this.auth = Buffer.from(`${username}:${p}`, 'utf8').toString('base64') + this.isBasicAuth = true + } + // mTLS may be used in conjunction with another auth method above + if (certfile && keyfile) { + const cert = maybeReadFile(certfile, 'utf-8') + const key = maybeReadFile(keyfile, 'utf-8') + if (cert && key) { + this.cert = cert + this.key = key + } + } + } +} + +module.exports = getAuth diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js new file mode 100644 index 0000000000000..066ac3c32420f --- /dev/null +++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js @@ -0,0 +1,100 @@ +'use strict' + +const errors = require('./errors.js') +const { Response } = require('minipass-fetch') +const defaultOpts = require('./default-opts.js') +const log = require('proc-log') +const cleanUrl = require('./clean-url.js') + +/* eslint-disable-next-line max-len */ +const moreInfoUrl = 'https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry' +const checkResponse = + async ({ method, uri, res, startTime, auth, opts }) => { + opts = { ...defaultOpts, ...opts } + if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) { + log.notice('', res.headers.get('npm-notice')) + } + + if (res.status >= 400) { + logRequest(method, res, startTime) + if (auth && auth.scopeAuthKey && !auth.token && !auth.auth) { + // we didn't have auth for THIS request, but we do have auth for + // requests to the registry indicated by the spec's scope value. + // Warn the user. + log.warn('registry', `No auth for URI, but auth present for scoped registry. + +URI: ${uri} +Scoped Registry Key: ${auth.scopeAuthKey} + +More info here: ${moreInfoUrl}`) + } + return checkErrors(method, res, startTime, opts) + } else { + res.body.on('end', () => logRequest(method, res, startTime, opts)) + if (opts.ignoreBody) { + res.body.resume() + return new Response(null, res) + } + return res + } + } +module.exports = checkResponse + +function logRequest (method, res, startTime) { + const elapsedTime = Date.now() - startTime + const attempt = res.headers.get('x-fetch-attempts') + const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : '' + const cacheStatus = res.headers.get('x-local-cache-status') + const cacheStr = cacheStatus ? 
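One subtlety in the Auth class above: `_password` is expected to arrive base64-encoded (that is how npm stores it in .npmrc), so it is decoded before being combined into the Basic payload. The round trip, for illustration:

```js
const stored = Buffer.from('hunter2', 'utf8').toString('base64') // what .npmrc holds
const p = Buffer.from(stored, 'base64').toString('utf8')         // 'hunter2'
const basic = Buffer.from(`user:${p}`, 'utf8').toString('base64')
console.log(`Basic ${basic}`) // Basic dXNlcjpodW50ZXIy
```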
` (cache ${cacheStatus})` : '' + const urlStr = cleanUrl(res.url) + + log.http( + 'fetch', + `${method.toUpperCase()} ${res.status} ${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}` + ) +} + +function checkErrors (method, res, startTime, opts) { + return res.buffer() + .catch(() => null) + .then(body => { + let parsed = body + try { + parsed = JSON.parse(body.toString('utf8')) + } catch { + // ignore errors + } + if (res.status === 401 && res.headers.get('www-authenticate')) { + const auth = res.headers.get('www-authenticate') + .split(/,\s*/) + .map(s => s.toLowerCase()) + if (auth.indexOf('ipaddress') !== -1) { + throw new errors.HttpErrorAuthIPAddress( + method, res, parsed, opts.spec + ) + } else if (auth.indexOf('otp') !== -1) { + throw new errors.HttpErrorAuthOTP( + method, res, parsed, opts.spec + ) + } else { + throw new errors.HttpErrorAuthUnknown( + method, res, parsed, opts.spec + ) + } + } else if ( + res.status === 401 && + body != null && + /one-time pass/.test(body.toString('utf8')) + ) { + // Heuristic for malformed OTP responses that don't include the + // www-authenticate header. + throw new errors.HttpErrorAuthOTP( + method, res, parsed, opts.spec + ) + } else { + throw new errors.HttpErrorGeneral( + method, res, parsed, opts.spec + ) + } + }) +} diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/clean-url.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/clean-url.js new file mode 100644 index 0000000000000..0c2656b5653a0 --- /dev/null +++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/clean-url.js @@ -0,0 +1,27 @@ +const { URL } = require('url') + +const replace = '***' +const tokenRegex = /\bnpm_[a-zA-Z0-9]{36}\b/g +const guidRegex = /\b[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\b/g + +const cleanUrl = (str) => { + if (typeof str !== 'string' || !str) { + return str + } + + try { + const url = new URL(str) + if (url.password) { + url.password = replace + str = url.toString() + } + } catch { + // ignore errors + } + + return str + .replace(tokenRegex, `npm_${replace}`) + .replace(guidRegex, `npm_${replace}`) +} + +module.exports = cleanUrl diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js new file mode 100644 index 0000000000000..f0847f0b507e2 --- /dev/null +++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js @@ -0,0 +1,19 @@ +const pkg = require('../package.json') +module.exports = { + maxSockets: 12, + method: 'GET', + registry: 'https://registry.npmjs.org/', + timeout: 5 * 60 * 1000, // 5 minutes + strictSSL: true, + noProxy: process.env.NOPROXY, + userAgent: `${pkg.name + }@${ + pkg.version + }/node@${ + process.version + }+${ + process.arch + } (${ + process.platform + })`, +} diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js new file mode 100644 index 0000000000000..cf5ddba6f300c --- /dev/null +++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js @@ -0,0 +1,80 @@ +'use strict' + +const url = require('url') + +function packageName (href) { + try { + let basePath = new url.URL(href).pathname.slice(1) + if (!basePath.match(/^-/)) { + basePath = basePath.split('/') + var index = basePath.indexOf('_rewrite') + if (index === -1) { + index = basePath.length - 1 + } else { + index++ + } + return 
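`cleanUrl` above is what the HTTP logging in check-response.js runs every URL through; it masks embedded passwords plus anything shaped like an npm token or a UUID:

```js
const cleanUrl = require('npm-registry-fetch/lib/clean-url.js')

cleanUrl('https://user:secret@registry.example.com/pkg')
// -> 'https://user:***@registry.example.com/pkg'
```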
decodeURIComponent(basePath[index]) + } + } catch (_) { + // this is ok + } +} + +class HttpErrorBase extends Error { + constructor (method, res, body, spec) { + super() + this.name = this.constructor.name + this.headers = res.headers.raw() + this.statusCode = res.status + this.code = `E${res.status}` + this.method = method + this.uri = res.url + this.body = body + this.pkgid = spec ? spec.toString() : packageName(res.url) + } +} +module.exports.HttpErrorBase = HttpErrorBase + +class HttpErrorGeneral extends HttpErrorBase { + constructor (method, res, body, spec) { + super(method, res, body, spec) + this.message = `${res.status} ${res.statusText} - ${ + this.method.toUpperCase() + } ${ + this.spec || this.uri + }${ + (body && body.error) ? ' - ' + body.error : '' + }` + Error.captureStackTrace(this, HttpErrorGeneral) + } +} +module.exports.HttpErrorGeneral = HttpErrorGeneral + +class HttpErrorAuthOTP extends HttpErrorBase { + constructor (method, res, body, spec) { + super(method, res, body, spec) + this.message = 'OTP required for authentication' + this.code = 'EOTP' + Error.captureStackTrace(this, HttpErrorAuthOTP) + } +} +module.exports.HttpErrorAuthOTP = HttpErrorAuthOTP + +class HttpErrorAuthIPAddress extends HttpErrorBase { + constructor (method, res, body, spec) { + super(method, res, body, spec) + this.message = 'Login is not allowed from your IP address' + this.code = 'EAUTHIP' + Error.captureStackTrace(this, HttpErrorAuthIPAddress) + } +} +module.exports.HttpErrorAuthIPAddress = HttpErrorAuthIPAddress + +class HttpErrorAuthUnknown extends HttpErrorBase { + constructor (method, res, body, spec) { + super(method, res, body, spec) + this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate') + Error.captureStackTrace(this, HttpErrorAuthUnknown) + } +} +module.exports.HttpErrorAuthUnknown = HttpErrorAuthUnknown diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js new file mode 100644 index 0000000000000..23e349c5c5b96 --- /dev/null +++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js @@ -0,0 +1,247 @@ +'use strict' + +const { HttpErrorAuthOTP } = require('./errors.js') +const checkResponse = require('./check-response.js') +const getAuth = require('./auth.js') +const fetch = require('make-fetch-happen') +const JSONStream = require('minipass-json-stream') +const npa = require('npm-package-arg') +const qs = require('querystring') +const url = require('url') +const zlib = require('minizlib') +const { Minipass } = require('minipass') + +const defaultOpts = require('./default-opts.js') + +// WhatWG URL throws if it's not fully resolved +const urlIsValid = u => { + try { + return !!new url.URL(u) + } catch (_) { + return false + } +} + +module.exports = regFetch +function regFetch (uri, /* istanbul ignore next */ opts_ = {}) { + const opts = { + ...defaultOpts, + ...opts_, + } + + // if we did not get a fully qualified URI, then we look at the registry + // config or relevant scope to resolve it. 
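In practice that means callers can pass registry-relative paths and let `opts.registry` (or a scope mapping) supply the base, as the resolution code just below implements. A sketch against a hypothetical registry:

```js
const fetch = require('npm-registry-fetch')

// resolves to https://registry.example.com/-/whoami
fetch.json('/-/whoami', { registry: 'https://registry.example.com' })
  .then(doc => console.log(doc))
```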
+ const uriValid = urlIsValid(uri) + let registry = opts.registry || defaultOpts.registry + if (!uriValid) { + registry = opts.registry = ( + (opts.spec && pickRegistry(opts.spec, opts)) || + opts.registry || + registry + ) + uri = `${ + registry.trim().replace(/\/?$/g, '') + }/${ + uri.trim().replace(/^\//, '') + }` + // asserts that this is now valid + new url.URL(uri) + } + + const method = opts.method || 'GET' + + // through that takes into account the scope, the prefix of `uri`, etc + const startTime = Date.now() + const auth = getAuth(uri, opts) + const headers = getHeaders(uri, auth, opts) + let body = opts.body + const bodyIsStream = Minipass.isStream(body) + const bodyIsPromise = body && + typeof body === 'object' && + typeof body.then === 'function' + + if ( + body && !bodyIsStream && !bodyIsPromise && typeof body !== 'string' && !Buffer.isBuffer(body) + ) { + headers['content-type'] = headers['content-type'] || 'application/json' + body = JSON.stringify(body) + } else if (body && !headers['content-type']) { + headers['content-type'] = 'application/octet-stream' + } + + if (opts.gzip) { + headers['content-encoding'] = 'gzip' + if (bodyIsStream) { + const gz = new zlib.Gzip() + body.on('error', /* istanbul ignore next: unlikely and hard to test */ + err => gz.emit('error', err)) + body = body.pipe(gz) + } else if (!bodyIsPromise) { + body = new zlib.Gzip().end(body).concat() + } + } + + const parsed = new url.URL(uri) + + if (opts.query) { + const q = typeof opts.query === 'string' ? qs.parse(opts.query) + : opts.query + + Object.keys(q).forEach(key => { + if (q[key] !== undefined) { + parsed.searchParams.set(key, q[key]) + } + }) + uri = url.format(parsed) + } + + if (parsed.searchParams.get('write') === 'true' && method === 'GET') { + // do not cache, because this GET is fetching a rev that will be + // used for a subsequent PUT or DELETE, so we need to conditionally + // update cache. + opts.offline = false + opts.preferOffline = false + opts.preferOnline = true + } + + const doFetch = async fetchBody => { + const p = fetch(uri, { + agent: opts.agent, + algorithms: opts.algorithms, + body: fetchBody, + cache: getCacheMode(opts), + cachePath: opts.cache, + ca: opts.ca, + cert: auth.cert || opts.cert, + headers, + integrity: opts.integrity, + key: auth.key || opts.key, + localAddress: opts.localAddress, + maxSockets: opts.maxSockets, + memoize: opts.memoize, + method: method, + noProxy: opts.noProxy, + proxy: opts.httpsProxy || opts.proxy, + retry: opts.retry ? 
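Worth noting from the query handling above: a GET carrying `write=true` is treated as a read-for-update (fetching a rev that a later PUT or DELETE will use), so caching is forced off for that one request. A hypothetical read-modify-write flow:

```js
const fetch = require('npm-registry-fetch')

// the GET bypasses the cache because of write=true, so the document
// (and its _rev) is guaranteed current before a subsequent PUT
fetch.json('/some-package', {
  registry: 'https://registry.example.com',
  query: { write: true },
})
```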
opts.retry : { + retries: opts.fetchRetries, + factor: opts.fetchRetryFactor, + minTimeout: opts.fetchRetryMintimeout, + maxTimeout: opts.fetchRetryMaxtimeout, + }, + strictSSL: opts.strictSSL, + timeout: opts.timeout || 30 * 1000, + }).then(res => checkResponse({ + method, + uri, + res, + registry, + startTime, + auth, + opts, + })) + + if (typeof opts.otpPrompt === 'function') { + return p.catch(async er => { + if (er instanceof HttpErrorAuthOTP) { + let otp + // if otp fails to complete, we fail with that failure + try { + otp = await opts.otpPrompt() + } catch (_) { + // ignore this error + } + // if no otp provided, or otpPrompt errored, throw the original HTTP error + if (!otp) { + throw er + } + return regFetch(uri, { ...opts, otp }) + } + throw er + }) + } else { + return p + } + } + + return Promise.resolve(body).then(doFetch) +} + +module.exports.json = fetchJSON +function fetchJSON (uri, opts) { + return regFetch(uri, opts).then(res => res.json()) +} + +module.exports.json.stream = fetchJSONStream +function fetchJSONStream (uri, jsonPath, + /* istanbul ignore next */ opts_ = {}) { + const opts = { ...defaultOpts, ...opts_ } + const parser = JSONStream.parse(jsonPath, opts.mapJSON) + regFetch(uri, opts).then(res => + res.body.on('error', + /* istanbul ignore next: unlikely and difficult to test */ + er => parser.emit('error', er)).pipe(parser) + ).catch(er => parser.emit('error', er)) + return parser +} + +module.exports.pickRegistry = pickRegistry +function pickRegistry (spec, opts = {}) { + spec = npa(spec) + let registry = spec.scope && + opts[spec.scope.replace(/^@?/, '@') + ':registry'] + + if (!registry && opts.scope) { + registry = opts[opts.scope.replace(/^@?/, '@') + ':registry'] + } + + if (!registry) { + registry = opts.registry || defaultOpts.registry + } + + return registry +} + +function getCacheMode (opts) { + return opts.offline ? 'only-if-cached' + : opts.preferOffline ? 'force-cache' + : opts.preferOnline ? 
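`pickRegistry` above resolves the registry for a spec from scope-specific config before falling back to `opts.registry`:

```js
const { pickRegistry } = require('npm-registry-fetch')

const opts = {
  registry: 'https://registry.npmjs.org/',
  '@myscope:registry': 'https://registry.example.com/',
}
pickRegistry('@myscope/pkg@1.0.0', opts) // -> 'https://registry.example.com/'
pickRegistry('lodash@4.17.21', opts)     // -> 'https://registry.npmjs.org/'
```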
'no-cache' + : 'default' +} + +function getHeaders (uri, auth, opts) { + const headers = Object.assign({ + 'user-agent': opts.userAgent, + }, opts.headers || {}) + + if (opts.authType) { + headers['npm-auth-type'] = opts.authType + } + + if (opts.scope) { + headers['npm-scope'] = opts.scope + } + + if (opts.npmSession) { + headers['npm-session'] = opts.npmSession + } + + if (opts.npmCommand) { + headers['npm-command'] = opts.npmCommand + } + + // If a tarball is hosted on a different place than the manifest, only send + // credentials on `alwaysAuth` + if (auth.token) { + headers.authorization = `Bearer ${auth.token}` + } else if (auth.auth) { + headers.authorization = `Basic ${auth.auth}` + } + + if (opts.otp) { + headers['npm-otp'] = opts.otp + } + + return headers +} + +module.exports.cleanUrl = require('./clean-url.js') diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json b/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json new file mode 100644 index 0000000000000..63a44725886cc --- /dev/null +++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json @@ -0,0 +1,67 @@ +{ + "name": "npm-registry-fetch", + "version": "14.0.5", + "description": "Fetch-based http client for use with npm registry APIs", + "main": "lib", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "eslint": "eslint", + "lint": "eslint \"**/*.js\"", + "lintfix": "npm run lint -- --fix", + "test": "tap", + "posttest": "npm run lint", + "npmclilint": "npmcli-lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/npm-registry-fetch.git" + }, + "keywords": [ + "npm", + "registry", + "fetch" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "make-fetch-happen": "^11.0.0", + "minipass": "^5.0.0", + "minipass-fetch": "^3.0.0", + "minipass-json-stream": "^1.0.1", + "minizlib": "^2.1.2", + "npm-package-arg": "^10.0.0", + "proc-log": "^3.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.14.1", + "cacache": "^17.0.0", + "nock": "^13.2.4", + "require-inject": "^1.4.4", + "ssri": "^10.0.0", + "tap": "^16.0.1" + }, + "tap": { + "check-coverage": true, + "test-ignore": "test[\\\\/](util|cache)[\\\\/]", + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.14.1", + "publish": "true" + } +} diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE new file mode 100644 index 0000000000000..1808eb2844231 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE @@ -0,0 +1,16 @@ +ISC License + +Copyright 2017-2022 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js new file mode 100644 index 0000000000000..45141095074ec --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js @@ -0,0 +1,469 @@ +const { Request, Response } = require('minipass-fetch') +const { Minipass } = require('minipass') +const MinipassFlush = require('minipass-flush') +const cacache = require('cacache') +const url = require('url') + +const CachingMinipassPipeline = require('../pipeline.js') +const CachePolicy = require('./policy.js') +const cacheKey = require('./key.js') +const remote = require('../remote.js') + +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) + +// allow list for request headers that will be written to the cache index +// note: we will also store any request headers +// that are named in a response's vary header +const KEEP_REQUEST_HEADERS = [ + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept', + 'cache-control', +] + +// allow list for response headers that will be written to the cache index +// note: we must not store the real response's age header, or when we load +// a cache policy based on the metadata it will think the cached response +// is always stale +const KEEP_RESPONSE_HEADERS = [ + 'cache-control', + 'content-encoding', + 'content-language', + 'content-type', + 'date', + 'etag', + 'expires', + 'last-modified', + 'link', + 'location', + 'pragma', + 'vary', +] + +// return an object containing all metadata to be written to the index +const getMetadata = (request, response, options) => { + const metadata = { + time: Date.now(), + url: request.url, + reqHeaders: {}, + resHeaders: {}, + + // options on which we must match the request and vary the response + options: { + compress: options.compress != null ? options.compress : request.compress, + }, + } + + // only save the status if it's not a 200 or 304 + if (response.status !== 200 && response.status !== 304) { + metadata.status = response.status + } + + for (const name of KEEP_REQUEST_HEADERS) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + + // if the request's host header differs from the host in the url + // we need to keep it, otherwise it's just noise and we ignore it + const host = request.headers.get('host') + const parsedUrl = new url.URL(request.url) + if (host && parsedUrl.host !== host) { + metadata.reqHeaders.host = host + } + + // if the response has a vary header, make sure + // we store the relevant request headers too + if (response.headers.has('vary')) { + const vary = response.headers.get('vary') + // a vary of "*" means every header causes a different response. 
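Concretely, the vary handling above means a response with `Vary: Accept-Encoding` causes the request's accept-encoding header to be written into the cache index, so a later request with a different encoding preference will not wrongly match the entry. The header split used:

```js
const varyHeaders = (vary) => vary.trim().toLowerCase().split(/\s*,\s*/)

varyHeaders('Accept-Encoding, Accept-Language')
// -> ['accept-encoding', 'accept-language']
```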
+ // in that scenario, we do not include any additional headers + // as the freshness check will always fail anyway and we don't + // want to bloat the cache indexes + if (vary !== '*') { + // copy any other request headers that will vary the response + const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) + for (const name of varyHeaders) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + } + } + + for (const name of KEEP_RESPONSE_HEADERS) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + for (const name of options.cacheAdditionalHeaders) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + return metadata +} + +// symbols used to hide objects that may be lazily evaluated in a getter +const _request = Symbol('request') +const _response = Symbol('response') +const _policy = Symbol('policy') + +class CacheEntry { + constructor ({ entry, request, response, options }) { + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else { + this.key = cacheKey(request) + } + + this.options = options + + // these properties are behind getters that lazily evaluate + this[_request] = request + this[_response] = response + this[_policy] = null + } + + // returns a CacheEntry instance that satisfies the given request + // or undefined if no existing entry satisfies + static async find (request, options) { + try { + // compacts the index and returns an array of unique entries + var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { + const entryA = new CacheEntry({ entry: A, options }) + const entryB = new CacheEntry({ entry: B, options }) + return entryA.policy.satisfies(entryB.request) + }, { + validateEntry: (entry) => { + // clean out entries with a buggy content-encoding value + if (entry.metadata && + entry.metadata.resHeaders && + entry.metadata.resHeaders['content-encoding'] === null) { + return false + } + + // if an integrity is null, it needs to have a status specified + if (entry.integrity === null) { + return !!(entry.metadata && entry.metadata.status) + } + + return true + }, + }) + } catch (err) { + // if the compact request fails, ignore the error and return + return + } + + // a cache mode of 'reload' means to behave as though we have no cache + // on the way to the network. return undefined to allow cacheFetch to + // create a brand new request no matter what. 
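From the caller's side, the mode described above looks like this; `'reload'` always hits the network and rewrites the entry. A sketch using make-fetch-happen's own options (paths illustrative):

```js
const fetch = require('make-fetch-happen')

fetch('https://registry.example.com/lodash', {
  cachePath: '/tmp/my-cache', // where cacache stores entries
  cache: 'reload',            // ignore existing entries, refresh the cache
})
```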
+ if (options.cache === 'reload') { + return + } + + // find the specific entry that satisfies the request + let match + for (const entry of matches) { + const _entry = new CacheEntry({ + entry, + options, + }) + + if (_entry.policy.satisfies(request)) { + match = _entry + break + } + } + + return match + } + + // if the user made a PUT/POST/PATCH then we invalidate our + // cache for the same url by deleting the index entirely + static async invalidate (request, options) { + const key = cacheKey(request) + try { + await cacache.rm.entry(options.cachePath, key, { removeFully: true }) + } catch (err) { + // ignore errors + } + } + + get request () { + if (!this[_request]) { + this[_request] = new Request(this.entry.metadata.url, { + method: 'GET', + headers: this.entry.metadata.reqHeaders, + ...this.entry.metadata.options, + }) + } + + return this[_request] + } + + get response () { + if (!this[_response]) { + this[_response] = new Response(null, { + url: this.entry.metadata.url, + counter: this.options.counter, + status: this.entry.metadata.status || 200, + headers: { + ...this.entry.metadata.resHeaders, + 'content-length': this.entry.size, + }, + }) + } + + return this[_response] + } + + get policy () { + if (!this[_policy]) { + this[_policy] = new CachePolicy({ + entry: this.entry, + request: this.request, + response: this.response, + options: this.options, + }) + } + + return this[_policy] + } + + // wraps the response in a pipeline that stores the data + // in the cache while the user consumes it + async store (status) { + // if we got a status other than 200, 301, or 308, + // or the CachePolicy forbid storage, append the + // cache status header and return it untouched + if ( + this.request.method !== 'GET' || + ![200, 301, 308].includes(this.response.status) || + !this.policy.storable() + ) { + this.response.headers.set('x-local-cache-status', 'skip') + return this.response + } + + const size = this.response.headers.get('content-length') + const cacheOpts = { + algorithms: this.options.algorithms, + metadata: getMetadata(this.request, this.response, this.options), + size, + integrity: this.options.integrity, + integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, + } + + let body = null + // we only set a body if the status is a 200, redirects are + // stored as metadata only + if (this.response.status === 200) { + let cacheWriteResolve, cacheWriteReject + const cacheWritePromise = new Promise((resolve, reject) => { + cacheWriteResolve = resolve + cacheWriteReject = reject + }) + + body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ + flush () { + return cacheWritePromise + }, + })) + // this is always true since if we aren't reusing the one from the remote fetch, we + // are using the one from cacache + body.hasIntegrityEmitter = true + + const onResume = () => { + const tee = new Minipass() + const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) + // re-emit the integrity and size events on our new response body so they can be reused + cacheStream.on('integrity', i => body.emit('integrity', i)) + cacheStream.on('size', s => body.emit('size', s)) + // stick a flag on here so downstream users will know if they can expect integrity events + tee.pipe(cacheStream) + // TODO if the cache write fails, log a warning but return the response anyway + // eslint-disable-next-line promise/catch-or-return + cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) + body.unshift(tee) + 
body.unshift(this.response.body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + } else { + await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + } + + // note: we do not set the x-local-cache-hash header because we do not know + // the hash value until after the write to the cache completes, which doesn't + // happen until after the response has been sent and it's too late to write + // the header anyway + this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + this.response.headers.set('x-local-cache-mode', 'stream') + this.response.headers.set('x-local-cache-status', status) + this.response.headers.set('x-local-cache-time', new Date().toISOString()) + const newResponse = new Response(body, { + url: this.response.url, + status: this.response.status, + headers: this.response.headers, + counter: this.options.counter, + }) + return newResponse + } + + // use the cached data to create a response and return it + async respond (method, options, status) { + let response + if (method === 'HEAD' || [301, 308].includes(this.response.status)) { + // if the request is a HEAD, or the response is a redirect, + // then the metadata in the entry already includes everything + // we need to build a response + response = this.response + } else { + // we're responding with a full cached response, so create a body + // that reads from cacache and attach it to a new Response + const body = new Minipass() + const headers = { ...this.policy.responseHeaders() } + + const onResume = () => { + const cacheStream = cacache.get.stream.byDigest( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if (err.code === 'EINTEGRITY') { + await cacache.rm.content( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + } + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { + await CacheEntry.invalidate(this.request, this.options) + } + body.emit('error', err) + cacheStream.resume() + }) + // emit the integrity and size events based on our metadata so we're consistent + body.emit('integrity', this.entry.integrity) + body.emit('size', Number(headers['content-length'])) + cacheStream.pipe(body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + response = new Response(body, { + url: this.entry.metadata.url, + counter: options.counter, + status: 200, + headers, + }) + } + + response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) + response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + response.headers.set('x-local-cache-mode', 'stream') + response.headers.set('x-local-cache-status', status) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) + return response + } + + // use the provided request along with this cache entry to + // revalidate the stored response. 
returns a response, either + // from the cache or from the update + async revalidate (request, options) { + const revalidateRequest = new Request(request, { + headers: this.policy.revalidationHeaders(request), + }) + + try { + // NOTE: be sure to remove the headers property from the + // user supplied options, since we have already defined + // them on the new request object. if they're still in the + // options then those will overwrite the ones from the policy + var response = await remote(revalidateRequest, { + ...options, + headers: undefined, + }) + } catch (err) { + // if the network fetch fails, return the stale + // cached response unless it has a cache-control + // of 'must-revalidate' + if (!this.policy.mustRevalidate) { + return this.respond(request.method, options, 'stale') + } + + throw err + } + + if (this.policy.revalidated(revalidateRequest, response)) { + // we got a 304, write a new index to the cache and respond from cache + const metadata = getMetadata(request, response, options) + // 304 responses do not include headers that are specific to the response data + // since they do not include a body, so we copy values for headers that were + // in the old cache entry to the new one, if the new metadata does not already + // include that header + for (const name of KEEP_RESPONSE_HEADERS) { + if ( + !hasOwnProperty(metadata.resHeaders, name) && + hasOwnProperty(this.entry.metadata.resHeaders, name) + ) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + } + + for (const name of options.cacheAdditionalHeaders) { + const inMeta = hasOwnProperty(metadata.resHeaders, name) + const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) + const inPolicy = hasOwnProperty(this.policy.response.headers, name) + + // if the header is in the existing entry, but it is not in the metadata + // then we need to write it to the metadata as this will refresh the on-disk cache + if (!inMeta && inEntry) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + // if the header is in the metadata, but not in the policy, then we need to set + // it in the policy so that it's included in the immediate response. 
future + // responses will load a new cache entry, so we don't need to change that + if (!inPolicy && inMeta) { + this.policy.response.headers[name] = metadata.resHeaders[name] + } + } + + try { + await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { + size: this.entry.size, + metadata, + }) + } catch (err) { + // if updating the cache index fails, we ignore it and + // respond anyway + } + return this.respond(request.method, options, 'revalidated') + } + + // if we got a modified response, create a new entry based on it + const newEntry = new CacheEntry({ + request, + response, + options, + }) + + // respond with the new entry while writing it to the cache + return newEntry.store('updated') + } +} + +module.exports = CacheEntry diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js new file mode 100644 index 0000000000000..67a66573bebe6 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js @@ -0,0 +1,11 @@ +class NotCachedError extends Error { + constructor (url) { + /* eslint-disable-next-line max-len */ + super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) + this.code = 'ENOTCACHED' + } +} + +module.exports = { + NotCachedError, +} diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js new file mode 100644 index 0000000000000..0de49d23fb933 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js @@ -0,0 +1,49 @@ +const { NotCachedError } = require('./errors.js') +const CacheEntry = require('./entry.js') +const remote = require('../remote.js') + +// do whatever is necessary to get a Response and return it +const cacheFetch = async (request, options) => { + // try to find a cached entry that satisfies this request + const entry = await CacheEntry.find(request, options) + if (!entry) { + // no cached result, if the cache mode is 'only-if-cached' that's a failure + if (options.cache === 'only-if-cached') { + throw new NotCachedError(request.url) + } + + // otherwise, we make a request, store it and return it + const response = await remote(request, options) + const newEntry = new CacheEntry({ request, response, options }) + return newEntry.store('miss') + } + + // we have a cached response that satisfies this request, however if the cache + // mode is 'no-cache' then we send the revalidation request no matter what + if (options.cache === 'no-cache') { + return entry.revalidate(request, options) + } + + // if the cached entry is not stale, or if the cache mode is 'force-cache' or + // 'only-if-cached' we can respond with the cached entry. set the status + // based on the result of needsRevalidation and respond + const _needsRevalidation = entry.policy.needsRevalidation(request) + if (options.cache === 'force-cache' || + options.cache === 'only-if-cached' || + !_needsRevalidation) { + return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') + } + + // if we got here, the cache entry is stale so revalidate it + return entry.revalidate(request, options) +} + +cacheFetch.invalidate = async (request, options) => { + if (!options.cachePath) { + return + } + + return CacheEntry.invalidate(request, options) +} + +module.exports = cacheFetch diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js new file mode 100644 index 0000000000000..f7684d562b7fa --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js @@ -0,0 +1,17 @@ +const { URL, format } = require('url') + +// options passed to url.format() when generating a key +const formatOptions = { + auth: false, + fragment: false, + search: true, + unicode: false, +} + +// returns a string to be used as the cache key for the Request +const cacheKey = (request) => { + const parsed = new URL(request.url) + return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` +} + +module.exports = cacheKey diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js new file mode 100644 index 0000000000000..ada3c8600dae9 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js @@ -0,0 +1,161 @@ +const CacheSemantics = require('http-cache-semantics') +const Negotiator = require('negotiator') +const ssri = require('ssri') + +// options passed to http-cache-semantics constructor +const policyOptions = { + shared: false, + ignoreCargoCult: true, +} + +// a fake empty response, used when only testing the +// request for storability +const emptyResponse = { status: 200, headers: {} } + +// returns a plain object representation of the Request +const requestObject = (request) => { + const _obj = { + method: request.method, + url: request.url, + headers: {}, + compress: request.compress, + } + + request.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +// returns a plain object representation of the Response +const responseObject = (response) => { + const _obj = { + status: response.status, + headers: {}, + } + + response.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +class CachePolicy { + constructor ({ entry, request, response, options }) { + this.entry = entry + this.request = requestObject(request) + this.response = responseObject(response) + this.options = options + this.policy = new CacheSemantics(this.request, this.response, policyOptions) + + if (this.entry) { + // if we have an entry, copy the timestamp to the _responseTime + // this is necessary because the CacheSemantics constructor forces + // the value to Date.now() which means a policy created from a + // cache entry is likely to always identify itself as stale + this.policy._responseTime = this.entry.metadata.time + } + } + + // static method to quickly determine if a request alone is storable + static storable (request, options) { + // no cachePath means no caching + if (!options.cachePath) { + return false + } + + // user explicitly asked not to cache + if (options.cache === 'no-store') { + return false + } + + // we only cache GET and HEAD requests + if (!['GET', 'HEAD'].includes(request.method)) { + return false + } + + // otherwise, let http-cache-semantics make the decision + // based on the request's 
headers + const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) + return policy.storable() + } + + // returns true if the policy satisfies the request + satisfies (request) { + const _req = requestObject(request) + if (this.request.headers.host !== _req.headers.host) { + return false + } + + if (this.request.compress !== _req.compress) { + return false + } + + const negotiatorA = new Negotiator(this.request) + const negotiatorB = new Negotiator(_req) + + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { + return false + } + + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { + return false + } + + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { + return false + } + + if (this.options.integrity) { + return ssri.parse(this.options.integrity).match(this.entry.integrity) + } + + return true + } + + // returns true if the request and response allow caching + storable () { + return this.policy.storable() + } + + // NOTE: this is a hack to avoid parsing the cache-control + // header ourselves, it returns true if the response's + // cache-control contains must-revalidate + get mustRevalidate () { + return !!this.policy._rescc['must-revalidate'] + } + + // returns true if the cached response requires revalidation + // for the given request + needsRevalidation (request) { + const _req = requestObject(request) + // force method to GET because we only cache GETs + // but can serve a HEAD from a cached GET + _req.method = 'GET' + return !this.policy.satisfiesWithoutRevalidation(_req) + } + + responseHeaders () { + return this.policy.responseHeaders() + } + + // returns a new object containing the appropriate headers + // to send a revalidation request + revalidationHeaders (request) { + const _req = requestObject(request) + return this.policy.revalidationHeaders(_req) + } + + // returns true if the request/response was revalidated + // successfully. returns false if a new response was received + revalidated (request, response) { + const _req = requestObject(request) + const _res = responseObject(response) + const policy = this.policy.revalidatedPolicy(_req, _res) + return !policy.modified + } +} + +module.exports = CachePolicy diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js new file mode 100644 index 0000000000000..233ba67e16550 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js @@ -0,0 +1,118 @@ +'use strict' + +const { FetchError, Request, isRedirect } = require('minipass-fetch') +const url = require('url') + +const CachePolicy = require('./cache/policy.js') +const cache = require('./cache/index.js') +const remote = require('./remote.js') + +// given a Request, a Response and user options +// return true if the response is a redirect that +// can be followed. 
we throw errors that will result +// in the fetch being rejected if the redirect is +// possible but invalid for some reason +const canFollowRedirect = (request, response, options) => { + if (!isRedirect(response.status)) { + return false + } + + if (options.redirect === 'manual') { + return false + } + + if (options.redirect === 'error') { + throw new FetchError(`redirect mode is set to error: ${request.url}`, + 'no-redirect', { code: 'ENOREDIRECT' }) + } + + if (!response.headers.has('location')) { + throw new FetchError(`redirect location header missing for: ${request.url}`, + 'no-location', { code: 'EINVALIDREDIRECT' }) + } + + if (request.counter >= request.follow) { + throw new FetchError(`maximum redirect reached at: ${request.url}`, + 'max-redirect', { code: 'EMAXREDIRECT' }) + } + + return true +} + +// given a Request, a Response, and the user's options return an object +// with a new Request and a new options object that will be used for +// following the redirect +const getRedirect = (request, response, options) => { + const _opts = { ...options } + const location = response.headers.get('location') + const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) + // Comment below is used under the following license: + /** + * @license + * Copyright (c) 2010-2012 Mikeal Rogers + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an "AS + * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (new url.URL(request.url).hostname !== redirectUrl.hostname) { + request.headers.delete('authorization') + request.headers.delete('cookie') + } + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if ( + response.status === 303 || + (request.method === 'POST' && [301, 302].includes(response.status)) + ) { + _opts.method = 'GET' + _opts.body = null + request.headers.delete('content-length') + } + + _opts.headers = {} + request.headers.forEach((value, key) => { + _opts.headers[key] = value + }) + + _opts.counter = ++request.counter + const redirectReq = new Request(url.format(redirectUrl), _opts) + return { + request: redirectReq, + options: _opts, + } +} + +const fetch = async (request, options) => { + const response = CachePolicy.storable(request, options) + ? 
await cache(request, options) + : await remote(request, options) + + // if the request wasn't a GET or HEAD, and the response + // status is between 200 and 399 inclusive, invalidate the + // request url + if (!['GET', 'HEAD'].includes(request.method) && + response.status >= 200 && + response.status <= 399) { + await cache.invalidate(request, options) + } + + if (!canFollowRedirect(request, response, options)) { + return response + } + + const redirect = getRedirect(request, response, options) + return fetch(redirect.request, redirect.options) +} + +module.exports = fetch diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js new file mode 100644 index 0000000000000..2f12e8e1b6113 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js @@ -0,0 +1,41 @@ +const { FetchError, Headers, Request, Response } = require('minipass-fetch') + +const configureOptions = require('./options.js') +const fetch = require('./fetch.js') + +const makeFetchHappen = (url, opts) => { + const options = configureOptions(opts) + + const request = new Request(url, options) + return fetch(request, options) +} + +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { + if (typeof defaultUrl === 'object') { + defaultOptions = defaultUrl + defaultUrl = null + } + + const defaultedFetch = (url, options = {}) => { + const finalUrl = url || defaultUrl + const finalOptions = { + ...defaultOptions, + ...options, + headers: { + ...defaultOptions.headers, + ...options.headers, + }, + } + return wrappedFetch(finalUrl, finalOptions) + } + + defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => + makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) + return defaultedFetch +} + +module.exports = makeFetchHappen +module.exports.FetchError = FetchError +module.exports.Headers = Headers +module.exports.Request = Request +module.exports.Response = Response diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js new file mode 100644 index 0000000000000..f77511279f831 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js @@ -0,0 +1,54 @@ +const dns = require('dns') + +const conditionalHeaders = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 'if-match', + 'if-range', +] + +const configureOptions = (opts) => { + const { strictSSL, ...options } = { ...opts } + options.method = options.method ? 
options.method.toUpperCase() : 'GET' + options.rejectUnauthorized = strictSSL !== false + + if (!options.retry) { + options.retry = { retries: 0 } + } else if (typeof options.retry === 'string') { + const retries = parseInt(options.retry, 10) + if (isFinite(retries)) { + options.retry = { retries } + } else { + options.retry = { retries: 0 } + } + } else if (typeof options.retry === 'number') { + options.retry = { retries: options.retry } + } else { + options.retry = { retries: 0, ...options.retry } + } + + options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } + + options.cache = options.cache || 'default' + if (options.cache === 'default') { + const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { + return conditionalHeaders.includes(name.toLowerCase()) + }) + if (hasConditionalHeader) { + options.cache = 'no-store' + } + } + + options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] + + // cacheManager is deprecated, but if it's set and + // cachePath is not we should copy it to the new field + if (options.cacheManager && !options.cachePath) { + options.cachePath = options.cacheManager + } + + return options +} + +module.exports = configureOptions diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js new file mode 100644 index 0000000000000..b1d221b2d0ce3 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js @@ -0,0 +1,41 @@ +'use strict' + +const MinipassPipeline = require('minipass-pipeline') + +class CachingMinipassPipeline extends MinipassPipeline { + #events = [] + #data = new Map() + + constructor (opts, ...streams) { + // CRITICAL: do NOT pass the streams to the call to super(), this will start + // the flow of data and potentially cause the events we need to catch to emit + // before we've finished our own setup. 
instead we call super() with no args, + // finish our setup, and then push the streams into ourselves to start the + // data flow + super() + this.#events = opts.events + + /* istanbul ignore next - coverage disabled because this is pointless to test here */ + if (streams.length) { + this.push(...streams) + } + } + + on (event, handler) { + if (this.#events.includes(event) && this.#data.has(event)) { + return handler(...this.#data.get(event)) + } + + return super.on(event, handler) + } + + emit (event, ...data) { + if (this.#events.includes(event)) { + this.#data.set(event, data) + } + + return super.emit(event, ...data) + } +} + +module.exports = CachingMinipassPipeline diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js new file mode 100644 index 0000000000000..2aef9f8f969b0 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js @@ -0,0 +1,127 @@ +const { Minipass } = require('minipass') +const fetch = require('minipass-fetch') +const promiseRetry = require('promise-retry') +const ssri = require('ssri') + +const CachingMinipassPipeline = require('./pipeline.js') +const { getAgent } = require('@npmcli/agent') +const pkg = require('../package.json') + +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) + 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + // from @npmcli/agent + 'ECONNECTIONTIMEOUT', + 'EIDLETIMEOUT', + 'ERESPONSETIMEOUT', + 'ETRANSFERTIMEOUT', + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) + // EINVALIDPROXY // invalid protocol from @npmcli/agent + // EINVALIDRESPONSE // invalid status code from @npmcli/agent +] + +const RETRY_TYPES = [ + 'request-timeout', +] + +// make a request directly to the remote source, +// retrying certain classes of errors as well as +// following redirects (through the cache if necessary) +// and verifying response integrity +const remoteFetch = (request, options) => { + const agent = getAgent(request.url, options) + if (!request.headers.has('connection')) { + request.headers.set('connection', agent ? 
'keep-alive' : 'close') + } + + if (!request.headers.has('user-agent')) { + request.headers.set('user-agent', USER_AGENT) + } + + // keep our own options since we're overriding the agent + // and the redirect mode + const _opts = { + ...options, + agent, + redirect: 'manual', + } + + return promiseRetry(async (retryHandler, attemptNum) => { + const req = new fetch.Request(request, _opts) + try { + let res = await fetch(req, _opts) + if (_opts.integrity && res.status === 200) { + // we got a 200 response and the user has specified an expected + // integrity value, so wrap the response in an ssri stream to verify it + const integrityStream = ssri.integrityStream({ + algorithms: _opts.algorithms, + integrity: _opts.integrity, + size: _opts.size, + }) + const pipeline = new CachingMinipassPipeline({ + events: ['integrity', 'size'], + }, res.body, integrityStream) + // we also propagate the integrity and size events out to the pipeline so we can use + // this new response body as an integrityEmitter for cacache + integrityStream.on('integrity', i => pipeline.emit('integrity', i)) + integrityStream.on('size', s => pipeline.emit('size', s)) + res = new fetch.Response(pipeline, res) + // set an explicit flag so we know if our response body will emit integrity and size + res.body.hasIntegrityEmitter = true + } + + res.headers.set('x-fetch-attempts', attemptNum) + + // do not retry POST requests, or requests with a streaming body + // do retry requests with a 408, 420, 429 or 500+ status in the response + const isStream = Minipass.isStream(req.body) + const isRetriable = req.method !== 'POST' && + !isStream && + ([408, 420, 429].includes(res.status) || res.status >= 500) + + if (isRetriable) { + if (typeof options.onRetry === 'function') { + options.onRetry(res) + } + + return retryHandler(res) + } + + return res + } catch (err) { + const code = (err.code === 'EPROMISERETRY') + ? 
err.retried.code + : err.code + + // err.retried will be the thing that was thrown from above + // if it's a response, we just got a bad status code and we + // can re-throw to allow the retry + const isRetryError = err.retried instanceof fetch.Response || + (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) + + if (req.method === 'POST' || isRetryError) { + throw err + } + + if (typeof options.onRetry === 'function') { + options.onRetry(err) + } + + return retryHandler(err) + } + }, options.retry).catch((err) => { + // don't reject for http errors, just return them + if (err.status >= 400 && err.type !== 'system') { + return err + } + + throw err + }) +} + +module.exports = remoteFetch diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json new file mode 100644 index 0000000000000..419db8fbb1289 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json @@ -0,0 +1,80 @@ +{ + "name": "make-fetch-happen", + "version": "12.0.0", + "description": "Opinionated, caching, retrying fetch client", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "posttest": "npm run lint", + "eslint": "eslint", + "lint": "eslint \"**/*.js\"", + "lintfix": "npm run lint -- --fix", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/make-fetch-happen.git" + }, + "keywords": [ + "http", + "request", + "fetch", + "mean girls", + "caching", + "cache", + "subresource integrity" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "@npmcli/agent": "^1.1.0", + "cacache": "^17.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.18.0", + "nock": "^13.2.4", + "safe-buffer": "^5.2.1", + "standard-version": "^9.3.2", + "tap": "^16.0.0" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + }, + "tap": { + "color": 1, + "files": "test/*.js", + "check-coverage": true, + "timeout": 60, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ], + "version": "4.18.0", + "publish": "true" + } +} diff --git a/node_modules/npm-registry-fetch/node_modules/minipass/LICENSE b/node_modules/npm-registry-fetch/node_modules/minipass/LICENSE new file mode 100644 index 0000000000000..97f8e32ed82e4 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/index.js b/node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/index.js new file mode 100644 index 0000000000000..068c095b69793 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/index.js @@ -0,0 +1,1028 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0; +const proc = typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + }; +const node_events_1 = require("node:events"); +const node_stream_1 = __importDefault(require("node:stream")); +const node_string_decoder_1 = require("node:string_decoder"); +/** + * Return true if the argument is a Minipass stream, Node stream, or something + * else that Minipass can interact with. + */ +const isStream = (s) => !!s && + typeof s === 'object' && + (s instanceof Minipass || + s instanceof node_stream_1.default || + (0, exports.isReadable)(s) || + (0, exports.isWritable)(s)); +exports.isStream = isStream; +/** + * Return true if the argument is a valid {@link Minipass.Readable} + */ +const isReadable = (s) => !!s && + typeof s === 'object' && + s instanceof node_events_1.EventEmitter && + typeof s.pipe === 'function' && + // node core Writable streams have a pipe() method, but it throws + s.pipe !== node_stream_1.default.Writable.prototype.pipe; +exports.isReadable = isReadable; +/** + * Return true if the argument is a valid {@link Minipass.Writable} + */ +const isWritable = (s) => !!s && + typeof s === 'object' && + s instanceof node_events_1.EventEmitter && + typeof s.write === 'function' && + typeof s.end === 'function'; +exports.isWritable = isWritable; +const EOF = Symbol('EOF'); +const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); +const EMITTED_END = Symbol('emittedEnd'); +const EMITTING_END = Symbol('emittingEnd'); +const EMITTED_ERROR = Symbol('emittedError'); +const CLOSED = Symbol('closed'); +const READ = Symbol('read'); +const FLUSH = Symbol('flush'); +const FLUSHCHUNK = Symbol('flushChunk'); +const ENCODING = Symbol('encoding'); +const DECODER = Symbol('decoder'); +const FLOWING = Symbol('flowing'); +const PAUSED = Symbol('paused'); +const RESUME = Symbol('resume'); +const BUFFER = Symbol('buffer'); +const PIPES = Symbol('pipes'); +const BUFFERLENGTH = Symbol('bufferLength'); +const BUFFERPUSH = Symbol('bufferPush'); +const BUFFERSHIFT = Symbol('bufferShift'); +const OBJECTMODE = Symbol('objectMode'); +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed'); +// internal event when stream has an error +const ERROR = Symbol('error'); +const EMITDATA = Symbol('emitData'); +const EMITEND = Symbol('emitEnd'); +const EMITEND2 = Symbol('emitEnd2'); +const ASYNC = Symbol('async'); +const ABORT = Symbol('abort'); +const ABORTED = Symbol('aborted'); +const SIGNAL = Symbol('signal'); +const DATALISTENERS = Symbol('dataListeners'); +const DISCARDED = Symbol('discarded'); +const defer = (fn) => 
Promise.resolve().then(fn); +const nodefer = (fn) => fn(); +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; +const isArrayBufferLike = (b) => b instanceof ArrayBuffer || + (!!b && + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0); +const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); +/** + * Internal class representing a pipe to a destination stream. + * + * @internal + */ +class Pipe { + src; + dest; + opts; + ondrain; + constructor(src, dest, opts) { + this.src = src; + this.dest = dest; + this.opts = opts; + this.ondrain = () => src[RESUME](); + this.dest.on('drain', this.ondrain); + } + unpipe() { + this.dest.removeListener('drain', this.ondrain); + } + // only here for the prototype + /* c8 ignore start */ + proxyErrors(_er) { } + /* c8 ignore stop */ + end() { + this.unpipe(); + if (this.opts.end) + this.dest.end(); + } +} +/** + * Internal class representing a pipe to a destination stream where + * errors are proxied. + * + * @internal + */ +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors); + super.unpipe(); + } + constructor(src, dest, opts) { + super(src, dest, opts); + this.proxyErrors = er => dest.emit('error', er); + src.on('error', this.proxyErrors); + } +} +const isObjectModeOptions = (o) => !!o.objectMode; +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; +/** + * Main export, the Minipass class + * + * `RType` is the type of data emitted, defaults to Buffer + * + * `WType` is the type of data to be written, if RType is buffer or string, + * then any {@link Minipass.ContiguousData} is allowed. + * + * `Events` is the set of event handler signatures that this object + * will emit, see {@link Minipass.Events} + */ +class Minipass extends node_events_1.EventEmitter { + [FLOWING] = false; + [PAUSED] = false; + [PIPES] = []; + [BUFFER] = []; + [OBJECTMODE]; + [ENCODING]; + [ASYNC]; + [DECODER]; + [EOF] = false; + [EMITTED_END] = false; + [EMITTING_END] = false; + [CLOSED] = false; + [EMITTED_ERROR] = null; + [BUFFERLENGTH] = 0; + [DESTROYED] = false; + [SIGNAL]; + [ABORTED] = false; + [DATALISTENERS] = 0; + [DISCARDED] = false; + /** + * true if the stream can be written + */ + writable = true; + /** + * true if the stream can be read + */ + readable = true; + /** + * If `RType` is Buffer, then options do not need to be provided. + * Otherwise, an options object must be provided to specify either + * {@link Minipass.SharedOptions.objectMode} or + * {@link Minipass.SharedOptions.encoding}, as appropriate. + */ + constructor(...args) { + const options = (args[0] || + {}); + super(); + if (options.objectMode && typeof options.encoding === 'string') { + throw new TypeError('Encoding and objectMode may not be used together'); + } + if (isObjectModeOptions(options)) { + this[OBJECTMODE] = true; + this[ENCODING] = null; + } + else if (isEncodingOptions(options)) { + this[ENCODING] = options.encoding; + this[OBJECTMODE] = false; + } + else { + this[OBJECTMODE] = false; + this[ENCODING] = null; + } + this[ASYNC] = !!options.async; + this[DECODER] = this[ENCODING] + ? 
new node_string_decoder_1.StringDecoder(this[ENCODING]) + : null; + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); + } + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); + } + const { signal } = options; + if (signal) { + this[SIGNAL] = signal; + if (signal.aborted) { + this[ABORT](); + } + else { + signal.addEventListener('abort', () => this[ABORT]()); + } + } + } + /** + * The amount of data stored in the buffer waiting to be read. + * + * For Buffer strings, this will be the total byte length. + * For string encoding streams, this will be the string character length, + * according to JavaScript's `string.length` logic. + * For objectMode streams, this is a count of the items waiting to be + * emitted. + */ + get bufferLength() { + return this[BUFFERLENGTH]; + } + /** + * The `BufferEncoding` currently in use, or `null` + */ + get encoding() { + return this[ENCODING]; + } + /** + * @deprecated - This is a read only property + */ + set encoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * @deprecated - Encoding may only be set at instantiation time + */ + setEncoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * True if this is an objectMode stream + */ + get objectMode() { + return this[OBJECTMODE]; + } + /** + * @deprecated - This is a read-only property + */ + set objectMode(_om) { + throw new Error('objectMode must be set at instantiation time'); + } + /** + * true if this is an async stream + */ + get ['async']() { + return this[ASYNC]; + } + /** + * Set to true to make this stream async. + * + * Once set, it cannot be unset, as this would potentially cause incorrect + * behavior. Ie, a sync stream can be made async, but an async stream + * cannot be safely made sync. + */ + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a; + } + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true; + this.emit('abort', this[SIGNAL]?.reason); + this.destroy(this[SIGNAL]?.reason); + } + /** + * True if the stream has been aborted. + */ + get aborted() { + return this[ABORTED]; + } + /** + * No-op setter. Stream aborted status is set via the AbortSignal provided + * in the constructor options. + */ + set aborted(_) { } + write(chunk, encoding, cb) { + if (this[ABORTED]) + return false; + if (this[EOF]) + throw new Error('write after end'); + if (this[DESTROYED]) { + this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); + return true; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (!encoding) + encoding = 'utf8'; + const fn = this[ASYNC] ? defer : nodefer; + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! 
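+        // e.g. (an illustrative sketch) these two writes buffer the same
+        // chunk on a non-objectMode stream, since bytes 104 and 105 are 'hi':
+        //
+        //   stream.write(new Uint8Array([104, 105]))
+        //   stream.write(Buffer.from('hi'))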
+ // leave strings and buffers as-is + // anything is only allowed if in object mode, so throw + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); + } + else if (isArrayBufferLike(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk); + } + else if (typeof chunk !== 'string') { + throw new Error('Non-contiguous data written to non-objectMode stream'); + } + } + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + // maybe impossible? + /* c8 ignore start */ + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + /* c8 ignore stop */ + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { + //@ts-ignore - sinful unsafe type change + chunk = Buffer.from(chunk, encoding); + } + if (Buffer.isBuffer(chunk) && this[ENCODING]) { + //@ts-ignore - sinful unsafe type change + chunk = this[DECODER].write(chunk); + } + // Note: flushing CAN potentially switch us into not-flowing mode + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + /** + * Low-level explicit read method. + * + * In objectMode, the argument is ignored, and one item is returned if + * available. + * + * `n` is the number of bytes (or in the case of encoding streams, + * characters) to consume. If `n` is not provided, then the entire buffer + * is returned, or `null` is returned if no data is available. + * + * If `n` is greater that the amount of data in the internal buffer, + * then `null` is returned. + */ + read(n) { + if (this[DESTROYED]) + return null; + this[DISCARDED] = false; + if (this[BUFFERLENGTH] === 0 || + n === 0 || + (n && n > this[BUFFERLENGTH])) { + this[MAYBE_EMIT_END](); + return null; + } + if (this[OBJECTMODE]) + n = null; + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + // not object mode, so if we have an encoding, then RType is string + // otherwise, must be Buffer + this[BUFFER] = [ + (this[ENCODING] + ? 
this[BUFFER].join('') + : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), + ]; + } + const ret = this[READ](n || null, this[BUFFER][0]); + this[MAYBE_EMIT_END](); + return ret; + } + [READ](n, chunk) { + if (this[OBJECTMODE]) + this[BUFFERSHIFT](); + else { + const c = chunk; + if (n === c.length || n === null) + this[BUFFERSHIFT](); + else if (typeof c === 'string') { + this[BUFFER][0] = c.slice(n); + chunk = c.slice(0, n); + this[BUFFERLENGTH] -= n; + } + else { + this[BUFFER][0] = c.subarray(n); + chunk = c.subarray(0, n); + this[BUFFERLENGTH] -= n; + } + } + this.emit('data', chunk); + if (!this[BUFFER].length && !this[EOF]) + this.emit('drain'); + return chunk; + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (chunk !== undefined) + this.write(chunk, encoding); + if (cb) + this.once('end', cb); + this[EOF] = true; + this.writable = false; + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. + if (this[FLOWING] || !this[PAUSED]) + this[MAYBE_EMIT_END](); + return this; + } + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) + return; + if (!this[DATALISTENERS] && !this[PIPES].length) { + this[DISCARDED] = true; + } + this[PAUSED] = false; + this[FLOWING] = true; + this.emit('resume'); + if (this[BUFFER].length) + this[FLUSH](); + else if (this[EOF]) + this[MAYBE_EMIT_END](); + else + this.emit('drain'); + } + /** + * Resume the stream if it is currently in a paused state + * + * If called when there are no pipe destinations or `data` event listeners, + * this will place the stream in a "discarded" state, where all data will + * be thrown away. The discarded state is removed if a pipe destination or + * data handler is added, if pause() is called, or if any synchronous or + * asynchronous iteration is started. + */ + resume() { + return this[RESUME](); + } + /** + * Pause the stream + */ + pause() { + this[FLOWING] = false; + this[PAUSED] = true; + this[DISCARDED] = false; + } + /** + * true if the stream has been forcibly destroyed + */ + get destroyed() { + return this[DESTROYED]; + } + /** + * true if the stream is currently in a flowing state, meaning that + * any writes will be immediately emitted. + */ + get flowing() { + return this[FLOWING]; + } + /** + * true if the stream is currently in a paused state + */ + get paused() { + return this[PAUSED]; + } + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1; + else + this[BUFFERLENGTH] += chunk.length; + this[BUFFER].push(chunk); + } + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1; + else + this[BUFFERLENGTH] -= this[BUFFER][0].length; + return this[BUFFER].shift(); + } + [FLUSH](noDrain = false) { + do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && + this[BUFFER].length); + if (!noDrain && !this[BUFFER].length && !this[EOF]) + this.emit('drain'); + } + [FLUSHCHUNK](chunk) { + this.emit('data', chunk); + return this[FLOWING]; + } + /** + * Pipe all data emitted by this stream into the destination provided. + * + * Triggers the flow of data. 
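+     *
+     * A small usage sketch (the output path below is hypothetical):
+     *
+     *   const fs = require('node:fs')
+     *   const src = new Minipass({ encoding: 'utf8' })
+     *   src.pipe(fs.createWriteStream('/tmp/example-out.txt'))
+     *   src.end('hello')
+     *
+     * Note that piping to process.stdout or process.stderr forces opts.end
+     * to false, so those destinations are never closed by this stream ending.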
+ */ + pipe(dest, opts) { + if (this[DESTROYED]) + return dest; + this[DISCARDED] = false; + const ended = this[EMITTED_END]; + opts = opts || {}; + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false; + else + opts.end = opts.end !== false; + opts.proxyErrors = !!opts.proxyErrors; + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end(); + } + else { + // "as" here just ignores the WType, which pipes don't care about, + // since they're only consuming from us, and writing to the dest + this[PIPES].push(!opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)); + if (this[ASYNC]) + defer(() => this[RESUME]()); + else + this[RESUME](); + } + return dest; + } + /** + * Fully unhook a piped destination stream. + * + * If the destination stream was the only consumer of this stream (ie, + * there are no other piped destinations or `'data'` event listeners) + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest); + if (p) { + if (this[PIPES].length === 1) { + if (this[FLOWING] && this[DATALISTENERS] === 0) { + this[FLOWING] = false; + } + this[PIPES] = []; + } + else + this[PIPES].splice(this[PIPES].indexOf(p), 1); + p.unpipe(); + } + } + /** + * Alias for {@link Minipass#on} + */ + addListener(ev, handler) { + return this.on(ev, handler); + } + /** + * Mostly identical to `EventEmitter.on`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * - Adding a 'data' event handler will trigger the flow of data + * + * - Adding a 'readable' event handler when there is data waiting to be read + * will cause 'readable' to be emitted immediately. + * + * - Adding an 'endish' event handler ('end', 'finish', etc.) which has + * already passed will cause the event to be emitted immediately and all + * handlers removed. + * + * - Adding an 'error' event handler after an error has been emitted will + * cause the event to be re-emitted immediately with the error previously + * raised. + */ + on(ev, handler) { + const ret = super.on(ev, handler); + if (ev === 'data') { + this[DISCARDED] = false; + this[DATALISTENERS]++; + if (!this[PIPES].length && !this[FLOWING]) { + this[RESUME](); + } + } + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { + super.emit('readable'); + } + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev); + this.removeAllListeners(ev); + } + else if (ev === 'error' && this[EMITTED_ERROR]) { + const h = handler; + if (this[ASYNC]) + defer(() => h.call(this, this[EMITTED_ERROR])); + else + h.call(this, this[EMITTED_ERROR]); + } + return ret; + } + /** + * Alias for {@link Minipass#off} + */ + removeListener(ev, handler) { + return this.off(ev, handler); + } + /** + * Mostly identical to `EventEmitter.off` + * + * If a 'data' event handler is removed, and it was the last consumer + * (ie, there are no pipe destinations or other 'data' event listeners), + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + off(ev, handler) { + const ret = super.off(ev, handler); + // if we previously had listeners, and now we don't, and we don't + // have any pipes, then stop the flow, unless it's been explicitly + // put in a discarded flowing state via stream.resume(). 
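+        // e.g. (an illustrative sketch):
+        //
+        //   const mp = new Minipass({ encoding: 'utf8' })
+        //   const onData = () => {}
+        //   mp.on('data', onData) // last consumer attached, flow starts
+        //   mp.off('data', onData) // last consumer removed, flow stops
+        //
+        // after which writes buffer until another consumer appears or
+        // resume() is explicitly called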
+ if (ev === 'data') { + this[DATALISTENERS] = this.listeners('data').length; + if (this[DATALISTENERS] === 0 && + !this[DISCARDED] && + !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * Mostly identical to `EventEmitter.removeAllListeners` + * + * If all 'data' event handlers are removed, and they were the last consumer + * (ie, there are no pipe destinations), then the flow of data will stop + * until there is another consumer or {@link Minipass#resume} is explicitly + * called. + */ + removeAllListeners(ev) { + const ret = super.removeAllListeners(ev); + if (ev === 'data' || ev === undefined) { + this[DATALISTENERS] = 0; + if (!this[DISCARDED] && !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * true if the 'end' event has been emitted + */ + get emittedEnd() { + return this[EMITTED_END]; + } + [MAYBE_EMIT_END]() { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF]) { + this[EMITTING_END] = true; + this.emit('end'); + this.emit('prefinish'); + this.emit('finish'); + if (this[CLOSED]) + this.emit('close'); + this[EMITTING_END] = false; + } + } + /** + * Mostly identical to `EventEmitter.emit`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * If the stream has been destroyed, and the event is something other + * than 'close' or 'error', then `false` is returned and no handlers + * are called. + * + * If the event is 'end', and has already been emitted, then the event + * is ignored. If the stream is in a paused or non-flowing state, then + * the event will be deferred until data flow resumes. If the stream is + * async, then handlers will be called on the next tick rather than + * immediately. + * + * If the event is 'close', and 'end' has not yet been emitted, then + * the event will be deferred until after 'end' is emitted. + * + * If the event is 'error', and an AbortSignal was provided for the stream, + * and there are no listeners, then the event is ignored, matching the + * behavior of node core streams in the presense of an AbortSignal. + * + * If the event is 'finish' or 'prefinish', then all listeners will be + * removed after emitting the event, to prevent double-firing. + */ + emit(ev, ...args) { + const data = args[0]; + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && + ev !== 'close' && + ev !== DESTROYED && + this[DESTROYED]) { + return false; + } + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? (defer(() => this[EMITDATA](data)), true) + : this[EMITDATA](data); + } + else if (ev === 'end') { + return this[EMITEND](); + } + else if (ev === 'close') { + this[CLOSED] = true; + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return false; + const ret = super.emit('close'); + this.removeAllListeners('close'); + return ret; + } + else if (ev === 'error') { + this[EMITTED_ERROR] = data; + super.emit(ERROR, data); + const ret = !this[SIGNAL] || this.listeners('error').length + ? 
super.emit('error', data) + : false; + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'resume') { + const ret = super.emit('resume'); + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev); + this.removeAllListeners(ev); + return ret; + } + // Some other unknown event + const ret = super.emit(ev, ...args); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) + this.pause(); + } + const ret = this[DISCARDED] ? false : super.emit('data', data); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITEND]() { + if (this[EMITTED_END]) + return false; + this[EMITTED_END] = true; + this.readable = false; + return this[ASYNC] + ? (defer(() => this[EMITEND2]()), true) + : this[EMITEND2](); + } + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end(); + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data); + } + if (!this[DISCARDED]) + super.emit('data', data); + } + } + for (const p of this[PIPES]) { + p.end(); + } + const ret = super.emit('end'); + this.removeAllListeners('end'); + return ret; + } + /** + * Return a Promise that resolves to an array of all emitted data once + * the stream ends. + */ + async collect() { + const buf = Object.assign([], { + dataLength: 0, + }); + if (!this[OBJECTMODE]) + buf.dataLength = 0; + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise(); + this.on('data', c => { + buf.push(c); + if (!this[OBJECTMODE]) + buf.dataLength += c.length; + }); + await p; + return buf; + } + /** + * Return a Promise that resolves to the concatenation of all emitted data + * once the stream ends. + * + * Not allowed on objectMode streams. + */ + async concat() { + if (this[OBJECTMODE]) { + throw new Error('cannot concat in objectMode'); + } + const buf = await this.collect(); + return (this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength)); + } + /** + * Return a void Promise that resolves once the stream ends. + */ + async promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))); + this.on('error', er => reject(er)); + this.on('end', () => resolve()); + }); + } + /** + * Asynchronous `for await of` iteration. + * + * This will continue emitting all chunks until the stream terminates. + */ + [Symbol.asyncIterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. 
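+        // For illustration only (not part of the library), a typical
+        // consumer of the async iterator looks like:
+        //
+        //   for await (const chunk of mp) {
+        //     handleChunk(chunk) // Buffer, string, or object per options
+        //   }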
+ this[DISCARDED] = false; + let stopped = false; + const stop = async () => { + this.pause(); + stopped = true; + return { value: undefined, done: true }; + }; + const next = () => { + if (stopped) + return stop(); + const res = this.read(); + if (res !== null) + return Promise.resolve({ done: false, value: res }); + if (this[EOF]) + return stop(); + let resolve; + let reject; + const onerr = (er) => { + this.off('data', ondata); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + stop(); + reject(er); + }; + const ondata = (value) => { + this.off('error', onerr); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + this.pause(); + resolve({ value, done: !!this[EOF] }); + }; + const onend = () => { + this.off('error', onerr); + this.off('data', ondata); + this.off(DESTROYED, ondestroy); + stop(); + resolve({ done: true, value: undefined }); + }; + const ondestroy = () => onerr(new Error('stream destroyed')); + return new Promise((res, rej) => { + reject = rej; + resolve = res; + this.once(DESTROYED, ondestroy); + this.once('error', onerr); + this.once('end', onend); + this.once('data', ondata); + }); + }; + return { + next, + throw: stop, + return: stop, + [Symbol.asyncIterator]() { + return this; + }, + }; + } + /** + * Synchronous `for of` iteration. + * + * The iteration will terminate when the internal buffer runs out, even + * if the stream has not yet terminated. + */ + [Symbol.iterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. + this[DISCARDED] = false; + let stopped = false; + const stop = () => { + this.pause(); + this.off(ERROR, stop); + this.off(DESTROYED, stop); + this.off('end', stop); + stopped = true; + return { done: true, value: undefined }; + }; + const next = () => { + if (stopped) + return stop(); + const value = this.read(); + return value === null ? stop() : { done: false, value }; + }; + this.once('end', stop); + this.once(ERROR, stop); + this.once(DESTROYED, stop); + return { + next, + throw: stop, + return: stop, + [Symbol.iterator]() { + return this; + }, + }; + } + /** + * Destroy a stream, preventing it from being used for any further purpose. + * + * If the stream has a `close()` method, then it will be called on + * destruction. + * + * After destruction, any attempt to write data, read data, or emit most + * events will be ignored. + * + * If an error argument is provided, then it will be emitted in an + * 'error' event. + */ + destroy(er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er); + else + this.emit(DESTROYED); + return this; + } + this[DESTROYED] = true; + this[DISCARDED] = true; + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0; + this[BUFFERLENGTH] = 0; + const wc = this; + if (typeof wc.close === 'function' && !this[CLOSED]) + wc.close(); + if (er) + this.emit('error', er); + // if no error to emit, still reject pending promises + else + this.emit(DESTROYED); + return this; + } + /** + * Alias for {@link isStream} + * + * Former export location, maintained for backwards compatibility. 
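+     * New code should prefer the exported `isStream` function over this
+     * static getter.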
+ * + * @deprecated + */ + static get isStream() { + return exports.isStream; + } +} +exports.Minipass = Minipass; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/package.json b/node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/index.js b/node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/index.js new file mode 100644 index 0000000000000..b5fa4513c9083 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/index.js @@ -0,0 +1,1018 @@ +const proc = typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + }; +import { EventEmitter } from 'node:events'; +import Stream from 'node:stream'; +import { StringDecoder } from 'node:string_decoder'; +/** + * Return true if the argument is a Minipass stream, Node stream, or something + * else that Minipass can interact with. + */ +export const isStream = (s) => !!s && + typeof s === 'object' && + (s instanceof Minipass || + s instanceof Stream || + isReadable(s) || + isWritable(s)); +/** + * Return true if the argument is a valid {@link Minipass.Readable} + */ +export const isReadable = (s) => !!s && + typeof s === 'object' && + s instanceof EventEmitter && + typeof s.pipe === 'function' && + // node core Writable streams have a pipe() method, but it throws + s.pipe !== Stream.Writable.prototype.pipe; +/** + * Return true if the argument is a valid {@link Minipass.Writable} + */ +export const isWritable = (s) => !!s && + typeof s === 'object' && + s instanceof EventEmitter && + typeof s.write === 'function' && + typeof s.end === 'function'; +const EOF = Symbol('EOF'); +const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); +const EMITTED_END = Symbol('emittedEnd'); +const EMITTING_END = Symbol('emittingEnd'); +const EMITTED_ERROR = Symbol('emittedError'); +const CLOSED = Symbol('closed'); +const READ = Symbol('read'); +const FLUSH = Symbol('flush'); +const FLUSHCHUNK = Symbol('flushChunk'); +const ENCODING = Symbol('encoding'); +const DECODER = Symbol('decoder'); +const FLOWING = Symbol('flowing'); +const PAUSED = Symbol('paused'); +const RESUME = Symbol('resume'); +const BUFFER = Symbol('buffer'); +const PIPES = Symbol('pipes'); +const BUFFERLENGTH = Symbol('bufferLength'); +const BUFFERPUSH = Symbol('bufferPush'); +const BUFFERSHIFT = Symbol('bufferShift'); +const OBJECTMODE = Symbol('objectMode'); +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed'); +// internal event when stream has an error +const ERROR = Symbol('error'); +const EMITDATA = Symbol('emitData'); +const EMITEND = Symbol('emitEnd'); +const EMITEND2 = Symbol('emitEnd2'); +const ASYNC = Symbol('async'); +const ABORT = Symbol('abort'); +const ABORTED = Symbol('aborted'); +const SIGNAL = Symbol('signal'); +const DATALISTENERS = Symbol('dataListeners'); +const DISCARDED = Symbol('discarded'); +const defer = (fn) => Promise.resolve().then(fn); +const nodefer = (fn) => fn(); +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; +const isArrayBufferLike = (b) => b instanceof ArrayBuffer || + (!!b && + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + 
b.byteLength >= 0); +const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); +/** + * Internal class representing a pipe to a destination stream. + * + * @internal + */ +class Pipe { + src; + dest; + opts; + ondrain; + constructor(src, dest, opts) { + this.src = src; + this.dest = dest; + this.opts = opts; + this.ondrain = () => src[RESUME](); + this.dest.on('drain', this.ondrain); + } + unpipe() { + this.dest.removeListener('drain', this.ondrain); + } + // only here for the prototype + /* c8 ignore start */ + proxyErrors(_er) { } + /* c8 ignore stop */ + end() { + this.unpipe(); + if (this.opts.end) + this.dest.end(); + } +} +/** + * Internal class representing a pipe to a destination stream where + * errors are proxied. + * + * @internal + */ +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors); + super.unpipe(); + } + constructor(src, dest, opts) { + super(src, dest, opts); + this.proxyErrors = er => dest.emit('error', er); + src.on('error', this.proxyErrors); + } +} +const isObjectModeOptions = (o) => !!o.objectMode; +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; +/** + * Main export, the Minipass class + * + * `RType` is the type of data emitted, defaults to Buffer + * + * `WType` is the type of data to be written, if RType is buffer or string, + * then any {@link Minipass.ContiguousData} is allowed. + * + * `Events` is the set of event handler signatures that this object + * will emit, see {@link Minipass.Events} + */ +export class Minipass extends EventEmitter { + [FLOWING] = false; + [PAUSED] = false; + [PIPES] = []; + [BUFFER] = []; + [OBJECTMODE]; + [ENCODING]; + [ASYNC]; + [DECODER]; + [EOF] = false; + [EMITTED_END] = false; + [EMITTING_END] = false; + [CLOSED] = false; + [EMITTED_ERROR] = null; + [BUFFERLENGTH] = 0; + [DESTROYED] = false; + [SIGNAL]; + [ABORTED] = false; + [DATALISTENERS] = 0; + [DISCARDED] = false; + /** + * true if the stream can be written + */ + writable = true; + /** + * true if the stream can be read + */ + readable = true; + /** + * If `RType` is Buffer, then options do not need to be provided. + * Otherwise, an options object must be provided to specify either + * {@link Minipass.SharedOptions.objectMode} or + * {@link Minipass.SharedOptions.encoding}, as appropriate. + */ + constructor(...args) { + const options = (args[0] || + {}); + super(); + if (options.objectMode && typeof options.encoding === 'string') { + throw new TypeError('Encoding and objectMode may not be used together'); + } + if (isObjectModeOptions(options)) { + this[OBJECTMODE] = true; + this[ENCODING] = null; + } + else if (isEncodingOptions(options)) { + this[ENCODING] = options.encoding; + this[OBJECTMODE] = false; + } + else { + this[OBJECTMODE] = false; + this[ENCODING] = null; + } + this[ASYNC] = !!options.async; + this[DECODER] = this[ENCODING] + ? 
new StringDecoder(this[ENCODING]) + : null; + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); + } + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); + } + const { signal } = options; + if (signal) { + this[SIGNAL] = signal; + if (signal.aborted) { + this[ABORT](); + } + else { + signal.addEventListener('abort', () => this[ABORT]()); + } + } + } + /** + * The amount of data stored in the buffer waiting to be read. + * + * For Buffer strings, this will be the total byte length. + * For string encoding streams, this will be the string character length, + * according to JavaScript's `string.length` logic. + * For objectMode streams, this is a count of the items waiting to be + * emitted. + */ + get bufferLength() { + return this[BUFFERLENGTH]; + } + /** + * The `BufferEncoding` currently in use, or `null` + */ + get encoding() { + return this[ENCODING]; + } + /** + * @deprecated - This is a read only property + */ + set encoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * @deprecated - Encoding may only be set at instantiation time + */ + setEncoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * True if this is an objectMode stream + */ + get objectMode() { + return this[OBJECTMODE]; + } + /** + * @deprecated - This is a read-only property + */ + set objectMode(_om) { + throw new Error('objectMode must be set at instantiation time'); + } + /** + * true if this is an async stream + */ + get ['async']() { + return this[ASYNC]; + } + /** + * Set to true to make this stream async. + * + * Once set, it cannot be unset, as this would potentially cause incorrect + * behavior. Ie, a sync stream can be made async, but an async stream + * cannot be safely made sync. + */ + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a; + } + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true; + this.emit('abort', this[SIGNAL]?.reason); + this.destroy(this[SIGNAL]?.reason); + } + /** + * True if the stream has been aborted. + */ + get aborted() { + return this[ABORTED]; + } + /** + * No-op setter. Stream aborted status is set via the AbortSignal provided + * in the constructor options. + */ + set aborted(_) { } + write(chunk, encoding, cb) { + if (this[ABORTED]) + return false; + if (this[EOF]) + throw new Error('write after end'); + if (this[DESTROYED]) { + this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); + return true; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (!encoding) + encoding = 'utf8'; + const fn = this[ASYNC] ? defer : nodefer; + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! 
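+        // (e.g. a Uint8Array or ArrayBuffer chunk written here is converted
+        // to an equivalent Buffer before buffering or emitting)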
+        // leave strings and buffers as-is
+        // anything else is only allowed in object mode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ?
this[BUFFER].join('') + : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), + ]; + } + const ret = this[READ](n || null, this[BUFFER][0]); + this[MAYBE_EMIT_END](); + return ret; + } + [READ](n, chunk) { + if (this[OBJECTMODE]) + this[BUFFERSHIFT](); + else { + const c = chunk; + if (n === c.length || n === null) + this[BUFFERSHIFT](); + else if (typeof c === 'string') { + this[BUFFER][0] = c.slice(n); + chunk = c.slice(0, n); + this[BUFFERLENGTH] -= n; + } + else { + this[BUFFER][0] = c.subarray(n); + chunk = c.subarray(0, n); + this[BUFFERLENGTH] -= n; + } + } + this.emit('data', chunk); + if (!this[BUFFER].length && !this[EOF]) + this.emit('drain'); + return chunk; + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (chunk !== undefined) + this.write(chunk, encoding); + if (cb) + this.once('end', cb); + this[EOF] = true; + this.writable = false; + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. + if (this[FLOWING] || !this[PAUSED]) + this[MAYBE_EMIT_END](); + return this; + } + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) + return; + if (!this[DATALISTENERS] && !this[PIPES].length) { + this[DISCARDED] = true; + } + this[PAUSED] = false; + this[FLOWING] = true; + this.emit('resume'); + if (this[BUFFER].length) + this[FLUSH](); + else if (this[EOF]) + this[MAYBE_EMIT_END](); + else + this.emit('drain'); + } + /** + * Resume the stream if it is currently in a paused state + * + * If called when there are no pipe destinations or `data` event listeners, + * this will place the stream in a "discarded" state, where all data will + * be thrown away. The discarded state is removed if a pipe destination or + * data handler is added, if pause() is called, or if any synchronous or + * asynchronous iteration is started. + */ + resume() { + return this[RESUME](); + } + /** + * Pause the stream + */ + pause() { + this[FLOWING] = false; + this[PAUSED] = true; + this[DISCARDED] = false; + } + /** + * true if the stream has been forcibly destroyed + */ + get destroyed() { + return this[DESTROYED]; + } + /** + * true if the stream is currently in a flowing state, meaning that + * any writes will be immediately emitted. + */ + get flowing() { + return this[FLOWING]; + } + /** + * true if the stream is currently in a paused state + */ + get paused() { + return this[PAUSED]; + } + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1; + else + this[BUFFERLENGTH] += chunk.length; + this[BUFFER].push(chunk); + } + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1; + else + this[BUFFERLENGTH] -= this[BUFFER][0].length; + return this[BUFFER].shift(); + } + [FLUSH](noDrain = false) { + do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && + this[BUFFER].length); + if (!noDrain && !this[BUFFER].length && !this[EOF]) + this.emit('drain'); + } + [FLUSHCHUNK](chunk) { + this.emit('data', chunk); + return this[FLOWING]; + } + /** + * Pipe all data emitted by this stream into the destination provided. + * + * Triggers the flow of data. 
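+     *
+     * A minimal usage sketch (names are illustrative):
+     *
+     *   const mp = new Minipass({ encoding: 'utf8' })
+     *   mp.pipe(process.stdout) // `end` defaults to false for stdio
+     *   mp.end('hello, world\n')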
+ */ + pipe(dest, opts) { + if (this[DESTROYED]) + return dest; + this[DISCARDED] = false; + const ended = this[EMITTED_END]; + opts = opts || {}; + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false; + else + opts.end = opts.end !== false; + opts.proxyErrors = !!opts.proxyErrors; + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end(); + } + else { + // "as" here just ignores the WType, which pipes don't care about, + // since they're only consuming from us, and writing to the dest + this[PIPES].push(!opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)); + if (this[ASYNC]) + defer(() => this[RESUME]()); + else + this[RESUME](); + } + return dest; + } + /** + * Fully unhook a piped destination stream. + * + * If the destination stream was the only consumer of this stream (ie, + * there are no other piped destinations or `'data'` event listeners) + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest); + if (p) { + if (this[PIPES].length === 1) { + if (this[FLOWING] && this[DATALISTENERS] === 0) { + this[FLOWING] = false; + } + this[PIPES] = []; + } + else + this[PIPES].splice(this[PIPES].indexOf(p), 1); + p.unpipe(); + } + } + /** + * Alias for {@link Minipass#on} + */ + addListener(ev, handler) { + return this.on(ev, handler); + } + /** + * Mostly identical to `EventEmitter.on`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * - Adding a 'data' event handler will trigger the flow of data + * + * - Adding a 'readable' event handler when there is data waiting to be read + * will cause 'readable' to be emitted immediately. + * + * - Adding an 'endish' event handler ('end', 'finish', etc.) which has + * already passed will cause the event to be emitted immediately and all + * handlers removed. + * + * - Adding an 'error' event handler after an error has been emitted will + * cause the event to be re-emitted immediately with the error previously + * raised. + */ + on(ev, handler) { + const ret = super.on(ev, handler); + if (ev === 'data') { + this[DISCARDED] = false; + this[DATALISTENERS]++; + if (!this[PIPES].length && !this[FLOWING]) { + this[RESUME](); + } + } + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { + super.emit('readable'); + } + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev); + this.removeAllListeners(ev); + } + else if (ev === 'error' && this[EMITTED_ERROR]) { + const h = handler; + if (this[ASYNC]) + defer(() => h.call(this, this[EMITTED_ERROR])); + else + h.call(this, this[EMITTED_ERROR]); + } + return ret; + } + /** + * Alias for {@link Minipass#off} + */ + removeListener(ev, handler) { + return this.off(ev, handler); + } + /** + * Mostly identical to `EventEmitter.off` + * + * If a 'data' event handler is removed, and it was the last consumer + * (ie, there are no pipe destinations or other 'data' event listeners), + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + off(ev, handler) { + const ret = super.off(ev, handler); + // if we previously had listeners, and now we don't, and we don't + // have any pipes, then stop the flow, unless it's been explicitly + // put in a discarded flowing state via stream.resume(). 
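+        // (the discarded flowing state is entered by calling resume() when
+        // there are no pipes or 'data' listeners, or by destroy())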
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ?
super.emit('error', data) + : false; + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'resume') { + const ret = super.emit('resume'); + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev); + this.removeAllListeners(ev); + return ret; + } + // Some other unknown event + const ret = super.emit(ev, ...args); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) + this.pause(); + } + const ret = this[DISCARDED] ? false : super.emit('data', data); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITEND]() { + if (this[EMITTED_END]) + return false; + this[EMITTED_END] = true; + this.readable = false; + return this[ASYNC] + ? (defer(() => this[EMITEND2]()), true) + : this[EMITEND2](); + } + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end(); + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data); + } + if (!this[DISCARDED]) + super.emit('data', data); + } + } + for (const p of this[PIPES]) { + p.end(); + } + const ret = super.emit('end'); + this.removeAllListeners('end'); + return ret; + } + /** + * Return a Promise that resolves to an array of all emitted data once + * the stream ends. + */ + async collect() { + const buf = Object.assign([], { + dataLength: 0, + }); + if (!this[OBJECTMODE]) + buf.dataLength = 0; + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise(); + this.on('data', c => { + buf.push(c); + if (!this[OBJECTMODE]) + buf.dataLength += c.length; + }); + await p; + return buf; + } + /** + * Return a Promise that resolves to the concatenation of all emitted data + * once the stream ends. + * + * Not allowed on objectMode streams. + */ + async concat() { + if (this[OBJECTMODE]) { + throw new Error('cannot concat in objectMode'); + } + const buf = await this.collect(); + return (this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength)); + } + /** + * Return a void Promise that resolves once the stream ends. + */ + async promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))); + this.on('error', er => reject(er)); + this.on('end', () => resolve()); + }); + } + /** + * Asynchronous `for await of` iteration. + * + * This will continue emitting all chunks until the stream terminates. + */ + [Symbol.asyncIterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. 
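+        // Note: breaking out of a `for await` loop invokes return(), which
+        // pauses the stream rather than destroying it.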
+ this[DISCARDED] = false; + let stopped = false; + const stop = async () => { + this.pause(); + stopped = true; + return { value: undefined, done: true }; + }; + const next = () => { + if (stopped) + return stop(); + const res = this.read(); + if (res !== null) + return Promise.resolve({ done: false, value: res }); + if (this[EOF]) + return stop(); + let resolve; + let reject; + const onerr = (er) => { + this.off('data', ondata); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + stop(); + reject(er); + }; + const ondata = (value) => { + this.off('error', onerr); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + this.pause(); + resolve({ value, done: !!this[EOF] }); + }; + const onend = () => { + this.off('error', onerr); + this.off('data', ondata); + this.off(DESTROYED, ondestroy); + stop(); + resolve({ done: true, value: undefined }); + }; + const ondestroy = () => onerr(new Error('stream destroyed')); + return new Promise((res, rej) => { + reject = rej; + resolve = res; + this.once(DESTROYED, ondestroy); + this.once('error', onerr); + this.once('end', onend); + this.once('data', ondata); + }); + }; + return { + next, + throw: stop, + return: stop, + [Symbol.asyncIterator]() { + return this; + }, + }; + } + /** + * Synchronous `for of` iteration. + * + * The iteration will terminate when the internal buffer runs out, even + * if the stream has not yet terminated. + */ + [Symbol.iterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. + this[DISCARDED] = false; + let stopped = false; + const stop = () => { + this.pause(); + this.off(ERROR, stop); + this.off(DESTROYED, stop); + this.off('end', stop); + stopped = true; + return { done: true, value: undefined }; + }; + const next = () => { + if (stopped) + return stop(); + const value = this.read(); + return value === null ? stop() : { done: false, value }; + }; + this.once('end', stop); + this.once(ERROR, stop); + this.once(DESTROYED, stop); + return { + next, + throw: stop, + return: stop, + [Symbol.iterator]() { + return this; + }, + }; + } + /** + * Destroy a stream, preventing it from being used for any further purpose. + * + * If the stream has a `close()` method, then it will be called on + * destruction. + * + * After destruction, any attempt to write data, read data, or emit most + * events will be ignored. + * + * If an error argument is provided, then it will be emitted in an + * 'error' event. + */ + destroy(er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er); + else + this.emit(DESTROYED); + return this; + } + this[DESTROYED] = true; + this[DISCARDED] = true; + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0; + this[BUFFERLENGTH] = 0; + const wc = this; + if (typeof wc.close === 'function' && !this[CLOSED]) + wc.close(); + if (er) + this.emit('error', er); + // if no error to emit, still reject pending promises + else + this.emit(DESTROYED); + return this; + } + /** + * Alias for {@link isStream} + * + * Former export location, maintained for backwards compatibility. 
+ * + * @deprecated + */ + static get isStream() { + return isStream; + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/package.json b/node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/npm-registry-fetch/node_modules/minipass/package.json b/node_modules/npm-registry-fetch/node_modules/minipass/package.json new file mode 100644 index 0000000000000..355501c0a10c1 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/minipass/package.json @@ -0,0 +1,82 @@ +{ + "name": "minipass", + "version": "7.0.2", + "description": "minimal implementation of a PassThrough stream", + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/cjs/index.js", + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./package.json": "./package.json" + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--enable-source-maps", + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/node": "^20.1.2", + "@types/tap": "^15.0.8", + "c8": "^7.13.0", + "prettier": "^2.6.2", + "tap": "^16.3.0", + "ts-node": "^10.9.1", + "typedoc": "^0.24.8", + "typescript": "^5.1.3", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "sync-content": "^1.0.2", + "through2": "^2.0.3" + }, + "repository": "https://github.com/isaacs/minipass", + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)",
+  "license": "ISC",
+  "engines": {
+    "node": ">=16 || 14 >=14.17"
+  }
+}
diff --git a/node_modules/npm-registry-fetch/package.json b/node_modules/npm-registry-fetch/package.json
index 63a44725886cc..8832c8a2e95d3 100644
--- a/node_modules/npm-registry-fetch/package.json
+++ b/node_modules/npm-registry-fetch/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-registry-fetch",
-  "version": "14.0.5",
+  "version": "15.0.0",
   "description": "Fetch-based http client for use with npm registry APIs",
   "main": "lib",
   "files": [
@@ -31,8 +31,8 @@
   "author": "GitHub Inc.",
   "license": "ISC",
   "dependencies": {
-    "make-fetch-happen": "^11.0.0",
-    "minipass": "^5.0.0",
+    "make-fetch-happen": "^12.0.0",
+    "minipass": "^7.0.2",
     "minipass-fetch": "^3.0.0",
     "minipass-json-stream": "^1.0.1",
     "minizlib": "^2.1.2",
@@ -41,7 +41,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.14.1",
+    "@npmcli/template-oss": "4.18.0",
     "cacache": "^17.0.0",
     "nock": "^13.2.4",
     "require-inject": "^1.4.4",
@@ -57,11 +57,17 @@
     ]
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.13.0 || >=18.0.0"
  },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.14.1",
+    "ciVersions": [
+      "16.13.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ],
+    "version": "4.18.0",
     "publish": "true"
   }
 }
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md
new file mode 100644
index 0000000000000..5fc208ff122e0
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md
@@ -0,0 +1,20 @@
+<!-- This file is automatically added by @npmcli/template-oss. Do not edit. -->
+
+ISC License
+
+Copyright npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this
+software for any purpose with or without fee is hereby
+granted, provided that the above copyright notice and this
+permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
+EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js
new file mode 100644
index 0000000000000..870ce0d923cd0
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js
@@ -0,0 +1,145 @@
+'use strict'
+const fs = require('fs')
+const npa = require('npm-package-arg')
+const { URL } = require('url')
+
+// Find the longest registry key that is used for some kind of auth
+// in the options.
+const regKeyFromURI = (uri, opts) => {
+  const parsed = new URL(uri)
+  // try to find a config key indicating we have auth for this registry
+  // can be one of :_authToken, :_auth, :_password and :username, or
+  // :certfile and :keyfile
+  // We walk up the "path" until we're left with just //<host>[:<port>],
+  // stopping when we reach '//'.
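+  // For example (hypothetical registry URL), for
+  // https://reg.example.com/a/scope/ the keys tried, in order, are:
+  //   //reg.example.com/a/scope/  //reg.example.com/a/scope
+  //   //reg.example.com/a/        //reg.example.com/a
+  //   //reg.example.com/          //reg.example.com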
+ let regKey = `//${parsed.host}${parsed.pathname}` + while (regKey.length > '//'.length) { + // got some auth for this URI + if (hasAuth(regKey, opts)) { + return regKey + } + + // can be either //host/some/path/:_auth or //host/some/path:_auth + // walk up by removing EITHER what's after the slash OR the slash itself + regKey = regKey.replace(/([^/]+|\/)$/, '') + } +} + +const hasAuth = (regKey, opts) => ( + opts[`${regKey}:_authToken`] || + opts[`${regKey}:_auth`] || + opts[`${regKey}:username`] && opts[`${regKey}:_password`] || + opts[`${regKey}:certfile`] && opts[`${regKey}:keyfile`] +) + +const sameHost = (a, b) => { + const parsedA = new URL(a) + const parsedB = new URL(b) + return parsedA.host === parsedB.host +} + +const getRegistry = opts => { + const { spec } = opts + const { scope: specScope, subSpec } = spec ? npa(spec) : {} + const subSpecScope = subSpec && subSpec.scope + const scope = subSpec ? subSpecScope : specScope + const scopeReg = scope && opts[`${scope}:registry`] + return scopeReg || opts.registry +} + +const maybeReadFile = file => { + try { + return fs.readFileSync(file, 'utf8') + } catch (er) { + if (er.code !== 'ENOENT') { + throw er + } + return null + } +} + +const getAuth = (uri, opts = {}) => { + const { forceAuth } = opts + if (!uri) { + throw new Error('URI is required') + } + const regKey = regKeyFromURI(uri, forceAuth || opts) + + // we are only allowed to use what's in forceAuth if specified + if (forceAuth && !regKey) { + return new Auth({ + scopeAuthKey: null, + token: forceAuth._authToken || forceAuth.token, + username: forceAuth.username, + password: forceAuth._password || forceAuth.password, + auth: forceAuth._auth || forceAuth.auth, + certfile: forceAuth.certfile, + keyfile: forceAuth.keyfile, + }) + } + + // no auth for this URI, but might have it for the registry + if (!regKey) { + const registry = getRegistry(opts) + if (registry && uri !== registry && sameHost(uri, registry)) { + return getAuth(registry, opts) + } else if (registry !== opts.registry) { + // If making a tarball request to a different base URI than the + // registry where we logged in, but the same auth SHOULD be sent + // to that artifact host, then we track where it was coming in from, + // and warn the user if we get a 4xx error on it. 
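+    // (e.g. auth configured for a scoped registry on one host while the
+    // tarball URI points at a different, hypothetical, artifact host)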
+ const scopeAuthKey = regKeyFromURI(registry, opts) + return new Auth({ scopeAuthKey }) + } + } + + const { + [`${regKey}:_authToken`]: token, + [`${regKey}:username`]: username, + [`${regKey}:_password`]: password, + [`${regKey}:_auth`]: auth, + [`${regKey}:certfile`]: certfile, + [`${regKey}:keyfile`]: keyfile, + } = opts + + return new Auth({ + scopeAuthKey: null, + token, + auth, + username, + password, + certfile, + keyfile, + }) +} + +class Auth { + constructor ({ token, auth, username, password, scopeAuthKey, certfile, keyfile }) { + this.scopeAuthKey = scopeAuthKey + this.token = null + this.auth = null + this.isBasicAuth = false + this.cert = null + this.key = null + if (token) { + this.token = token + } else if (auth) { + this.auth = auth + } else if (username && password) { + const p = Buffer.from(password, 'base64').toString('utf8') + this.auth = Buffer.from(`${username}:${p}`, 'utf8').toString('base64') + this.isBasicAuth = true + } + // mTLS may be used in conjunction with another auth method above + if (certfile && keyfile) { + const cert = maybeReadFile(certfile, 'utf-8') + const key = maybeReadFile(keyfile, 'utf-8') + if (cert && key) { + this.cert = cert + this.key = key + } + } + } +} + +module.exports = getAuth diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js new file mode 100644 index 0000000000000..066ac3c32420f --- /dev/null +++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js @@ -0,0 +1,100 @@ +'use strict' + +const errors = require('./errors.js') +const { Response } = require('minipass-fetch') +const defaultOpts = require('./default-opts.js') +const log = require('proc-log') +const cleanUrl = require('./clean-url.js') + +/* eslint-disable-next-line max-len */ +const moreInfoUrl = 'https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry' +const checkResponse = + async ({ method, uri, res, startTime, auth, opts }) => { + opts = { ...defaultOpts, ...opts } + if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) { + log.notice('', res.headers.get('npm-notice')) + } + + if (res.status >= 400) { + logRequest(method, res, startTime) + if (auth && auth.scopeAuthKey && !auth.token && !auth.auth) { + // we didn't have auth for THIS request, but we do have auth for + // requests to the registry indicated by the spec's scope value. + // Warn the user. + log.warn('registry', `No auth for URI, but auth present for scoped registry. + +URI: ${uri} +Scoped Registry Key: ${auth.scopeAuthKey} + +More info here: ${moreInfoUrl}`) + } + return checkErrors(method, res, startTime, opts) + } else { + res.body.on('end', () => logRequest(method, res, startTime, opts)) + if (opts.ignoreBody) { + res.body.resume() + return new Response(null, res) + } + return res + } + } +module.exports = checkResponse + +function logRequest (method, res, startTime) { + const elapsedTime = Date.now() - startTime + const attempt = res.headers.get('x-fetch-attempts') + const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : '' + const cacheStatus = res.headers.get('x-local-cache-status') + const cacheStr = cacheStatus ? 
` (cache ${cacheStatus})` : '' + const urlStr = cleanUrl(res.url) + + log.http( + 'fetch', + `${method.toUpperCase()} ${res.status} ${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}` + ) +} + +function checkErrors (method, res, startTime, opts) { + return res.buffer() + .catch(() => null) + .then(body => { + let parsed = body + try { + parsed = JSON.parse(body.toString('utf8')) + } catch { + // ignore errors + } + if (res.status === 401 && res.headers.get('www-authenticate')) { + const auth = res.headers.get('www-authenticate') + .split(/,\s*/) + .map(s => s.toLowerCase()) + if (auth.indexOf('ipaddress') !== -1) { + throw new errors.HttpErrorAuthIPAddress( + method, res, parsed, opts.spec + ) + } else if (auth.indexOf('otp') !== -1) { + throw new errors.HttpErrorAuthOTP( + method, res, parsed, opts.spec + ) + } else { + throw new errors.HttpErrorAuthUnknown( + method, res, parsed, opts.spec + ) + } + } else if ( + res.status === 401 && + body != null && + /one-time pass/.test(body.toString('utf8')) + ) { + // Heuristic for malformed OTP responses that don't include the + // www-authenticate header. + throw new errors.HttpErrorAuthOTP( + method, res, parsed, opts.spec + ) + } else { + throw new errors.HttpErrorGeneral( + method, res, parsed, opts.spec + ) + } + }) +} diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/clean-url.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/clean-url.js new file mode 100644 index 0000000000000..0c2656b5653a0 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/clean-url.js @@ -0,0 +1,27 @@ +const { URL } = require('url') + +const replace = '***' +const tokenRegex = /\bnpm_[a-zA-Z0-9]{36}\b/g +const guidRegex = /\b[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\b/g + +const cleanUrl = (str) => { + if (typeof str !== 'string' || !str) { + return str + } + + try { + const url = new URL(str) + if (url.password) { + url.password = replace + str = url.toString() + } + } catch { + // ignore errors + } + + return str + .replace(tokenRegex, `npm_${replace}`) + .replace(guidRegex, `npm_${replace}`) +} + +module.exports = cleanUrl diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js new file mode 100644 index 0000000000000..f0847f0b507e2 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js @@ -0,0 +1,19 @@ +const pkg = require('../package.json') +module.exports = { + maxSockets: 12, + method: 'GET', + registry: 'https://registry.npmjs.org/', + timeout: 5 * 60 * 1000, // 5 minutes + strictSSL: true, + noProxy: process.env.NOPROXY, + userAgent: `${pkg.name + }@${ + pkg.version + }/node@${ + process.version + }+${ + process.arch + } (${ + process.platform + })`, +} diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js new file mode 100644 index 0000000000000..cf5ddba6f300c --- /dev/null +++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js @@ -0,0 +1,80 @@ +'use strict' + +const url = require('url') + +function packageName (href) { + try { + let basePath = new url.URL(href).pathname.slice(1) + if (!basePath.match(/^-/)) { + basePath = basePath.split('/') + var index = basePath.indexOf('_rewrite') + if (index === -1) { + index = basePath.length - 1 + } else { + index++ + } + return decodeURIComponent(basePath[index]) + } + } catch (_) { + // this is ok + 
} +} + +class HttpErrorBase extends Error { + constructor (method, res, body, spec) { + super() + this.name = this.constructor.name + this.headers = res.headers.raw() + this.statusCode = res.status + this.code = `E${res.status}` + this.method = method + this.uri = res.url + this.body = body + this.pkgid = spec ? spec.toString() : packageName(res.url) + } +} +module.exports.HttpErrorBase = HttpErrorBase + +class HttpErrorGeneral extends HttpErrorBase { + constructor (method, res, body, spec) { + super(method, res, body, spec) + this.message = `${res.status} ${res.statusText} - ${ + this.method.toUpperCase() + } ${ + this.spec || this.uri + }${ + (body && body.error) ? ' - ' + body.error : '' + }` + Error.captureStackTrace(this, HttpErrorGeneral) + } +} +module.exports.HttpErrorGeneral = HttpErrorGeneral + +class HttpErrorAuthOTP extends HttpErrorBase { + constructor (method, res, body, spec) { + super(method, res, body, spec) + this.message = 'OTP required for authentication' + this.code = 'EOTP' + Error.captureStackTrace(this, HttpErrorAuthOTP) + } +} +module.exports.HttpErrorAuthOTP = HttpErrorAuthOTP + +class HttpErrorAuthIPAddress extends HttpErrorBase { + constructor (method, res, body, spec) { + super(method, res, body, spec) + this.message = 'Login is not allowed from your IP address' + this.code = 'EAUTHIP' + Error.captureStackTrace(this, HttpErrorAuthIPAddress) + } +} +module.exports.HttpErrorAuthIPAddress = HttpErrorAuthIPAddress + +class HttpErrorAuthUnknown extends HttpErrorBase { + constructor (method, res, body, spec) { + super(method, res, body, spec) + this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate') + Error.captureStackTrace(this, HttpErrorAuthUnknown) + } +} +module.exports.HttpErrorAuthUnknown = HttpErrorAuthUnknown diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js new file mode 100644 index 0000000000000..23e349c5c5b96 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js @@ -0,0 +1,247 @@ +'use strict' + +const { HttpErrorAuthOTP } = require('./errors.js') +const checkResponse = require('./check-response.js') +const getAuth = require('./auth.js') +const fetch = require('make-fetch-happen') +const JSONStream = require('minipass-json-stream') +const npa = require('npm-package-arg') +const qs = require('querystring') +const url = require('url') +const zlib = require('minizlib') +const { Minipass } = require('minipass') + +const defaultOpts = require('./default-opts.js') + +// WhatWG URL throws if it's not fully resolved +const urlIsValid = u => { + try { + return !!new url.URL(u) + } catch (_) { + return false + } +} + +module.exports = regFetch +function regFetch (uri, /* istanbul ignore next */ opts_ = {}) { + const opts = { + ...defaultOpts, + ...opts_, + } + + // if we did not get a fully qualified URI, then we look at the registry + // config or relevant scope to resolve it. 
+ const uriValid = urlIsValid(uri) + let registry = opts.registry || defaultOpts.registry + if (!uriValid) { + registry = opts.registry = ( + (opts.spec && pickRegistry(opts.spec, opts)) || + opts.registry || + registry + ) + uri = `${ + registry.trim().replace(/\/?$/g, '') + }/${ + uri.trim().replace(/^\//, '') + }` + // asserts that this is now valid + new url.URL(uri) + } + + const method = opts.method || 'GET' + + // through that takes into account the scope, the prefix of `uri`, etc + const startTime = Date.now() + const auth = getAuth(uri, opts) + const headers = getHeaders(uri, auth, opts) + let body = opts.body + const bodyIsStream = Minipass.isStream(body) + const bodyIsPromise = body && + typeof body === 'object' && + typeof body.then === 'function' + + if ( + body && !bodyIsStream && !bodyIsPromise && typeof body !== 'string' && !Buffer.isBuffer(body) + ) { + headers['content-type'] = headers['content-type'] || 'application/json' + body = JSON.stringify(body) + } else if (body && !headers['content-type']) { + headers['content-type'] = 'application/octet-stream' + } + + if (opts.gzip) { + headers['content-encoding'] = 'gzip' + if (bodyIsStream) { + const gz = new zlib.Gzip() + body.on('error', /* istanbul ignore next: unlikely and hard to test */ + err => gz.emit('error', err)) + body = body.pipe(gz) + } else if (!bodyIsPromise) { + body = new zlib.Gzip().end(body).concat() + } + } + + const parsed = new url.URL(uri) + + if (opts.query) { + const q = typeof opts.query === 'string' ? qs.parse(opts.query) + : opts.query + + Object.keys(q).forEach(key => { + if (q[key] !== undefined) { + parsed.searchParams.set(key, q[key]) + } + }) + uri = url.format(parsed) + } + + if (parsed.searchParams.get('write') === 'true' && method === 'GET') { + // do not cache, because this GET is fetching a rev that will be + // used for a subsequent PUT or DELETE, so we need to conditionally + // update cache. + opts.offline = false + opts.preferOffline = false + opts.preferOnline = true + } + + const doFetch = async fetchBody => { + const p = fetch(uri, { + agent: opts.agent, + algorithms: opts.algorithms, + body: fetchBody, + cache: getCacheMode(opts), + cachePath: opts.cache, + ca: opts.ca, + cert: auth.cert || opts.cert, + headers, + integrity: opts.integrity, + key: auth.key || opts.key, + localAddress: opts.localAddress, + maxSockets: opts.maxSockets, + memoize: opts.memoize, + method: method, + noProxy: opts.noProxy, + proxy: opts.httpsProxy || opts.proxy, + retry: opts.retry ? 
opts.retry : { + retries: opts.fetchRetries, + factor: opts.fetchRetryFactor, + minTimeout: opts.fetchRetryMintimeout, + maxTimeout: opts.fetchRetryMaxtimeout, + }, + strictSSL: opts.strictSSL, + timeout: opts.timeout || 30 * 1000, + }).then(res => checkResponse({ + method, + uri, + res, + registry, + startTime, + auth, + opts, + })) + + if (typeof opts.otpPrompt === 'function') { + return p.catch(async er => { + if (er instanceof HttpErrorAuthOTP) { + let otp + // if otp fails to complete, we fail with that failure + try { + otp = await opts.otpPrompt() + } catch (_) { + // ignore this error + } + // if no otp provided, or otpPrompt errored, throw the original HTTP error + if (!otp) { + throw er + } + return regFetch(uri, { ...opts, otp }) + } + throw er + }) + } else { + return p + } + } + + return Promise.resolve(body).then(doFetch) +} + +module.exports.json = fetchJSON +function fetchJSON (uri, opts) { + return regFetch(uri, opts).then(res => res.json()) +} + +module.exports.json.stream = fetchJSONStream +function fetchJSONStream (uri, jsonPath, + /* istanbul ignore next */ opts_ = {}) { + const opts = { ...defaultOpts, ...opts_ } + const parser = JSONStream.parse(jsonPath, opts.mapJSON) + regFetch(uri, opts).then(res => + res.body.on('error', + /* istanbul ignore next: unlikely and difficult to test */ + er => parser.emit('error', er)).pipe(parser) + ).catch(er => parser.emit('error', er)) + return parser +} + +module.exports.pickRegistry = pickRegistry +function pickRegistry (spec, opts = {}) { + spec = npa(spec) + let registry = spec.scope && + opts[spec.scope.replace(/^@?/, '@') + ':registry'] + + if (!registry && opts.scope) { + registry = opts[opts.scope.replace(/^@?/, '@') + ':registry'] + } + + if (!registry) { + registry = opts.registry || defaultOpts.registry + } + + return registry +} + +function getCacheMode (opts) { + return opts.offline ? 'only-if-cached' + : opts.preferOffline ? 'force-cache' + : opts.preferOnline ? 
'no-cache' + : 'default' +} + +function getHeaders (uri, auth, opts) { + const headers = Object.assign({ + 'user-agent': opts.userAgent, + }, opts.headers || {}) + + if (opts.authType) { + headers['npm-auth-type'] = opts.authType + } + + if (opts.scope) { + headers['npm-scope'] = opts.scope + } + + if (opts.npmSession) { + headers['npm-session'] = opts.npmSession + } + + if (opts.npmCommand) { + headers['npm-command'] = opts.npmCommand + } + + // If a tarball is hosted on a different place than the manifest, only send + // credentials on `alwaysAuth` + if (auth.token) { + headers.authorization = `Bearer ${auth.token}` + } else if (auth.auth) { + headers.authorization = `Basic ${auth.auth}` + } + + if (opts.otp) { + headers['npm-otp'] = opts.otp + } + + return headers +} + +module.exports.cleanUrl = require('./clean-url.js') diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/package.json b/node_modules/pacote/node_modules/npm-registry-fetch/package.json new file mode 100644 index 0000000000000..63a44725886cc --- /dev/null +++ b/node_modules/pacote/node_modules/npm-registry-fetch/package.json @@ -0,0 +1,67 @@ +{ + "name": "npm-registry-fetch", + "version": "14.0.5", + "description": "Fetch-based http client for use with npm registry APIs", + "main": "lib", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "eslint": "eslint", + "lint": "eslint \"**/*.js\"", + "lintfix": "npm run lint -- --fix", + "test": "tap", + "posttest": "npm run lint", + "npmclilint": "npmcli-lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/npm-registry-fetch.git" + }, + "keywords": [ + "npm", + "registry", + "fetch" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "make-fetch-happen": "^11.0.0", + "minipass": "^5.0.0", + "minipass-fetch": "^3.0.0", + "minipass-json-stream": "^1.0.1", + "minizlib": "^2.1.2", + "npm-package-arg": "^10.0.0", + "proc-log": "^3.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.14.1", + "cacache": "^17.0.0", + "nock": "^13.2.4", + "require-inject": "^1.4.4", + "ssri": "^10.0.0", + "tap": "^16.0.1" + }, + "tap": { + "check-coverage": true, + "test-ignore": "test[\\\\/](util|cache)[\\\\/]", + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.14.1", + "publish": "true" + } +} diff --git a/package-lock.json b/package-lock.json index 995e2f4cc58d2..c0c42086d1b01 100644 --- a/package-lock.json +++ b/package-lock.json @@ -134,7 +134,7 @@ "npm-package-arg": "^10.1.0", "npm-pick-manifest": "^8.0.1", "npm-profile": "^7.0.1", - "npm-registry-fetch": "^14.0.5", + "npm-registry-fetch": "^15.0.0", "npm-user-validate": "^2.0.0", "npmlog": "^7.0.1", "p-map": "^4.0.0", @@ -2340,6 +2340,19 @@ "node": ">= 8" } }, + "node_modules/@npmcli/agent": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-1.1.0.tgz", + "integrity": "sha512-I9g/2XFOkflxm5IDrGSjCcR2d12Jmic0di9w/WpJBbzYuSXmfgoL+WwEV7zY/ajxzQr7o4vSkEJh6piyFLYtuQ==", + "inBundle": true, + "dependencies": { + "lru-cache": "^7.18.3", + "socks": "^2.7.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/@npmcli/arborist": { "resolved": "workspaces/arborist", "link": true @@ -9605,7 +9618,7 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, - "node_modules/npm-registry-fetch": { + "node_modules/npm-profile/node_modules/npm-registry-fetch": { "version": "14.0.5", "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-14.0.5.tgz", "integrity": "sha512-kIDMIo4aBm6xg7jOttupWZamsZRkAqMqwqqbVXnUqstY5+tapvv6bkH/qMR76jdgV+YljEUCyWx3hRYMrJiAgA==", @@ -9623,6 +9636,55 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/npm-registry-fetch": { + "version": "15.0.0", + "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-15.0.0.tgz", + "integrity": "sha512-CMFzk0HMDQ3fmFZ4v62C05g6eBwoU3PxpzFf4QiE360vfmtKZJkj+iCpgLx+I4oJT6Kx8g67Coyk729Q27M2JQ==", + "inBundle": true, + "dependencies": { + "make-fetch-happen": "^12.0.0", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-json-stream": "^1.0.1", + "minizlib": "^2.1.2", + "npm-package-arg": "^10.0.0", + "proc-log": "^3.0.0" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-registry-fetch/node_modules/make-fetch-happen": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-12.0.0.tgz", + "integrity": "sha512-xpuA2kA8Z66uGQjaSXd7rffqJOv60iYpP8X0TsZl3uwXlqxUVmHETImjM71JOPA694TlcX37GhlaCsl6z6fNVg==", + "inBundle": true, + "dependencies": { + "@npmcli/agent": "^1.1.0", + "cacache": "^17.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-registry-fetch/node_modules/minipass": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.2.tgz", + "integrity": "sha512-eL79dXrE1q9dBbDCLg7xfn/vl7MS4F1gvJAgjJrQli/jbQWdUttuVawphqpffoIYfRdq78LHx6GP4bU/EQ2ATA==", + "inBundle": true, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, "node_modules/npm-run-path": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", @@ -10188,6 +10250,24 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/pacote/node_modules/npm-registry-fetch": { + "version": "14.0.5", + "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-14.0.5.tgz", + "integrity": 
"sha512-kIDMIo4aBm6xg7jOttupWZamsZRkAqMqwqqbVXnUqstY5+tapvv6bkH/qMR76jdgV+YljEUCyWx3hRYMrJiAgA==", + "inBundle": true, + "dependencies": { + "make-fetch-happen": "^11.0.0", + "minipass": "^5.0.0", + "minipass-fetch": "^3.0.0", + "minipass-json-stream": "^1.0.1", + "minizlib": "^2.1.2", + "npm-package-arg": "^10.0.0", + "proc-log": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -15630,7 +15710,7 @@ "npm-install-checks": "^6.0.0", "npm-package-arg": "^10.1.0", "npm-pick-manifest": "^8.0.1", - "npm-registry-fetch": "^14.0.3", + "npm-registry-fetch": "^15.0.0", "npmlog": "^7.0.1", "pacote": "^15.0.8", "parse-conflict-json": "^3.0.0", @@ -15689,7 +15769,7 @@ "license": "ISC", "dependencies": { "npm-package-arg": "^10.1.0", - "npm-registry-fetch": "^14.0.3" + "npm-registry-fetch": "^15.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", @@ -15775,7 +15855,7 @@ "license": "ISC", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^14.0.3" + "npm-registry-fetch": "^15.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", @@ -15792,7 +15872,7 @@ "license": "ISC", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^14.0.3" + "npm-registry-fetch": "^15.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", @@ -15832,7 +15912,7 @@ "ci-info": "^3.6.1", "normalize-package-data": "^5.0.0", "npm-package-arg": "^10.1.0", - "npm-registry-fetch": "^14.0.3", + "npm-registry-fetch": "^15.0.0", "proc-log": "^3.0.0", "semver": "^7.3.7", "sigstore": "^1.4.0", @@ -15854,7 +15934,7 @@ "version": "6.0.2", "license": "ISC", "dependencies": { - "npm-registry-fetch": "^14.0.3" + "npm-registry-fetch": "^15.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", @@ -15871,7 +15951,7 @@ "license": "ISC", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^14.0.3" + "npm-registry-fetch": "^15.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", diff --git a/package.json b/package.json index 94b4e33758560..d0e0cd2e3ea5e 100644 --- a/package.json +++ b/package.json @@ -99,7 +99,7 @@ "npm-package-arg": "^10.1.0", "npm-pick-manifest": "^8.0.1", "npm-profile": "^7.0.1", - "npm-registry-fetch": "^14.0.5", + "npm-registry-fetch": "^15.0.0", "npm-user-validate": "^2.0.0", "npmlog": "^7.0.1", "p-map": "^4.0.0", diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index a9ec27bacb003..550b389d1ba31 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -24,7 +24,7 @@ "npm-install-checks": "^6.0.0", "npm-package-arg": "^10.1.0", "npm-pick-manifest": "^8.0.1", - "npm-registry-fetch": "^14.0.3", + "npm-registry-fetch": "^15.0.0", "npmlog": "^7.0.1", "pacote": "^15.0.8", "parse-conflict-json": "^3.0.0", diff --git a/workspaces/libnpmaccess/package.json b/workspaces/libnpmaccess/package.json index 713cf8c264c98..9a73b13998b46 100644 --- a/workspaces/libnpmaccess/package.json +++ b/workspaces/libnpmaccess/package.json @@ -30,7 +30,7 @@ "homepage": "https://npmjs.com/package/libnpmaccess", "dependencies": { "npm-package-arg": "^10.1.0", - "npm-registry-fetch": "^14.0.3" + "npm-registry-fetch": "^15.0.0" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" diff --git a/workspaces/libnpmhook/package.json b/workspaces/libnpmhook/package.json index 05b34dda75c41..92cc750084957 100644 --- a/workspaces/libnpmhook/package.json 
+++ b/workspaces/libnpmhook/package.json @@ -31,7 +31,7 @@ "license": "ISC", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^14.0.3" + "npm-registry-fetch": "^15.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", diff --git a/workspaces/libnpmorg/package.json b/workspaces/libnpmorg/package.json index 675d03b5b2437..91c2593b38c99 100644 --- a/workspaces/libnpmorg/package.json +++ b/workspaces/libnpmorg/package.json @@ -42,7 +42,7 @@ "homepage": "https://npmjs.com/package/libnpmorg", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^14.0.3" + "npm-registry-fetch": "^15.0.0" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json index 8c95f9e9aba7d..c1cec1f0088af 100644 --- a/workspaces/libnpmpublish/package.json +++ b/workspaces/libnpmpublish/package.json @@ -41,7 +41,7 @@ "ci-info": "^3.6.1", "normalize-package-data": "^5.0.0", "npm-package-arg": "^10.1.0", - "npm-registry-fetch": "^14.0.3", + "npm-registry-fetch": "^15.0.0", "proc-log": "^3.0.0", "semver": "^7.3.7", "sigstore": "^1.4.0", diff --git a/workspaces/libnpmsearch/package.json b/workspaces/libnpmsearch/package.json index 32cb1f21b6422..f825e56f51647 100644 --- a/workspaces/libnpmsearch/package.json +++ b/workspaces/libnpmsearch/package.json @@ -38,7 +38,7 @@ "bugs": "https://github.com/npm/libnpmsearch/issues", "homepage": "https://npmjs.com/package/libnpmsearch", "dependencies": { - "npm-registry-fetch": "^14.0.3" + "npm-registry-fetch": "^15.0.0" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" diff --git a/workspaces/libnpmteam/package.json b/workspaces/libnpmteam/package.json index 33a77095fe848..00dde521f449b 100644 --- a/workspaces/libnpmteam/package.json +++ b/workspaces/libnpmteam/package.json @@ -32,7 +32,7 @@ "homepage": "https://npmjs.com/package/libnpmteam", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^14.0.3" + "npm-registry-fetch": "^15.0.0" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" From db176b7b0eeb0ca4aa0485047d7bcc08369a227b Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Fri, 11 Aug 2023 16:32:02 -0700 Subject: [PATCH 02/68] chore: set workspaces with changing engines to prerelease --- release-please-config.json | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/release-please-config.json b/release-please-config.json index 453d831c4c403..a539f69faef81 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -35,10 +35,13 @@ "prerelease": true }, "workspaces/arborist": { + "prerelease": true + }, + "workspaces/config": { "prerelease": false }, "workspaces/libnpmaccess": { - "prerelease": false + "prerelease": true }, "workspaces/libnpmdiff": { "prerelease": false @@ -50,27 +53,26 @@ "prerelease": false }, "workspaces/libnpmhook": { - "prerelease": false + "prerelease": true }, "workspaces/libnpmorg": { - "prerelease": false + "prerelease": true }, "workspaces/libnpmpack": { "prerelease": false }, "workspaces/libnpmpublish": { - "prerelease": false + "prerelease": true }, "workspaces/libnpmsearch": { - "prerelease": false + "prerelease": true }, "workspaces/libnpmteam": { - "prerelease": false + "prerelease": true }, "workspaces/libnpmversion": { "prerelease": false - }, - "workspaces/config": {} + } }, "exclude-packages-from-root": true, "group-pull-request-title-pattern": "chore: release ${version}", From 7b35813d5b937717c7129f2df28ee410d7531948 Mon Sep 17 00:00:00 2001 From: 
Luke Karrys Date: Fri, 11 Aug 2023 16:33:59 -0700 Subject: [PATCH 03/68] fix: drop node14 support BREAKING CHANGE: support for node 14 has been removed --- .github/workflows/ci-npmcli-arborist.yml | 2 -- workspaces/arborist/package.json | 10 ++++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-npmcli-arborist.yml b/.github/workflows/ci-npmcli-arborist.yml index f833c7946e36c..c032dc33e9e09 100644 --- a/.github/workflows/ci-npmcli-arborist.yml +++ b/.github/workflows/ci-npmcli-arborist.yml @@ -64,8 +64,6 @@ jobs: os: windows-latest shell: cmd node-version: - - 14.17.0 - - 14.x - 16.13.0 - 16.x - 18.0.0 diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index 550b389d1ba31..018f77b91461c 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -87,11 +87,17 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ] } } From 95cee45b2f9343fbd6b0fc4a4d0d025ef3a42ef4 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Fri, 11 Aug 2023 16:34:40 -0700 Subject: [PATCH 04/68] fix: drop node14 support BREAKING CHANGE: support for node 14 has been removed --- .github/workflows/ci-libnpmaccess.yml | 2 -- workspaces/libnpmaccess/package.json | 10 ++++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-libnpmaccess.yml b/.github/workflows/ci-libnpmaccess.yml index c4b51b0ec089f..2f97a1ec1b7e5 100644 --- a/.github/workflows/ci-libnpmaccess.yml +++ b/.github/workflows/ci-libnpmaccess.yml @@ -64,8 +64,6 @@ jobs: os: windows-latest shell: cmd node-version: - - 14.17.0 - - 14.x - 16.13.0 - 16.x - 18.0.0 diff --git a/workspaces/libnpmaccess/package.json b/workspaces/libnpmaccess/package.json index 9a73b13998b46..3ad8bab6f80ef 100644 --- a/workspaces/libnpmaccess/package.json +++ b/workspaces/libnpmaccess/package.json @@ -33,7 +33,7 @@ "npm-registry-fetch": "^15.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "files": [ "bin/", @@ -42,7 +42,13 @@ "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ From 41f21a4cf5ae8fc86e0a0d846b17a9c3a585f5ab Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Fri, 11 Aug 2023 16:35:02 -0700 Subject: [PATCH 05/68] fix: drop node14 support BREAKING CHANGE: support for node 14 has been removed --- .github/workflows/ci-libnpmhook.yml | 2 -- workspaces/libnpmhook/package.json | 10 ++++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-libnpmhook.yml b/.github/workflows/ci-libnpmhook.yml index 1e656321cf743..a4d7a170a2636 100644 --- a/.github/workflows/ci-libnpmhook.yml +++ b/.github/workflows/ci-libnpmhook.yml @@ -64,8 +64,6 @@ jobs: os: windows-latest shell: cmd node-version: - - 14.17.0 - - 14.x - 16.13.0 - 16.x - 18.0.0 diff --git a/workspaces/libnpmhook/package.json b/workspaces/libnpmhook/package.json index 92cc750084957..5fc4d372632d3 100644 --- a/workspaces/libnpmhook/package.json +++ b/workspaces/libnpmhook/package.json @@ -40,12 +40,18 @@ "tap": "^16.3.4" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ From 9033fe56b2230d925f5a083c0e716351d0492219 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Fri, 11 Aug 2023 16:35:23 -0700 Subject: [PATCH 06/68] fix: drop node14 support BREAKING CHANGE: support for node 14 has been removed --- .github/workflows/ci-libnpmorg.yml | 2 -- workspaces/libnpmorg/package.json | 10 ++++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-libnpmorg.yml b/.github/workflows/ci-libnpmorg.yml index bbff4cbbd1636..0d01c4f7c197f 100644 --- a/.github/workflows/ci-libnpmorg.yml +++ b/.github/workflows/ci-libnpmorg.yml @@ -64,8 +64,6 @@ jobs: os: windows-latest shell: cmd node-version: - - 14.17.0 - - 14.x - 16.13.0 - 16.x - 18.0.0 diff --git a/workspaces/libnpmorg/package.json b/workspaces/libnpmorg/package.json index 91c2593b38c99..03e58a2fc13f2 100644 --- a/workspaces/libnpmorg/package.json +++ b/workspaces/libnpmorg/package.json @@ -45,12 +45,18 @@ "npm-registry-fetch": "^15.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ From e230a0898fbc2d3b374a320ba668c2b530e03be8 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Fri, 11 Aug 2023 16:35:43 -0700 Subject: [PATCH 07/68] fix: drop node14 support BREAKING CHANGE: support for node 14 has been removed --- .github/workflows/ci-libnpmpublish.yml | 2 -- workspaces/libnpmpublish/package.json | 10 ++++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-libnpmpublish.yml b/.github/workflows/ci-libnpmpublish.yml index b3c3d47213c33..0312720c95a7c 100644 --- a/.github/workflows/ci-libnpmpublish.yml +++ b/.github/workflows/ci-libnpmpublish.yml @@ -64,8 +64,6 @@ jobs: os: windows-latest shell: cmd node-version: - - 14.17.0 - - 14.x - 16.13.0 - 16.x - 18.0.0 diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json index c1cec1f0088af..ba634dfbbb2b7 100644 --- a/workspaces/libnpmpublish/package.json +++ b/workspaces/libnpmpublish/package.json @@ -48,12 +48,18 @@ "ssri": "^10.0.1" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ From 7946827878034b983646eb8f4c42d95bb9c2fb37 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Fri, 11 Aug 2023 16:36:00 -0700 Subject: [PATCH 08/68] fix: drop node14 support BREAKING CHANGE: support for node 14 has been removed --- .github/workflows/ci-libnpmsearch.yml | 2 -- workspaces/libnpmsearch/package.json | 10 ++++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-libnpmsearch.yml b/.github/workflows/ci-libnpmsearch.yml index 84d3c47582a03..8f3d8f6a5802f 100644 --- a/.github/workflows/ci-libnpmsearch.yml +++ b/.github/workflows/ci-libnpmsearch.yml @@ -64,8 +64,6 @@ jobs: os: windows-latest shell: cmd node-version: - - 14.17.0 - - 14.x - 16.13.0 - 16.x - 18.0.0 diff --git a/workspaces/libnpmsearch/package.json b/workspaces/libnpmsearch/package.json index f825e56f51647..c00101a99ede3 100644 --- a/workspaces/libnpmsearch/package.json +++ b/workspaces/libnpmsearch/package.json @@ -41,12 +41,18 @@ "npm-registry-fetch": "^15.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ From ded745cb0b37a5c0cd2adb37d86d8aec64a23fd4 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Fri, 11 Aug 2023 16:36:17 -0700 Subject: [PATCH 09/68] fix: drop node14 support BREAKING CHANGE: support for node 14 has been removed --- .github/workflows/ci-libnpmteam.yml | 2 -- workspaces/libnpmteam/package.json | 10 ++++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-libnpmteam.yml b/.github/workflows/ci-libnpmteam.yml index 41aeea5d0f975..2d9266f8043f5 100644 --- a/.github/workflows/ci-libnpmteam.yml +++ b/.github/workflows/ci-libnpmteam.yml @@ -64,8 +64,6 @@ jobs: os: windows-latest shell: cmd node-version: - - 14.17.0 - - 14.x - 16.13.0 - 16.x - 18.0.0 diff --git a/workspaces/libnpmteam/package.json b/workspaces/libnpmteam/package.json index 00dde521f449b..9379c8e0c718c 100644 --- a/workspaces/libnpmteam/package.json +++ b/workspaces/libnpmteam/package.json @@ -35,12 +35,18 @@ "npm-registry-fetch": "^15.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ From c8b5633c25fa276b79a9e11a86d02e1a92958246 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Fri, 11 Aug 2023 16:41:41 -0700 Subject: [PATCH 10/68] deps: pacote@16.0.0 --- mock-registry/package.json | 2 +- node_modules/.gitignore | 6 +- .../npm-registry-fetch/LICENSE.md | 0 .../npm-registry-fetch/lib/auth.js | 0 .../npm-registry-fetch/lib/check-response.js | 0 .../npm-registry-fetch/lib/clean-url.js | 0 .../npm-registry-fetch/lib/default-opts.js | 0 .../npm-registry-fetch/lib/errors.js | 0 .../npm-registry-fetch/lib/index.js | 0 .../npm-registry-fetch/package.json | 0 .../node_modules/pacote/LICENSE | 15 + .../node_modules/pacote/lib/bin.js | 158 +++ .../node_modules/pacote/lib/dir.js | 108 ++ .../node_modules/pacote/lib/fetcher.js | 505 ++++++++ .../node_modules/pacote/lib/file.js | 96 ++ .../node_modules/pacote/lib/git.js | 327 ++++++ .../node_modules/pacote/lib/index.js | 23 + .../node_modules/pacote/lib/registry.js | 344 ++++++ .../node_modules/pacote/lib/remote.js | 91 ++ .../pacote/lib/util/add-git-sha.js | 15 + .../node_modules/pacote/lib/util/cache-dir.js | 15 + .../pacote/lib/util/is-package-bin.js | 25 + .../node_modules/pacote/lib/util/npm.js | 14 + .../pacote/lib/util/tar-create-options.js | 31 + .../pacote/lib/util/trailing-slashes.js | 10 + .../node_modules/pacote/package.json | 79 ++ .../pacote/node_modules/minipass/LICENSE | 15 + .../node_modules/minipass/dist/cjs/index.js | 1028 +++++++++++++++++ .../minipass/dist/cjs/package.json | 3 + .../node_modules/minipass/dist/mjs/index.js | 1018 ++++++++++++++++ .../minipass/dist/mjs/package.json | 3 + .../pacote/node_modules/minipass/package.json | 82 ++ node_modules/pacote/package.json | 18 +- package-lock.json | 105 +- package.json | 2 +- workspaces/arborist/package.json | 2 +- workspaces/libnpmdiff/package.json | 2 +- workspaces/libnpmexec/package.json | 2 +- workspaces/libnpmpack/package.json | 2 +- 39 files 
changed, 4100 insertions(+), 46 deletions(-) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/LICENSE.md (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/lib/auth.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/lib/check-response.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/lib/clean-url.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/lib/default-opts.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/lib/errors.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/lib/index.js (100%) rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-registry-fetch/package.json (100%) create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE create mode 100755 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json create mode 100644 node_modules/pacote/node_modules/minipass/LICENSE create mode 100644 node_modules/pacote/node_modules/minipass/dist/cjs/index.js create mode 100644 node_modules/pacote/node_modules/minipass/dist/cjs/package.json create mode 100644 node_modules/pacote/node_modules/minipass/dist/mjs/index.js create mode 100644 node_modules/pacote/node_modules/minipass/dist/mjs/package.json create mode 100644 node_modules/pacote/node_modules/minipass/package.json diff --git a/mock-registry/package.json b/mock-registry/package.json index 530be56427040..8187bbe52b722 100644 --- a/mock-registry/package.json +++ b/mock-registry/package.json @@ -50,7 +50,7 @@ "json-stringify-safe": "^5.0.1", "nock": "^13.3.0", "npm-package-arg": "^10.1.0", - "pacote": "^15.0.8", + "pacote": "^16.0.0", "tap": "^16.3.4" } } diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 851e54092990b..069ee05e38167 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -25,6 +25,10 @@ !/@npmcli/installed-package-contents 
!/@npmcli/map-workspaces !/@npmcli/metavuln-calculator +!/@npmcli/metavuln-calculator/node_modules/ +/@npmcli/metavuln-calculator/node_modules/* +!/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch +!/@npmcli/metavuln-calculator/node_modules/pacote !/@npmcli/name-from-folder !/@npmcli/node-gyp !/@npmcli/package-json @@ -209,7 +213,7 @@ !/pacote !/pacote/node_modules/ /pacote/node_modules/* -!/pacote/node_modules/npm-registry-fetch +!/pacote/node_modules/minipass !/parse-conflict-json !/path-is-absolute !/path-key diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/LICENSE.md similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/LICENSE.md diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/auth.js similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/auth.js diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/check-response.js similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/check-response.js diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/clean-url.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/clean-url.js similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/clean-url.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/clean-url.js diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/default-opts.js similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/default-opts.js diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/errors.js similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/errors.js diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/index.js similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/index.js diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/package.json similarity index 100% rename from node_modules/pacote/node_modules/npm-registry-fetch/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/package.json diff --git 
a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE
new file mode 100644
index 0000000000000..a03cd0ed0b338
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter, Kat Marchán, npm, Inc., and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js
new file mode 100755
index 0000000000000..f35b62ca71a53
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js
@@ -0,0 +1,158 @@
+#!/usr/bin/env node
+
+const run = conf => {
+  const pacote = require('../')
+  switch (conf._[0]) {
+    case 'resolve':
+    case 'manifest':
+    case 'packument':
+      if (conf._[0] === 'resolve' && conf.long) {
+        return pacote.manifest(conf._[1], conf).then(mani => ({
+          resolved: mani._resolved,
+          integrity: mani._integrity,
+          from: mani._from,
+        }))
+      }
+      return pacote[conf._[0]](conf._[1], conf)
+
+    case 'tarball':
+      if (!conf._[2] || conf._[2] === '-') {
+        return pacote.tarball.stream(conf._[1], stream => {
+          stream.pipe(
+            conf.testStdout ||
+            /* istanbul ignore next */
+            process.stdout
+          )
+          // make sure it resolves something falsey
+          return stream.promise().then(() => {
+            return false
+          })
+        }, conf)
+      } else {
+        return pacote.tarball.file(conf._[1], conf._[2], conf)
+      }
+
+    case 'extract':
+      return pacote.extract(conf._[1], conf._[2], conf)
+
+    default: /* istanbul ignore next */ {
+      throw new Error(`bad command: ${conf._[0]}`)
+    }
+  }
+}
+
+const version = require('../package.json').version
+const usage = () =>
+`Pacote - The JavaScript Package Handler, v${version}
+
+Usage:
+
+  pacote resolve <spec>
+    Resolve a specifier and output the fully resolved target
+    Returns integrity and from if '--long' flag is set.
+
+  pacote manifest <spec>
+    Fetch a manifest and print to stdout
+
+  pacote packument <spec>
+    Fetch a full packument and print to stdout
+
+  pacote tarball <spec> [<filename>]
+    Fetch a package tarball and save to <filename>
+    If <filename> is missing or '-', the tarball will be streamed to stdout.
+
+  pacote extract <spec> <folder>
+    Extract a package to the destination folder.
+
+Configuration values all match the names of configs passed to npm, or
+options passed to Pacote. Additional flags for this executable:
+
+  --long     Print an object from 'resolve', including integrity and spec.
+  --json     Print result objects as JSON rather than node's default.
+             (This is the default if stdout is not a TTY.)
+  --help -h  Print this helpful text.
+
+For example '--cache=/path/to/folder' will use that folder as the cache.
+` + +const shouldJSON = (conf, result) => + conf.json || + !process.stdout.isTTY && + conf.json === undefined && + result && + typeof result === 'object' + +const pretty = (conf, result) => + shouldJSON(conf, result) ? JSON.stringify(result, 0, 2) : result + +let addedLogListener = false +const main = args => { + const conf = parse(args) + if (conf.help || conf.h) { + return console.log(usage()) + } + + if (!addedLogListener) { + process.on('log', console.error) + addedLogListener = true + } + + try { + return run(conf) + .then(result => result && console.log(pretty(conf, result))) + .catch(er => { + console.error(er) + process.exit(1) + }) + } catch (er) { + console.error(er.message) + console.error(usage()) + } +} + +const parseArg = arg => { + const split = arg.slice(2).split('=') + const k = split.shift() + const v = split.join('=') + const no = /^no-/.test(k) && !v + const key = (no ? k.slice(3) : k) + .replace(/^tag$/, 'defaultTag') + .replace(/-([a-z])/g, (_, c) => c.toUpperCase()) + const value = v ? v.replace(/^~/, process.env.HOME) : !no + return { key, value } +} + +const parse = args => { + const conf = { + _: [], + cache: process.env.HOME + '/.npm/_cacache', + } + let dashdash = false + args.forEach(arg => { + if (dashdash) { + conf._.push(arg) + } else if (arg === '--') { + dashdash = true + } else if (arg === '-h') { + conf.help = true + } else if (/^--/.test(arg)) { + const { key, value } = parseArg(arg) + conf[key] = value + } else { + conf._.push(arg) + } + }) + return conf +} + +if (module === require.main) { + main(process.argv.slice(2)) +} else { + module.exports = { + main, + run, + usage, + parseArg, + parse, + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js new file mode 100644 index 0000000000000..420afc5802cb2 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js @@ -0,0 +1,108 @@ +const Fetcher = require('./fetcher.js') +const FileFetcher = require('./file.js') +const { Minipass } = require('minipass') +const tarCreateOptions = require('./util/tar-create-options.js') +const packlist = require('npm-packlist') +const tar = require('tar') +const _prepareDir = Symbol('_prepareDir') +const { resolve } = require('path') +const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson') + +const runScript = require('@npmcli/run-script') + +const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') +class DirFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + // just the fully resolved filename + this.resolved = this.spec.fetchSpec + + this.tree = opts.tree || null + this.Arborist = opts.Arborist || null + } + + // exposes tarCreateOptions as public API + static tarCreateOptions (manifest) { + return tarCreateOptions(manifest) + } + + get types () { + return ['directory'] + } + + [_prepareDir] () { + return this.manifest().then(mani => { + if (!mani.scripts || !mani.scripts.prepare) { + return + } + + // we *only* run prepare. + // pre/post-pack is run by the npm CLI for publish and pack, + // but this function is *also* run when installing git deps + const stdio = this.opts.foregroundScripts ? 'inherit' : 'pipe' + + // hide the banner if silent opt is passed in, or if prepare running + // in the background. + const banner = this.opts.silent ? 
false : stdio === 'inherit' + + return runScript({ + pkg: mani, + event: 'prepare', + path: this.resolved, + stdio, + banner, + env: { + npm_package_resolved: this.resolved, + npm_package_integrity: this.integrity, + npm_package_json: resolve(this.resolved, 'package.json'), + }, + }) + }) + } + + [_tarballFromResolved] () { + if (!this.tree && !this.Arborist) { + throw new Error('DirFetcher requires either a tree or an Arborist constructor to pack') + } + + const stream = new Minipass() + stream.resolved = this.resolved + stream.integrity = this.integrity + + const { prefix, workspaces } = this.opts + + // run the prepare script, get the list of files, and tar it up + // pipe to the stream, and proxy errors the chain. + this[_prepareDir]() + .then(async () => { + if (!this.tree) { + const arb = new this.Arborist({ path: this.resolved }) + this.tree = await arb.loadActual() + } + return packlist(this.tree, { path: this.resolved, prefix, workspaces }) + }) + .then(files => tar.c(tarCreateOptions(this.package), files) + .on('error', er => stream.emit('error', er)).pipe(stream)) + .catch(er => stream.emit('error', er)) + return stream + } + + manifest () { + if (this.package) { + return Promise.resolve(this.package) + } + + return this[_readPackageJson](this.resolved + '/package.json') + .then(mani => this.package = { + ...mani, + _integrity: this.integrity && String(this.integrity), + _resolved: this.resolved, + _from: this.from, + }) + } + + packument () { + return FileFetcher.prototype.packument.apply(this) + } +} +module.exports = DirFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js new file mode 100644 index 0000000000000..f961a45c7d346 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js @@ -0,0 +1,505 @@ +// This is the base class that the other fetcher types in lib +// all descend from. +// It handles the unpacking and retry logic that is shared among +// all of the other Fetcher types. + +const npa = require('npm-package-arg') +const ssri = require('ssri') +const { promisify } = require('util') +const { basename, dirname } = require('path') +const tar = require('tar') +const log = require('proc-log') +const retry = require('promise-retry') +const fs = require('fs/promises') +const fsm = require('fs-minipass') +const cacache = require('cacache') +const isPackageBin = require('./util/is-package-bin.js') +const removeTrailingSlashes = require('./util/trailing-slashes.js') +const getContents = require('@npmcli/installed-package-contents') +const readPackageJsonFast = require('read-package-json-fast') +const readPackageJson = promisify(require('read-package-json')) +const { Minipass } = require('minipass') + +const cacheDir = require('./util/cache-dir.js') + +// Private methods. +// Child classes should not have to override these. +// Users should never call them. 
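+// (Implementation note: the plain Symbol() keys below are unique to this
+// module, so those members are unreachable from outside it; the
+// Symbol.for() keys live in the global symbol registry, which is how the
+// sibling fetcher files in this package share and override them.)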
+const _extract = Symbol('_extract') +const _mkdir = Symbol('_mkdir') +const _empty = Symbol('_empty') +const _toFile = Symbol('_toFile') +const _tarxOptions = Symbol('_tarxOptions') +const _entryMode = Symbol('_entryMode') +const _istream = Symbol('_istream') +const _assertType = Symbol('_assertType') +const _tarballFromCache = Symbol('_tarballFromCache') +const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') +const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches') +const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson') + +class FetcherBase { + constructor (spec, opts) { + if (!opts || typeof opts !== 'object') { + throw new TypeError('options object is required') + } + this.spec = npa(spec, opts.where) + + this.allowGitIgnore = !!opts.allowGitIgnore + + // a bit redundant because presumably the caller already knows this, + // but it makes it easier to not have to keep track of the requested + // spec when we're dispatching thousands of these at once, and normalizing + // is nice. saveSpec is preferred if set, because it turns stuff like + // x/y#committish into github:x/y#committish. use name@rawSpec for + // registry deps so that we turn xyz and xyz@ -> xyz@ + this.from = this.spec.registry + ? `${this.spec.name}@${this.spec.rawSpec}` : this.spec.saveSpec + + this[_assertType]() + // clone the opts object so that others aren't upset when we mutate it + // by adding/modifying the integrity value. + this.opts = { ...opts } + + this.cache = opts.cache || cacheDir().cacache + this.tufCache = opts.tufCache || cacheDir().tufcache + this.resolved = opts.resolved || null + + // default to caching/verifying with sha512, that's what we usually have + // need to change this default, or start overriding it, when sha512 + // is no longer strong enough. + this.defaultIntegrityAlgorithm = opts.defaultIntegrityAlgorithm || 'sha512' + + if (typeof opts.integrity === 'string') { + this.opts.integrity = ssri.parse(opts.integrity) + } + + this.package = null + this.type = this.constructor.name + this.fmode = opts.fmode || 0o666 + this.dmode = opts.dmode || 0o777 + // we don't need a default umask, because we don't chmod files coming + // out of package tarballs. they're forced to have a mode that is + // valid, regardless of what's in the tarball entry, and then we let + // the process's umask setting do its job. but if configured, we do + // respect it. + this.umask = opts.umask || 0 + + this.preferOnline = !!opts.preferOnline + this.preferOffline = !!opts.preferOffline + this.offline = !!opts.offline + + this.before = opts.before + this.fullMetadata = this.before ? true : !!opts.fullMetadata + this.fullReadJson = !!opts.fullReadJson + if (this.fullReadJson) { + this[_readPackageJson] = readPackageJson + } else { + this[_readPackageJson] = readPackageJsonFast + } + + // rrh is a registry hostname or 'never' or 'always' + // defaults to registry.npmjs.org + this.replaceRegistryHost = (!opts.replaceRegistryHost || opts.replaceRegistryHost === 'npmjs') ? + 'registry.npmjs.org' : opts.replaceRegistryHost + + this.defaultTag = opts.defaultTag || 'latest' + this.registry = removeTrailingSlashes(opts.registry || 'https://registry.npmjs.org') + + // command to run 'prepare' scripts on directories and git dirs + // To use pacote with yarn, for example, set npmBin to 'yarn' + // and npmCliConfig with yarn's equivalents. 
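+    // An illustrative (hypothetical) call with those options:
+    //   pacote.extract(spec, dest, { npmBin: 'yarn', npmCliConfig: ['--ignore-engines'] })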
+ this.npmBin = opts.npmBin || 'npm' + + // command to install deps for preparing + this.npmInstallCmd = opts.npmInstallCmd || ['install', '--force'] + + // XXX fill more of this in based on what we know from this.opts + // we explicitly DO NOT fill in --tag, though, since we are often + // going to be packing in the context of a publish, which may set + // a dist-tag, but certainly wants to keep defaulting to latest. + this.npmCliConfig = opts.npmCliConfig || [ + `--cache=${dirname(this.cache)}`, + `--prefer-offline=${!!this.preferOffline}`, + `--prefer-online=${!!this.preferOnline}`, + `--offline=${!!this.offline}`, + ...(this.before ? [`--before=${this.before.toISOString()}`] : []), + '--no-progress', + '--no-save', + '--no-audit', + // override any omit settings from the environment + '--include=dev', + '--include=peer', + '--include=optional', + // we need the actual things, not just the lockfile + '--no-package-lock-only', + '--no-dry-run', + ] + } + + get integrity () { + return this.opts.integrity || null + } + + set integrity (i) { + if (!i) { + return + } + + i = ssri.parse(i) + const current = this.opts.integrity + + // do not ever update an existing hash value, but do + // merge in NEW algos and hashes that we don't already have. + if (current) { + current.merge(i) + } else { + this.opts.integrity = i + } + } + + get notImplementedError () { + return new Error('not implemented in this fetcher type: ' + this.type) + } + + // override in child classes + // Returns a Promise that resolves to this.resolved string value + resolve () { + return this.resolved ? Promise.resolve(this.resolved) + : Promise.reject(this.notImplementedError) + } + + packument () { + return Promise.reject(this.notImplementedError) + } + + // override in child class + // returns a manifest containing: + // - name + // - version + // - _resolved + // - _integrity + // - plus whatever else was in there (corgi, full metadata, or pj file) + manifest () { + return Promise.reject(this.notImplementedError) + } + + // private, should be overridden. + // Note that they should *not* calculate or check integrity or cache, + // but *just* return the raw tarball data stream. + [_tarballFromResolved] () { + throw this.notImplementedError + } + + // public, should not be overridden + tarball () { + return this.tarballStream(stream => stream.concat().then(data => { + data.integrity = this.integrity && String(this.integrity) + data.resolved = this.resolved + data.from = this.from + return data + })) + } + + // private + // Note: cacache will raise a EINTEGRITY error if the integrity doesn't match + [_tarballFromCache] () { + return cacache.get.stream.byDigest(this.cache, this.integrity, this.opts) + } + + get [_cacheFetches] () { + return true + } + + [_istream] (stream) { + // if not caching this, just return it + if (!this.opts.cache || !this[_cacheFetches]) { + // instead of creating a new integrity stream, we only piggyback on the + // provided stream's events + if (stream.hasIntegrityEmitter) { + stream.on('integrity', i => this.integrity = i) + return stream + } + + const istream = ssri.integrityStream(this.opts) + istream.on('integrity', i => this.integrity = i) + stream.on('error', err => istream.emit('error', err)) + return stream.pipe(istream) + } + + // we have to return a stream that gets ALL the data, and proxies errors, + // but then pipe from the original tarball stream into the cache as well. 
+ // To do this without losing any data, and since the cacache put stream + // is not a passthrough, we have to pipe from the original stream into + // the cache AFTER we pipe into the middleStream. Since the cache stream + // has an asynchronous flush to write its contents to disk, we need to + // defer the middleStream end until the cache stream ends. + const middleStream = new Minipass() + stream.on('error', err => middleStream.emit('error', err)) + stream.pipe(middleStream, { end: false }) + const cstream = cacache.put.stream( + this.opts.cache, + `pacote:tarball:${this.from}`, + this.opts + ) + cstream.on('integrity', i => this.integrity = i) + cstream.on('error', err => stream.emit('error', err)) + stream.pipe(cstream) + + // eslint-disable-next-line promise/catch-or-return + cstream.promise().catch(() => {}).then(() => middleStream.end()) + return middleStream + } + + pickIntegrityAlgorithm () { + return this.integrity ? this.integrity.pickAlgorithm(this.opts) + : this.defaultIntegrityAlgorithm + } + + // TODO: check error class, once those are rolled out to our deps + isDataCorruptionError (er) { + return er.code === 'EINTEGRITY' || er.code === 'Z_DATA_ERROR' + } + + // override the types getter + get types () { + return false + } + + [_assertType] () { + if (this.types && !this.types.includes(this.spec.type)) { + throw new TypeError(`Wrong spec type (${ + this.spec.type + }) for ${ + this.constructor.name + }. Supported types: ${this.types.join(', ')}`) + } + } + + // We allow ENOENTs from cacache, but not anywhere else. + // An ENOENT trying to read a tgz file, for example, is Right Out. + isRetriableError (er) { + // TODO: check error class, once those are rolled out to our deps + return this.isDataCorruptionError(er) || + er.code === 'ENOENT' || + er.code === 'EISDIR' + } + + // Mostly internal, but has some uses + // Pass in a function which returns a promise + // Function will be called 1 or more times with streams that may fail. + // Retries: + // Function MUST handle errors on the stream by rejecting the promise, + // so that retry logic can pick it up and either retry or fail whatever + // promise it was making (ie, failing extraction, etc.) + // + // The return value of this method is a Promise that resolves the same + // as whatever the streamHandler resolves to. + // + // This should never be overridden by child classes, but it is public. + tarballStream (streamHandler) { + // Only short-circuit via cache if we have everything else we'll need, + // and the user has not expressed a preference for checking online. + + const fromCache = ( + !this.preferOnline && + this.integrity && + this.resolved + ) ? streamHandler(this[_tarballFromCache]()).catch(er => { + if (this.isDataCorruptionError(er)) { + log.warn('tarball', `cached data for ${ + this.spec + } (${this.integrity}) seems to be corrupted. Refreshing cache.`) + return this.cleanupCached().then(() => { + throw er + }) + } else { + throw er + } + }) : null + + const fromResolved = er => { + if (er) { + if (!this.isRetriableError(er)) { + throw er + } + log.silly('tarball', `no local data for ${ + this.spec + }. Extracting by manifest.`) + } + return this.resolve().then(() => retry(tryAgain => + streamHandler(this[_istream](this[_tarballFromResolved]())) + .catch(streamErr => { + // Most likely data integrity. A cache ENOENT error is unlikely + // here, since we're definitely not reading from the cache, but it + // IS possible that the fetch subsystem accessed the cache, and the + // entry got blown away or something. 
Try one more time to be sure. + if (this.isRetriableError(streamErr)) { + log.warn('tarball', `tarball data for ${ + this.spec + } (${this.integrity}) seems to be corrupted. Trying again.`) + return this.cleanupCached().then(() => tryAgain(streamErr)) + } + throw streamErr + }), { retries: 1, minTimeout: 0, maxTimeout: 0 })) + } + + return fromCache ? fromCache.catch(fromResolved) : fromResolved() + } + + cleanupCached () { + return cacache.rm.content(this.cache, this.integrity, this.opts) + } + + [_empty] (path) { + return getContents({ path, depth: 1 }).then(contents => Promise.all( + contents.map(entry => fs.rm(entry, { recursive: true, force: true })))) + } + + async [_mkdir] (dest) { + await this[_empty](dest) + return await fs.mkdir(dest, { recursive: true }) + } + + // extraction is always the same. the only difference is where + // the tarball comes from. + async extract (dest) { + await this[_mkdir](dest) + return this.tarballStream((tarball) => this[_extract](dest, tarball)) + } + + [_toFile] (dest) { + return this.tarballStream(str => new Promise((res, rej) => { + const writer = new fsm.WriteStream(dest) + str.on('error', er => writer.emit('error', er)) + writer.on('error', er => rej(er)) + writer.on('close', () => res({ + integrity: this.integrity && String(this.integrity), + resolved: this.resolved, + from: this.from, + })) + str.pipe(writer) + })) + } + + // don't use this[_mkdir] because we don't want to rimraf anything + async tarballFile (dest) { + const dir = dirname(dest) + await fs.mkdir(dir, { recursive: true }) + return this[_toFile](dest) + } + + [_extract] (dest, tarball) { + const extractor = tar.x(this[_tarxOptions]({ cwd: dest })) + const p = new Promise((resolve, reject) => { + extractor.on('end', () => { + resolve({ + resolved: this.resolved, + integrity: this.integrity && String(this.integrity), + from: this.from, + }) + }) + + extractor.on('error', er => { + log.warn('tar', er.message) + log.silly('tar', er) + reject(er) + }) + + tarball.on('error', er => reject(er)) + }) + + tarball.pipe(extractor) + return p + } + + // always ensure that entries are at least as permissive as our configured + // dmode/fmode, but never more permissive than the umask allows. + [_entryMode] (path, mode, type) { + const m = /Directory|GNUDumpDir/.test(type) ? this.dmode + : /File$/.test(type) ? this.fmode + : /* istanbul ignore next - should never happen in a pkg */ 0 + + // make sure package bins are executable + const exe = isPackageBin(this.package, path) ? 0o111 : 0 + // always ensure that files are read/writable by the owner + return ((mode | m) & ~this.umask) | exe | 0o600 + } + + [_tarxOptions] ({ cwd }) { + const sawIgnores = new Set() + return { + cwd, + noChmod: true, + noMtime: true, + filter: (name, entry) => { + if (/Link$/.test(entry.type)) { + return false + } + entry.mode = this[_entryMode](entry.path, entry.mode, entry.type) + // this replicates the npm pack behavior where .gitignore files + // are treated like .npmignore files, but only if a .npmignore + // file is not present. 
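+        // (Net effect: a lone .gitignore is extracted under the name
+        // .npmignore; if a real .npmignore at the same path was already
+        // seen, the .gitignore entry is skipped instead.)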
+ if (/File$/.test(entry.type)) { + const base = basename(entry.path) + if (base === '.npmignore') { + sawIgnores.add(entry.path) + } else if (base === '.gitignore' && !this.allowGitIgnore) { + // rename, but only if there's not already a .npmignore + const ni = entry.path.replace(/\.gitignore$/, '.npmignore') + if (sawIgnores.has(ni)) { + return false + } + entry.path = ni + } + return true + } + }, + strip: 1, + onwarn: /* istanbul ignore next - we can trust that tar logs */ + (code, msg, data) => { + log.warn('tar', code, msg) + log.silly('tar', code, msg, data) + }, + umask: this.umask, + // always ignore ownership info from tarball metadata + preserveOwner: false, + } + } +} + +module.exports = FetcherBase + +// Child classes +const GitFetcher = require('./git.js') +const RegistryFetcher = require('./registry.js') +const FileFetcher = require('./file.js') +const DirFetcher = require('./dir.js') +const RemoteFetcher = require('./remote.js') + +// Get an appropriate fetcher object from a spec and options +FetcherBase.get = (rawSpec, opts = {}) => { + const spec = npa(rawSpec, opts.where) + switch (spec.type) { + case 'git': + return new GitFetcher(spec, opts) + + case 'remote': + return new RemoteFetcher(spec, opts) + + case 'version': + case 'range': + case 'tag': + case 'alias': + return new RegistryFetcher(spec.subSpec || spec, opts) + + case 'file': + return new FileFetcher(spec, opts) + + case 'directory': + return new DirFetcher(spec, opts) + + default: + throw new TypeError('Unknown spec type: ' + spec.type) + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js new file mode 100644 index 0000000000000..bf99bb86e359e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js @@ -0,0 +1,96 @@ +const Fetcher = require('./fetcher.js') +const fsm = require('fs-minipass') +const cacache = require('cacache') +const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') +const _exeBins = Symbol('_exeBins') +const { resolve } = require('path') +const fs = require('fs') +const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson') + +class FileFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + // just the fully resolved filename + this.resolved = this.spec.fetchSpec + } + + get types () { + return ['file'] + } + + manifest () { + if (this.package) { + return Promise.resolve(this.package) + } + + // have to unpack the tarball for this. + return cacache.tmp.withTmp(this.cache, this.opts, dir => + this.extract(dir) + .then(() => this[_readPackageJson](dir + '/package.json')) + .then(mani => this.package = { + ...mani, + _integrity: this.integrity && String(this.integrity), + _resolved: this.resolved, + _from: this.from, + })) + } + + [_exeBins] (pkg, dest) { + if (!pkg.bin) { + return Promise.resolve() + } + + return Promise.all(Object.keys(pkg.bin).map(k => new Promise(res => { + const script = resolve(dest, pkg.bin[k]) + // Best effort. Ignore errors here, the only result is that + // a bin script is not executable. But if it's missing or + // something, we just leave it for a later stage to trip over + // when we can provide a more useful contextual error. 
+ fs.stat(script, (er, st) => { + if (er) { + return res() + } + const mode = st.mode | 0o111 + if (mode === st.mode) { + return res() + } + fs.chmod(script, mode, res) + }) + }))) + } + + extract (dest) { + // if we've already loaded the manifest, then the super got it. + // but if not, read the unpacked manifest and chmod properly. + return super.extract(dest) + .then(result => this.package ? result + : this[_readPackageJson](dest + '/package.json').then(pkg => + this[_exeBins](pkg, dest)).then(() => result)) + } + + [_tarballFromResolved] () { + // create a read stream and return it + return new fsm.ReadStream(this.resolved) + } + + packument () { + // simulate based on manifest + return this.manifest().then(mani => ({ + name: mani.name, + 'dist-tags': { + [this.defaultTag]: mani.version, + }, + versions: { + [mani.version]: { + ...mani, + dist: { + tarball: `file:${this.resolved}`, + integrity: this.integrity && String(this.integrity), + }, + }, + }, + })) + } +} + +module.exports = FileFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js new file mode 100644 index 0000000000000..5d24f72497ec9 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js @@ -0,0 +1,327 @@ +const Fetcher = require('./fetcher.js') +const FileFetcher = require('./file.js') +const RemoteFetcher = require('./remote.js') +const DirFetcher = require('./dir.js') +const hashre = /^[a-f0-9]{40}$/ +const git = require('@npmcli/git') +const pickManifest = require('npm-pick-manifest') +const npa = require('npm-package-arg') +const { Minipass } = require('minipass') +const cacache = require('cacache') +const log = require('proc-log') +const npm = require('./util/npm.js') + +const _resolvedFromRepo = Symbol('_resolvedFromRepo') +const _resolvedFromHosted = Symbol('_resolvedFromHosted') +const _resolvedFromClone = Symbol('_resolvedFromClone') +const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') +const _addGitSha = Symbol('_addGitSha') +const addGitSha = require('./util/add-git-sha.js') +const _clone = Symbol('_clone') +const _cloneHosted = Symbol('_cloneHosted') +const _cloneRepo = Symbol('_cloneRepo') +const _setResolvedWithSha = Symbol('_setResolvedWithSha') +const _prepareDir = Symbol('_prepareDir') +const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson') + +// get the repository url. +// prefer https if there's auth, since ssh will drop that. +// otherwise, prefer ssh if available (more secure). +// We have to add the git+ back because npa suppresses it. +const repoUrl = (h, opts) => + h.sshurl && !(h.https && h.auth) && addGitPlus(h.sshurl(opts)) || + h.https && addGitPlus(h.https(opts)) + +// add git+ to the url, but only one time. 
+const addGitPlus = url => url && `git+${url}`.replace(/^(git\+)+/, 'git+') + +class GitFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + + // we never want to compare integrity for git dependencies: npm/rfcs#525 + if (this.opts.integrity) { + delete this.opts.integrity + log.warn(`skipping integrity check for git dependency ${this.spec.fetchSpec}`) + } + + this.resolvedRef = null + if (this.spec.hosted) { + this.from = this.spec.hosted.shortcut({ noCommittish: false }) + } + + // shortcut: avoid full clone when we can go straight to the tgz + // if we have the full sha and it's a hosted git platform + if (this.spec.gitCommittish && hashre.test(this.spec.gitCommittish)) { + this.resolvedSha = this.spec.gitCommittish + // use hosted.tarball() when we shell to RemoteFetcher later + this.resolved = this.spec.hosted + ? repoUrl(this.spec.hosted, { noCommittish: false }) + : this.spec.rawSpec + } else { + this.resolvedSha = '' + } + + this.Arborist = opts.Arborist || null + } + + // just exposed to make it easier to test all the combinations + static repoUrl (hosted, opts) { + return repoUrl(hosted, opts) + } + + get types () { + return ['git'] + } + + resolve () { + // likely a hosted git repo with a sha, so get the tarball url + // but in general, no reason to resolve() more than necessary! + if (this.resolved) { + return super.resolve() + } + + // fetch the git repo and then look at the current hash + const h = this.spec.hosted + // try to use ssh, fall back to git. + return h ? this[_resolvedFromHosted](h) + : this[_resolvedFromRepo](this.spec.fetchSpec) + } + + // first try https, since that's faster and passphrase-less for + // public repos, and supports private repos when auth is provided. + // Fall back to SSH to support private repos + // NB: we always store the https url in resolved field if auth + // is present, otherwise ssh if the hosted type provides it + [_resolvedFromHosted] (hosted) { + return this[_resolvedFromRepo](hosted.https && hosted.https()) + .catch(er => { + // Throw early since we know pathspec errors will fail again if retried + if (er instanceof git.errors.GitPathspecError) { + throw er + } + const ssh = hosted.sshurl && hosted.sshurl() + // no fallthrough if we can't fall through or have https auth + if (!ssh || hosted.auth) { + throw er + } + return this[_resolvedFromRepo](ssh) + }) + } + + [_resolvedFromRepo] (gitRemote) { + // XXX make this a custom error class + if (!gitRemote) { + return Promise.reject(new Error(`No git url for ${this.spec}`)) + } + const gitRange = this.spec.gitRange + const name = this.spec.name + return git.revs(gitRemote, this.opts).then(remoteRefs => { + return gitRange ? pickManifest({ + versions: remoteRefs.versions, + 'dist-tags': remoteRefs['dist-tags'], + name, + }, gitRange, this.opts) + : this.spec.gitCommittish ? + remoteRefs.refs[this.spec.gitCommittish] || + remoteRefs.refs[remoteRefs.shas[this.spec.gitCommittish]] + : remoteRefs.refs.HEAD // no git committish, get default head + }).then(revDoc => { + // the committish provided isn't in the rev list + // things like HEAD~3 or @yesterday can land here. + if (!revDoc || !revDoc.sha) { + return this[_resolvedFromClone]() + } + + this.resolvedRef = revDoc + this.resolvedSha = revDoc.sha + this[_addGitSha](revDoc.sha) + return this.resolved + }) + } + + [_setResolvedWithSha] (withSha) { + // we haven't cloned, so a tgz download is still faster + // of course, if it's not a known host, we can't do that. + this.resolved = !this.spec.hosted ? 
withSha
+      : repoUrl(npa(withSha).hosted, { noCommittish: false })
+  }
+
+  // when we get the git sha, we affix it to our spec to build up
+  // either a git url with a hash, or a tarball download URL
+  [_addGitSha] (sha) {
+    this[_setResolvedWithSha](addGitSha(this.spec, sha))
+  }
+
+  [_resolvedFromClone] () {
+    // do a full or shallow clone, then look at the HEAD
+    // kind of wasteful, but no other option, really
+    return this[_clone](dir => this.resolved)
+  }
+
+  [_prepareDir] (dir) {
+    return this[_readPackageJson](dir + '/package.json').then(mani => {
+      // no need if we aren't going to do any preparation.
+      const scripts = mani.scripts
+      if (!mani.workspaces && (!scripts || !(
+        scripts.postinstall ||
+        scripts.build ||
+        scripts.preinstall ||
+        scripts.install ||
+        scripts.prepack ||
+        scripts.prepare))) {
+        return
+      }
+
+      // to avoid cases where we have a cycle of git deps that depend
+      // on one another, we only ever do preparation for one instance
+      // of a given git dep along the chain of installations.
+      // Note that this does mean that a dependency MAY in theory end up
+      // trying to run its prepare script using a dependency that has not
+      // been properly prepared itself, but that edge case is smaller
+      // and less hazardous than a fork bomb of npm and git commands.
+      const noPrepare = !process.env._PACOTE_NO_PREPARE_ ? []
+        : process.env._PACOTE_NO_PREPARE_.split('\n')
+      if (noPrepare.includes(this.resolved)) {
+        log.info('prepare', 'skip prepare, already seen', this.resolved)
+        return
+      }
+      noPrepare.push(this.resolved)
+
+      // the DirFetcher will do its own preparation to run the prepare scripts
+      // All we have to do is put the deps in place so that it can succeed.
+      return npm(
+        this.npmBin,
+        [].concat(this.npmInstallCmd).concat(this.npmCliConfig),
+        dir,
+        { ...process.env, _PACOTE_NO_PREPARE_: noPrepare.join('\n') },
+        { message: 'git dep preparation failed' }
+      )
+    })
+  }
+
+  [_tarballFromResolved] () {
+    const stream = new Minipass()
+    stream.resolved = this.resolved
+    stream.from = this.from
+
+    // check it out and then shell out to the DirFetcher tarball packer
+    this[_clone](dir => this[_prepareDir](dir)
+      .then(() => new Promise((res, rej) => {
+        if (!this.Arborist) {
+          throw new Error('GitFetcher requires an Arborist constructor to pack a tarball')
+        }
+        const df = new DirFetcher(`file:${dir}`, {
+          ...this.opts,
+          Arborist: this.Arborist,
+          resolved: null,
+          integrity: null,
+        })
+        const dirStream = df[_tarballFromResolved]()
+        dirStream.on('error', rej)
+        dirStream.on('end', res)
+        dirStream.pipe(stream)
+      }))).catch(
+      /* istanbul ignore next: very unlikely and hard to test */
+      er => stream.emit('error', er)
+    )
+    return stream
+  }
+
+  // clone a git repo into a temp folder (or fetch and unpack if possible)
+  // handler accepts a directory, and returns a promise that resolves
+  // when we're done with it, at which point, cacache deletes it
+  //
+  // TODO: after cloning, create a tarball of the folder, and add to the cache
+  // with cacache.put.stream(), using a key that's deterministic based on the
+  // spec and repo, so that we don't ever clone the same thing multiple times.
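+  // In short: a hosted spec pinned to a full 40-char sha (and resolved to
+  // that host's repo url) can skip the clone entirely and download the
+  // host's tarball via RemoteFetcher; anything else, or a failed tarball
+  // download, falls back to a real git clone in a cacache temp directory.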
+ [_clone] (handler, tarballOk = true) { + const o = { tmpPrefix: 'git-clone' } + const ref = this.resolvedSha || this.spec.gitCommittish + const h = this.spec.hosted + const resolved = this.resolved + + // can be set manually to false to fall back to actual git clone + tarballOk = tarballOk && + h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball + + return cacache.tmp.withTmp(this.cache, o, async tmp => { + // if we're resolved, and have a tarball url, shell out to RemoteFetcher + if (tarballOk) { + const nameat = this.spec.name ? `${this.spec.name}@` : '' + return new RemoteFetcher(h.tarball({ noCommittish: false }), { + ...this.opts, + allowGitIgnore: true, + pkgid: `git:${nameat}${this.resolved}`, + resolved: this.resolved, + integrity: null, // it'll always be different, if we have one + }).extract(tmp).then(() => handler(tmp), er => { + // fall back to ssh download if tarball fails + if (er.constructor.name.match(/^Http/)) { + return this[_clone](handler, false) + } else { + throw er + } + }) + } + + const sha = await ( + h ? this[_cloneHosted](ref, tmp) + : this[_cloneRepo](this.spec.fetchSpec, ref, tmp) + ) + this.resolvedSha = sha + if (!this.resolved) { + await this[_addGitSha](sha) + } + return handler(tmp) + }) + } + + // first try https, since that's faster and passphrase-less for + // public repos, and supports private repos when auth is provided. + // Fall back to SSH to support private repos + // NB: we always store the https url in resolved field if auth + // is present, otherwise ssh if the hosted type provides it + [_cloneHosted] (ref, tmp) { + const hosted = this.spec.hosted + return this[_cloneRepo](hosted.https({ noCommittish: true }), ref, tmp) + .catch(er => { + // Throw early since we know pathspec errors will fail again if retried + if (er instanceof git.errors.GitPathspecError) { + throw er + } + const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true }) + // no fallthrough if we can't fall through or have https auth + if (!ssh || hosted.auth) { + throw er + } + return this[_cloneRepo](ssh, ref, tmp) + }) + } + + [_cloneRepo] (repo, ref, tmp) { + const { opts, spec } = this + return git.clone(repo, ref, tmp, { ...opts, spec }) + } + + manifest () { + if (this.package) { + return Promise.resolve(this.package) + } + + return this.spec.hosted && this.resolved + ? 
FileFetcher.prototype.manifest.apply(this) + : this[_clone](dir => + this[_readPackageJson](dir + '/package.json') + .then(mani => this.package = { + ...mani, + _resolved: this.resolved, + _from: this.from, + })) + } + + packument () { + return FileFetcher.prototype.packument.apply(this) + } +} +module.exports = GitFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js new file mode 100644 index 0000000000000..cbcbd7c92d15f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js @@ -0,0 +1,23 @@ +const { get } = require('./fetcher.js') +const GitFetcher = require('./git.js') +const RegistryFetcher = require('./registry.js') +const FileFetcher = require('./file.js') +const DirFetcher = require('./dir.js') +const RemoteFetcher = require('./remote.js') + +module.exports = { + GitFetcher, + RegistryFetcher, + FileFetcher, + DirFetcher, + RemoteFetcher, + resolve: (spec, opts) => get(spec, opts).resolve(), + extract: (spec, dest, opts) => get(spec, opts).extract(dest), + manifest: (spec, opts) => get(spec, opts).manifest(), + tarball: (spec, opts) => get(spec, opts).tarball(), + packument: (spec, opts) => get(spec, opts).packument(), +} +module.exports.tarball.stream = (spec, handler, opts) => + get(spec, opts).tarballStream(handler) +module.exports.tarball.file = (spec, dest, opts) => + get(spec, opts).tarballFile(dest) diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js new file mode 100644 index 0000000000000..34d9b2b87f3f3 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js @@ -0,0 +1,344 @@ +const Fetcher = require('./fetcher.js') +const RemoteFetcher = require('./remote.js') +const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') +const pacoteVersion = require('../package.json').version +const removeTrailingSlashes = require('./util/trailing-slashes.js') +const rpj = require('read-package-json-fast') +const pickManifest = require('npm-pick-manifest') +const ssri = require('ssri') +const crypto = require('crypto') +const npa = require('npm-package-arg') +const { sigstore } = require('sigstore') + +// Corgis are cute. 🐕🐶 +const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*' +const fullDoc = 'application/json' + +const fetch = require('npm-registry-fetch') + +const _headers = Symbol('_headers') +class RegistryFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + + // you usually don't want to fetch the same packument multiple times in + // the span of a given script or command, no matter how many pacote calls + // are made, so this lets us avoid doing that. It's only relevant for + // registry fetchers, because other types simulate their packument from + // the manifest, which they memoize on this.package, so it's very cheap + // already. + this.packumentCache = this.opts.packumentCache || null + + this.registry = fetch.pickRegistry(spec, opts) + this.packumentUrl = removeTrailingSlashes(this.registry) + '/' + + this.spec.escapedName + + const parsed = new URL(this.registry) + const regKey = `//${parsed.host}${parsed.pathname}` + // unlike the nerf-darted auth keys, this one does *not* allow a mismatch + // of trailing slashes. It must match exactly. 
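+    // e.g. a registry of 'https://registry.npmjs.org/' is looked up under
+    // the option key '//registry.npmjs.org/:_keys'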
+ if (this.opts[`${regKey}:_keys`]) { + this.registryKeys = this.opts[`${regKey}:_keys`] + } + + // XXX pacote <=9 has some logic to ignore opts.resolved if + // the resolved URL doesn't go to the same registry. + // Consider reproducing that here, to throw away this.resolved + // in that case. + } + + async resolve () { + // fetching the manifest sets resolved and (if present) integrity + await this.manifest() + if (!this.resolved) { + throw Object.assign( + new Error('Invalid package manifest: no `dist.tarball` field'), + { package: this.spec.toString() } + ) + } + return this.resolved + } + + [_headers] () { + return { + // npm will override UA, but ensure that we always send *something* + 'user-agent': this.opts.userAgent || + `pacote/${pacoteVersion} node/${process.version}`, + ...(this.opts.headers || {}), + 'pacote-version': pacoteVersion, + 'pacote-req-type': 'packument', + 'pacote-pkg-id': `registry:${this.spec.name}`, + accept: this.fullMetadata ? fullDoc : corgiDoc, + } + } + + async packument () { + // note this might be either an in-flight promise for a request, + // or the actual packument, but we never want to make more than + // one request at a time for the same thing regardless. + if (this.packumentCache && this.packumentCache.has(this.packumentUrl)) { + return this.packumentCache.get(this.packumentUrl) + } + + // npm-registry-fetch the packument + // set the appropriate header for corgis if fullMetadata isn't set + // return the res.json() promise + try { + const res = await fetch(this.packumentUrl, { + ...this.opts, + headers: this[_headers](), + spec: this.spec, + // never check integrity for packuments themselves + integrity: null, + }) + const packument = await res.json() + packument._contentLength = +res.headers.get('content-length') + if (this.packumentCache) { + this.packumentCache.set(this.packumentUrl, packument) + } + return packument + } catch (err) { + if (this.packumentCache) { + this.packumentCache.delete(this.packumentUrl) + } + if (err.code !== 'E404' || this.fullMetadata) { + throw err + } + // possible that corgis are not supported by this registry + this.fullMetadata = true + return this.packument() + } + } + + async manifest () { + if (this.package) { + return this.package + } + + const packument = await this.packument() + let mani = await pickManifest(packument, this.spec.fetchSpec, { + ...this.opts, + defaultTag: this.defaultTag, + before: this.before, + }) + mani = rpj.normalize(mani) + /* XXX add ETARGET and E403 revalidation of cached packuments here */ + + // add _resolved and _integrity from dist object + const { dist } = mani + if (dist) { + this.resolved = mani._resolved = dist.tarball + mani._from = this.from + const distIntegrity = dist.integrity ? ssri.parse(dist.integrity) + : dist.shasum ? ssri.fromHex(dist.shasum, 'sha1', { ...this.opts }) + : null + if (distIntegrity) { + if (this.integrity && !this.integrity.match(distIntegrity)) { + // only bork if they have algos in common. + // otherwise we end up breaking if we have saved a sha512 + // previously for the tarball, but the manifest only + // provides a sha1, which is possible for older publishes. + // Otherwise, this is almost certainly a case of holding it + // wrong, and will result in weird or insecure behavior + // later on when building package tree. 
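+          // e.g. we hold a saved sha512 but the manifest only lists a sha1:
+          // no algorithm in common, nothing to compare, so we accept the
+          // manifest's integrity and let the setter merge the two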
+ for (const algo of Object.keys(this.integrity)) { + if (distIntegrity[algo]) { + throw Object.assign(new Error( + `Integrity checksum failed when using ${algo}: ` + + `wanted ${this.integrity} but got ${distIntegrity}.` + ), { code: 'EINTEGRITY' }) + } + } + } + // made it this far, the integrity is worthwhile. accept it. + // the setter here will take care of merging it into what we already + // had. + this.integrity = distIntegrity + } + } + if (this.integrity) { + mani._integrity = String(this.integrity) + if (dist.signatures) { + if (this.opts.verifySignatures) { + // validate and throw on error, then set _signatures + const message = `${mani._id}:${mani._integrity}` + for (const signature of dist.signatures) { + const publicKey = this.registryKeys && + this.registryKeys.filter(key => (key.keyid === signature.keyid))[0] + if (!publicKey) { + throw Object.assign(new Error( + `${mani._id} has a registry signature with keyid: ${signature.keyid} ` + + 'but no corresponding public key can be found' + ), { code: 'EMISSINGSIGNATUREKEY' }) + } + const validPublicKey = + !publicKey.expires || (Date.parse(publicKey.expires) > Date.now()) + if (!validPublicKey) { + throw Object.assign(new Error( + `${mani._id} has a registry signature with keyid: ${signature.keyid} ` + + `but the corresponding public key has expired ${publicKey.expires}` + ), { code: 'EEXPIREDSIGNATUREKEY' }) + } + const verifier = crypto.createVerify('SHA256') + verifier.write(message) + verifier.end() + const valid = verifier.verify( + publicKey.pemkey, + signature.sig, + 'base64' + ) + if (!valid) { + throw Object.assign(new Error( + `${mani._id} has an invalid registry signature with ` + + `keyid: ${publicKey.keyid} and signature: ${signature.sig}` + ), { + code: 'EINTEGRITYSIGNATURE', + keyid: publicKey.keyid, + signature: signature.sig, + resolved: mani._resolved, + integrity: mani._integrity, + }) + } + } + mani._signatures = dist.signatures + } else { + mani._signatures = dist.signatures + } + } + + if (dist.attestations) { + if (this.opts.verifyAttestations) { + // Always fetch attestations from the current registry host + const attestationsPath = new URL(dist.attestations.url).pathname + const attestationsUrl = removeTrailingSlashes(this.registry) + attestationsPath + const res = await fetch(attestationsUrl, { + ...this.opts, + // disable integrity check for attestations json payload, we check the + // integrity in the verification steps below + integrity: null, + }) + const { attestations } = await res.json() + const bundles = attestations.map(({ predicateType, bundle }) => { + const statement = JSON.parse( + Buffer.from(bundle.dsseEnvelope.payload, 'base64').toString('utf8') + ) + const keyid = bundle.dsseEnvelope.signatures[0].keyid + const signature = bundle.dsseEnvelope.signatures[0].sig + + return { + predicateType, + bundle, + statement, + keyid, + signature, + } + }) + + const attestationKeyIds = bundles.map((b) => b.keyid).filter((k) => !!k) + const attestationRegistryKeys = (this.registryKeys || []) + .filter(key => attestationKeyIds.includes(key.keyid)) + if (!attestationRegistryKeys.length) { + throw Object.assign(new Error( + `${mani._id} has attestations but no corresponding public key(s) can be found` + ), { code: 'EMISSINGSIGNATUREKEY' }) + } + + for (const { predicateType, bundle, keyid, signature, statement } of bundles) { + const publicKey = attestationRegistryKeys.find(key => key.keyid === keyid) + // Publish attestations have a keyid set and a valid public key must be found + if (keyid) { + if 
(!publicKey) { + throw Object.assign(new Error( + `${mani._id} has attestations with keyid: ${keyid} ` + + 'but no corresponding public key can be found' + ), { code: 'EMISSINGSIGNATUREKEY' }) + } + + const validPublicKey = + !publicKey.expires || (Date.parse(publicKey.expires) > Date.now()) + if (!validPublicKey) { + throw Object.assign(new Error( + `${mani._id} has attestations with keyid: ${keyid} ` + + `but the corresponding public key has expired ${publicKey.expires}` + ), { code: 'EEXPIREDSIGNATUREKEY' }) + } + } + + const subject = { + name: statement.subject[0].name, + sha512: statement.subject[0].digest.sha512, + } + + // Only type 'version' can be turned into a PURL + const purl = this.spec.type === 'version' ? npa.toPurl(this.spec) : this.spec + // Verify the statement subject matches the package, version + if (subject.name !== purl) { + throw Object.assign(new Error( + `${mani._id} package name and version (PURL): ${purl} ` + + `doesn't match what was signed: ${subject.name}` + ), { code: 'EATTESTATIONSUBJECT' }) + } + + // Verify the statement subject matches the tarball integrity + const integrityHexDigest = ssri.parse(this.integrity).hexDigest() + if (subject.sha512 !== integrityHexDigest) { + throw Object.assign(new Error( + `${mani._id} package integrity (hex digest): ` + + `${integrityHexDigest} ` + + `doesn't match what was signed: ${subject.sha512}` + ), { code: 'EATTESTATIONSUBJECT' }) + } + + try { + // Provenance attestations are signed with a signing certificate + // (including the key) so we don't need to return a public key. + // + // Publish attestations are signed with a keyid so we need to + // specify a public key from the keys endpoint: `registry-host.tld/-/npm/v1/keys` + const options = { + tufCachePath: this.tufCache, + keySelector: publicKey ? 
() => publicKey.pemkey : undefined, + } + await sigstore.verify(bundle, null, options) + } catch (e) { + throw Object.assign(new Error( + `${mani._id} failed to verify attestation: ${e.message}` + ), { + code: 'EATTESTATIONVERIFY', + predicateType, + keyid, + signature, + resolved: mani._resolved, + integrity: mani._integrity, + }) + } + } + mani._attestations = dist.attestations + } else { + mani._attestations = dist.attestations + } + } + } + + this.package = mani + return this.package + } + + [_tarballFromResolved] () { + // we use a RemoteFetcher to get the actual tarball stream + return new RemoteFetcher(this.resolved, { + ...this.opts, + resolved: this.resolved, + pkgid: `registry:${this.spec.name}@${this.resolved}`, + })[_tarballFromResolved]() + } + + get types () { + return [ + 'tag', + 'version', + 'range', + ] + } +} +module.exports = RegistryFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js new file mode 100644 index 0000000000000..fd617459fb031 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js @@ -0,0 +1,91 @@ +const Fetcher = require('./fetcher.js') +const FileFetcher = require('./file.js') +const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') +const pacoteVersion = require('../package.json').version +const fetch = require('npm-registry-fetch') +const { Minipass } = require('minipass') + +const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches') +const _headers = Symbol('_headers') +class RemoteFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + this.resolved = this.spec.fetchSpec + const resolvedURL = new URL(this.resolved) + if (this.replaceRegistryHost !== 'never' + && (this.replaceRegistryHost === 'always' + || this.replaceRegistryHost === resolvedURL.host)) { + this.resolved = new URL(resolvedURL.pathname, this.registry).href + } + + // nam is a fermented pork sausage that is good to eat + const nameat = this.spec.name ? `${this.spec.name}@` : '' + this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}` + } + + // Don't need to cache tarball fetches in pacote, because make-fetch-happen + // will write into cacache anyway. + get [_cacheFetches] () { + return false + } + + [_tarballFromResolved] () { + const stream = new Minipass() + stream.hasIntegrityEmitter = true + + const fetchOpts = { + ...this.opts, + headers: this[_headers](), + spec: this.spec, + integrity: this.integrity, + algorithms: [this.pickIntegrityAlgorithm()], + } + + // eslint-disable-next-line promise/always-return + fetch(this.resolved, fetchOpts).then(res => { + res.body.on('error', + /* istanbul ignore next - exceedingly rare and hard to simulate */ + er => stream.emit('error', er) + ) + + res.body.on('integrity', i => { + this.integrity = i + stream.emit('integrity', i) + }) + + res.body.pipe(stream) + }).catch(er => stream.emit('error', er)) + + return stream + } + + [_headers] () { + return { + // npm will override this, but ensure that we always send *something* + 'user-agent': this.opts.userAgent || + `pacote/${pacoteVersion} node/${process.version}`, + ...(this.opts.headers || {}), + 'pacote-version': pacoteVersion, + 'pacote-req-type': 'tarball', + 'pacote-pkg-id': this.pkgid, + ...(this.integrity ? 
{ 'pacote-integrity': String(this.integrity) } + : {}), + ...(this.opts.headers || {}), + } + } + + get types () { + return ['remote'] + } + + // getting a packument and/or manifest is the same as with a file: spec. + // unpack the tarball stream, and then read from the package.json file. + packument () { + return FileFetcher.prototype.packument.apply(this) + } + + manifest () { + return FileFetcher.prototype.manifest.apply(this) + } +} +module.exports = RemoteFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js new file mode 100644 index 0000000000000..843fe5b600caf --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js @@ -0,0 +1,15 @@ +// add a sha to a git remote url spec +const addGitSha = (spec, sha) => { + if (spec.hosted) { + const h = spec.hosted + const opt = { noCommittish: true } + const base = h.https && h.auth ? h.https(opt) : h.shortcut(opt) + + return `${base}#${sha}` + } else { + // don't use new URL for this, because it doesn't handle scp urls + return spec.rawSpec.replace(/#.*$/, '') + `#${sha}` + } +} + +module.exports = addGitSha diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js new file mode 100644 index 0000000000000..ac83b1793f199 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js @@ -0,0 +1,15 @@ +const os = require('os') +const { resolve } = require('path') + +module.exports = (fakePlatform = false) => { + const temp = os.tmpdir() + const uidOrPid = process.getuid ? process.getuid() : process.pid + const home = os.homedir() || resolve(temp, 'npm-' + uidOrPid) + const platform = fakePlatform || process.platform + const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm' + const cacheRoot = (platform === 'win32' && process.env.LOCALAPPDATA) || home + return { + cacache: resolve(cacheRoot, cacheExtra, '_cacache'), + tufcache: resolve(cacheRoot, cacheExtra, '_tuf'), + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js new file mode 100644 index 0000000000000..49a3f73f537ce --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js @@ -0,0 +1,25 @@ +// Function to determine whether a path is in the package.bin set. +// Used to prevent issues when people publish a package from a +// windows machine, and then install with --no-bin-links. +// +// Note: this is not possible in remote or file fetchers, since +// we don't have the manifest until AFTER we've unpacked. But the +// main use case is registry fetching with git a distant second, +// so that's an acceptable edge case to not handle. + +const binObj = (name, bin) => + typeof bin === 'string' ? { [name]: bin } : bin + +const hasBin = (pkg, path) => { + const bin = binObj(pkg.name, pkg.bin) + const p = path.replace(/^[^\\/]*\//, '') + for (const kv of Object.entries(bin)) { + if (kv[1] === p) { + return true + } + } + return false +} + +module.exports = (pkg, path) => + pkg && pkg.bin ? 
hasBin(pkg, path) : false diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js new file mode 100644 index 0000000000000..a3005c255565f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js @@ -0,0 +1,14 @@ +// run an npm command +const spawn = require('@npmcli/promise-spawn') + +module.exports = (npmBin, npmCommand, cwd, env, extra) => { + const isJS = npmBin.endsWith('.js') + const cmd = isJS ? process.execPath : npmBin + const args = (isJS ? [npmBin] : []).concat(npmCommand) + // when installing to run the `prepare` script for a git dep, we need + // to ensure that we don't run into a cycle of checking out packages + // in temp directories. this lets us link previously-seen repos that + // are also being prepared. + + return spawn(cmd, args, { cwd, env }, extra) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js new file mode 100644 index 0000000000000..d070f0f7ba2d4 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js @@ -0,0 +1,31 @@ +const isPackageBin = require('./is-package-bin.js') + +const tarCreateOptions = manifest => ({ + cwd: manifest._resolved, + prefix: 'package/', + portable: true, + gzip: { + // forcing the level to 9 seems to avoid some + // platform specific optimizations that cause + // integrity mismatch errors due to differing + // end results after compression + level: 9, + }, + + // ensure that package bins are always executable + // Note that npm-packlist is already filtering out + // anything that is not a regular file, ignored by + // .npmignore or package.json "files", etc. + filter: (path, stat) => { + if (isPackageBin(manifest, path)) { + stat.mode |= 0o111 + } + return true + }, + + // Provide a specific date in the 1980s for the benefit of zip, + // which is confounded by files dated at the Unix epoch 0. 
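+  // (zip stores DOS-style timestamps, which cannot represent dates
+  // before 1980)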
+ mtime: new Date('1985-10-26T08:15:00.000Z'), +}) + +module.exports = tarCreateOptions diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js new file mode 100644 index 0000000000000..c50cb6173b92e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js @@ -0,0 +1,10 @@ +const removeTrailingSlashes = (input) => { + // in order to avoid regexp redos detection + let output = input + while (output.endsWith('/')) { + output = output.slice(0, -1) + } + return output +} + +module.exports = removeTrailingSlashes diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json new file mode 100644 index 0000000000000..bc8d984704af5 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json @@ -0,0 +1,79 @@ +{ + "name": "pacote", + "version": "15.2.0", + "description": "JavaScript package downloader", + "author": "GitHub Inc.", + "bin": { + "pacote": "lib/bin.js" + }, + "license": "ISC", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "tap": { + "timeout": 300, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0", + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.14.1", + "hosted-git-info": "^6.0.0", + "mutate-fs": "^2.1.1", + "nock": "^13.2.4", + "npm-registry-mock": "^1.3.2", + "tap": "^16.0.1" + }, + "files": [ + "bin/", + "lib/" + ], + "keywords": [ + "packages", + "npm", + "git" + ], + "dependencies": { + "@npmcli/git": "^4.0.0", + "@npmcli/installed-package-contents": "^2.0.1", + "@npmcli/promise-spawn": "^6.0.1", + "@npmcli/run-script": "^6.0.0", + "cacache": "^17.0.0", + "fs-minipass": "^3.0.0", + "minipass": "^5.0.0", + "npm-package-arg": "^10.0.0", + "npm-packlist": "^7.0.0", + "npm-pick-manifest": "^8.0.0", + "npm-registry-fetch": "^14.0.0", + "proc-log": "^3.0.0", + "promise-retry": "^2.0.1", + "read-package-json": "^6.0.0", + "read-package-json-fast": "^3.0.0", + "sigstore": "^1.3.0", + "ssri": "^10.0.0", + "tar": "^6.1.11" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/pacote.git" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.14.1", + "windowsCI": false, + "publish": "true" + } +} diff --git a/node_modules/pacote/node_modules/minipass/LICENSE b/node_modules/pacote/node_modules/minipass/LICENSE new file mode 100644 index 0000000000000..97f8e32ed82e4 --- /dev/null +++ b/node_modules/pacote/node_modules/minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pacote/node_modules/minipass/dist/cjs/index.js b/node_modules/pacote/node_modules/minipass/dist/cjs/index.js new file mode 100644 index 0000000000000..068c095b69793 --- /dev/null +++ b/node_modules/pacote/node_modules/minipass/dist/cjs/index.js @@ -0,0 +1,1028 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0; +const proc = typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + }; +const node_events_1 = require("node:events"); +const node_stream_1 = __importDefault(require("node:stream")); +const node_string_decoder_1 = require("node:string_decoder"); +/** + * Return true if the argument is a Minipass stream, Node stream, or something + * else that Minipass can interact with. + */ +const isStream = (s) => !!s && + typeof s === 'object' && + (s instanceof Minipass || + s instanceof node_stream_1.default || + (0, exports.isReadable)(s) || + (0, exports.isWritable)(s)); +exports.isStream = isStream; +/** + * Return true if the argument is a valid {@link Minipass.Readable} + */ +const isReadable = (s) => !!s && + typeof s === 'object' && + s instanceof node_events_1.EventEmitter && + typeof s.pipe === 'function' && + // node core Writable streams have a pipe() method, but it throws + s.pipe !== node_stream_1.default.Writable.prototype.pipe; +exports.isReadable = isReadable; +/** + * Return true if the argument is a valid {@link Minipass.Writable} + */ +const isWritable = (s) => !!s && + typeof s === 'object' && + s instanceof node_events_1.EventEmitter && + typeof s.write === 'function' && + typeof s.end === 'function'; +exports.isWritable = isWritable; +const EOF = Symbol('EOF'); +const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); +const EMITTED_END = Symbol('emittedEnd'); +const EMITTING_END = Symbol('emittingEnd'); +const EMITTED_ERROR = Symbol('emittedError'); +const CLOSED = Symbol('closed'); +const READ = Symbol('read'); +const FLUSH = Symbol('flush'); +const FLUSHCHUNK = Symbol('flushChunk'); +const ENCODING = Symbol('encoding'); +const DECODER = Symbol('decoder'); +const FLOWING = Symbol('flowing'); +const PAUSED = Symbol('paused'); +const RESUME = Symbol('resume'); +const BUFFER = Symbol('buffer'); +const PIPES = Symbol('pipes'); +const BUFFERLENGTH = Symbol('bufferLength'); +const BUFFERPUSH = Symbol('bufferPush'); +const BUFFERSHIFT = Symbol('bufferShift'); +const OBJECTMODE = Symbol('objectMode'); +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed'); +// internal event when stream has an error +const ERROR = Symbol('error'); +const EMITDATA = Symbol('emitData'); +const EMITEND = Symbol('emitEnd'); +const EMITEND2 = Symbol('emitEnd2'); +const ASYNC = Symbol('async'); +const ABORT = Symbol('abort'); +const ABORTED = Symbol('aborted'); +const SIGNAL = 
Symbol('signal'); +const DATALISTENERS = Symbol('dataListeners'); +const DISCARDED = Symbol('discarded'); +const defer = (fn) => Promise.resolve().then(fn); +const nodefer = (fn) => fn(); +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; +const isArrayBufferLike = (b) => b instanceof ArrayBuffer || + (!!b && + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0); +const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); +/** + * Internal class representing a pipe to a destination stream. + * + * @internal + */ +class Pipe { + src; + dest; + opts; + ondrain; + constructor(src, dest, opts) { + this.src = src; + this.dest = dest; + this.opts = opts; + this.ondrain = () => src[RESUME](); + this.dest.on('drain', this.ondrain); + } + unpipe() { + this.dest.removeListener('drain', this.ondrain); + } + // only here for the prototype + /* c8 ignore start */ + proxyErrors(_er) { } + /* c8 ignore stop */ + end() { + this.unpipe(); + if (this.opts.end) + this.dest.end(); + } +} +/** + * Internal class representing a pipe to a destination stream where + * errors are proxied. + * + * @internal + */ +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors); + super.unpipe(); + } + constructor(src, dest, opts) { + super(src, dest, opts); + this.proxyErrors = er => dest.emit('error', er); + src.on('error', this.proxyErrors); + } +} +const isObjectModeOptions = (o) => !!o.objectMode; +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; +/** + * Main export, the Minipass class + * + * `RType` is the type of data emitted, defaults to Buffer + * + * `WType` is the type of data to be written, if RType is buffer or string, + * then any {@link Minipass.ContiguousData} is allowed. + * + * `Events` is the set of event handler signatures that this object + * will emit, see {@link Minipass.Events} + */ +class Minipass extends node_events_1.EventEmitter { + [FLOWING] = false; + [PAUSED] = false; + [PIPES] = []; + [BUFFER] = []; + [OBJECTMODE]; + [ENCODING]; + [ASYNC]; + [DECODER]; + [EOF] = false; + [EMITTED_END] = false; + [EMITTING_END] = false; + [CLOSED] = false; + [EMITTED_ERROR] = null; + [BUFFERLENGTH] = 0; + [DESTROYED] = false; + [SIGNAL]; + [ABORTED] = false; + [DATALISTENERS] = 0; + [DISCARDED] = false; + /** + * true if the stream can be written + */ + writable = true; + /** + * true if the stream can be read + */ + readable = true; + /** + * If `RType` is Buffer, then options do not need to be provided. + * Otherwise, an options object must be provided to specify either + * {@link Minipass.SharedOptions.objectMode} or + * {@link Minipass.SharedOptions.encoding}, as appropriate. + */ + constructor(...args) { + const options = (args[0] || + {}); + super(); + if (options.objectMode && typeof options.encoding === 'string') { + throw new TypeError('Encoding and objectMode may not be used together'); + } + if (isObjectModeOptions(options)) { + this[OBJECTMODE] = true; + this[ENCODING] = null; + } + else if (isEncodingOptions(options)) { + this[ENCODING] = options.encoding; + this[OBJECTMODE] = false; + } + else { + this[OBJECTMODE] = false; + this[ENCODING] = null; + } + this[ASYNC] = !!options.async; + this[DECODER] = this[ENCODING] + ? 
new node_string_decoder_1.StringDecoder(this[ENCODING]) + : null; + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); + } + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); + } + const { signal } = options; + if (signal) { + this[SIGNAL] = signal; + if (signal.aborted) { + this[ABORT](); + } + else { + signal.addEventListener('abort', () => this[ABORT]()); + } + } + } + /** + * The amount of data stored in the buffer waiting to be read. + * + * For Buffer strings, this will be the total byte length. + * For string encoding streams, this will be the string character length, + * according to JavaScript's `string.length` logic. + * For objectMode streams, this is a count of the items waiting to be + * emitted. + */ + get bufferLength() { + return this[BUFFERLENGTH]; + } + /** + * The `BufferEncoding` currently in use, or `null` + */ + get encoding() { + return this[ENCODING]; + } + /** + * @deprecated - This is a read only property + */ + set encoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * @deprecated - Encoding may only be set at instantiation time + */ + setEncoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * True if this is an objectMode stream + */ + get objectMode() { + return this[OBJECTMODE]; + } + /** + * @deprecated - This is a read-only property + */ + set objectMode(_om) { + throw new Error('objectMode must be set at instantiation time'); + } + /** + * true if this is an async stream + */ + get ['async']() { + return this[ASYNC]; + } + /** + * Set to true to make this stream async. + * + * Once set, it cannot be unset, as this would potentially cause incorrect + * behavior. Ie, a sync stream can be made async, but an async stream + * cannot be safely made sync. + */ + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a; + } + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true; + this.emit('abort', this[SIGNAL]?.reason); + this.destroy(this[SIGNAL]?.reason); + } + /** + * True if the stream has been aborted. + */ + get aborted() { + return this[ABORTED]; + } + /** + * No-op setter. Stream aborted status is set via the AbortSignal provided + * in the constructor options. + */ + set aborted(_) { } + write(chunk, encoding, cb) { + if (this[ABORTED]) + return false; + if (this[EOF]) + throw new Error('write after end'); + if (this[DESTROYED]) { + this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); + return true; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (!encoding) + encoding = 'utf8'; + const fn = this[ASYNC] ? defer : nodefer; + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! 
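+        // e.g. a Uint8Array passed to write() is re-wrapped as a Buffer over
+        // the same underlying memory, with no copy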
+        // leave strings and buffers as-is
+        // anything else is only allowed in object mode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? 
this[BUFFER].join('') + : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), + ]; + } + const ret = this[READ](n || null, this[BUFFER][0]); + this[MAYBE_EMIT_END](); + return ret; + } + [READ](n, chunk) { + if (this[OBJECTMODE]) + this[BUFFERSHIFT](); + else { + const c = chunk; + if (n === c.length || n === null) + this[BUFFERSHIFT](); + else if (typeof c === 'string') { + this[BUFFER][0] = c.slice(n); + chunk = c.slice(0, n); + this[BUFFERLENGTH] -= n; + } + else { + this[BUFFER][0] = c.subarray(n); + chunk = c.subarray(0, n); + this[BUFFERLENGTH] -= n; + } + } + this.emit('data', chunk); + if (!this[BUFFER].length && !this[EOF]) + this.emit('drain'); + return chunk; + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (chunk !== undefined) + this.write(chunk, encoding); + if (cb) + this.once('end', cb); + this[EOF] = true; + this.writable = false; + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. + if (this[FLOWING] || !this[PAUSED]) + this[MAYBE_EMIT_END](); + return this; + } + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) + return; + if (!this[DATALISTENERS] && !this[PIPES].length) { + this[DISCARDED] = true; + } + this[PAUSED] = false; + this[FLOWING] = true; + this.emit('resume'); + if (this[BUFFER].length) + this[FLUSH](); + else if (this[EOF]) + this[MAYBE_EMIT_END](); + else + this.emit('drain'); + } + /** + * Resume the stream if it is currently in a paused state + * + * If called when there are no pipe destinations or `data` event listeners, + * this will place the stream in a "discarded" state, where all data will + * be thrown away. The discarded state is removed if a pipe destination or + * data handler is added, if pause() is called, or if any synchronous or + * asynchronous iteration is started. + */ + resume() { + return this[RESUME](); + } + /** + * Pause the stream + */ + pause() { + this[FLOWING] = false; + this[PAUSED] = true; + this[DISCARDED] = false; + } + /** + * true if the stream has been forcibly destroyed + */ + get destroyed() { + return this[DESTROYED]; + } + /** + * true if the stream is currently in a flowing state, meaning that + * any writes will be immediately emitted. + */ + get flowing() { + return this[FLOWING]; + } + /** + * true if the stream is currently in a paused state + */ + get paused() { + return this[PAUSED]; + } + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1; + else + this[BUFFERLENGTH] += chunk.length; + this[BUFFER].push(chunk); + } + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1; + else + this[BUFFERLENGTH] -= this[BUFFER][0].length; + return this[BUFFER].shift(); + } + [FLUSH](noDrain = false) { + do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && + this[BUFFER].length); + if (!noDrain && !this[BUFFER].length && !this[EOF]) + this.emit('drain'); + } + [FLUSHCHUNK](chunk) { + this.emit('data', chunk); + return this[FLOWING]; + } + /** + * Pipe all data emitted by this stream into the destination provided. + * + * Triggers the flow of data. 
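+     *
+     * For example, `src.pipe(dest)` calls `dest.end()` when this stream
+     * ends, while `src.pipe(dest, { end: false, proxyErrors: true })`
+     * leaves dest open and re-emits this stream's 'error' events on dest.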
+ */ + pipe(dest, opts) { + if (this[DESTROYED]) + return dest; + this[DISCARDED] = false; + const ended = this[EMITTED_END]; + opts = opts || {}; + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false; + else + opts.end = opts.end !== false; + opts.proxyErrors = !!opts.proxyErrors; + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end(); + } + else { + // "as" here just ignores the WType, which pipes don't care about, + // since they're only consuming from us, and writing to the dest + this[PIPES].push(!opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)); + if (this[ASYNC]) + defer(() => this[RESUME]()); + else + this[RESUME](); + } + return dest; + } + /** + * Fully unhook a piped destination stream. + * + * If the destination stream was the only consumer of this stream (ie, + * there are no other piped destinations or `'data'` event listeners) + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest); + if (p) { + if (this[PIPES].length === 1) { + if (this[FLOWING] && this[DATALISTENERS] === 0) { + this[FLOWING] = false; + } + this[PIPES] = []; + } + else + this[PIPES].splice(this[PIPES].indexOf(p), 1); + p.unpipe(); + } + } + /** + * Alias for {@link Minipass#on} + */ + addListener(ev, handler) { + return this.on(ev, handler); + } + /** + * Mostly identical to `EventEmitter.on`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * - Adding a 'data' event handler will trigger the flow of data + * + * - Adding a 'readable' event handler when there is data waiting to be read + * will cause 'readable' to be emitted immediately. + * + * - Adding an 'endish' event handler ('end', 'finish', etc.) which has + * already passed will cause the event to be emitted immediately and all + * handlers removed. + * + * - Adding an 'error' event handler after an error has been emitted will + * cause the event to be re-emitted immediately with the error previously + * raised. + */ + on(ev, handler) { + const ret = super.on(ev, handler); + if (ev === 'data') { + this[DISCARDED] = false; + this[DATALISTENERS]++; + if (!this[PIPES].length && !this[FLOWING]) { + this[RESUME](); + } + } + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { + super.emit('readable'); + } + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev); + this.removeAllListeners(ev); + } + else if (ev === 'error' && this[EMITTED_ERROR]) { + const h = handler; + if (this[ASYNC]) + defer(() => h.call(this, this[EMITTED_ERROR])); + else + h.call(this, this[EMITTED_ERROR]); + } + return ret; + } + /** + * Alias for {@link Minipass#off} + */ + removeListener(ev, handler) { + return this.off(ev, handler); + } + /** + * Mostly identical to `EventEmitter.off` + * + * If a 'data' event handler is removed, and it was the last consumer + * (ie, there are no pipe destinations or other 'data' event listeners), + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + off(ev, handler) { + const ret = super.off(ev, handler); + // if we previously had listeners, and now we don't, and we don't + // have any pipes, then stop the flow, unless it's been explicitly + // put in a discarded flowing state via stream.resume(). 
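+        // e.g. off('data', fn) removing the final 'data' listener on an
+        // unpiped stream stops the flow until a new consumer shows up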
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? 
super.emit('error', data) + : false; + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'resume') { + const ret = super.emit('resume'); + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev); + this.removeAllListeners(ev); + return ret; + } + // Some other unknown event + const ret = super.emit(ev, ...args); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) + this.pause(); + } + const ret = this[DISCARDED] ? false : super.emit('data', data); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITEND]() { + if (this[EMITTED_END]) + return false; + this[EMITTED_END] = true; + this.readable = false; + return this[ASYNC] + ? (defer(() => this[EMITEND2]()), true) + : this[EMITEND2](); + } + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end(); + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data); + } + if (!this[DISCARDED]) + super.emit('data', data); + } + } + for (const p of this[PIPES]) { + p.end(); + } + const ret = super.emit('end'); + this.removeAllListeners('end'); + return ret; + } + /** + * Return a Promise that resolves to an array of all emitted data once + * the stream ends. + */ + async collect() { + const buf = Object.assign([], { + dataLength: 0, + }); + if (!this[OBJECTMODE]) + buf.dataLength = 0; + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise(); + this.on('data', c => { + buf.push(c); + if (!this[OBJECTMODE]) + buf.dataLength += c.length; + }); + await p; + return buf; + } + /** + * Return a Promise that resolves to the concatenation of all emitted data + * once the stream ends. + * + * Not allowed on objectMode streams. + */ + async concat() { + if (this[OBJECTMODE]) { + throw new Error('cannot concat in objectMode'); + } + const buf = await this.collect(); + return (this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength)); + } + /** + * Return a void Promise that resolves once the stream ends. + */ + async promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))); + this.on('error', er => reject(er)); + this.on('end', () => resolve()); + }); + } + /** + * Asynchronous `for await of` iteration. + * + * This will continue emitting all chunks until the stream terminates. + */ + [Symbol.asyncIterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. 
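+        // e.g. `for await (const chunk of stream) { ... }`; breaking out of
+        // the loop triggers the iterator's return(), which pauses the stream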
+ this[DISCARDED] = false; + let stopped = false; + const stop = async () => { + this.pause(); + stopped = true; + return { value: undefined, done: true }; + }; + const next = () => { + if (stopped) + return stop(); + const res = this.read(); + if (res !== null) + return Promise.resolve({ done: false, value: res }); + if (this[EOF]) + return stop(); + let resolve; + let reject; + const onerr = (er) => { + this.off('data', ondata); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + stop(); + reject(er); + }; + const ondata = (value) => { + this.off('error', onerr); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + this.pause(); + resolve({ value, done: !!this[EOF] }); + }; + const onend = () => { + this.off('error', onerr); + this.off('data', ondata); + this.off(DESTROYED, ondestroy); + stop(); + resolve({ done: true, value: undefined }); + }; + const ondestroy = () => onerr(new Error('stream destroyed')); + return new Promise((res, rej) => { + reject = rej; + resolve = res; + this.once(DESTROYED, ondestroy); + this.once('error', onerr); + this.once('end', onend); + this.once('data', ondata); + }); + }; + return { + next, + throw: stop, + return: stop, + [Symbol.asyncIterator]() { + return this; + }, + }; + } + /** + * Synchronous `for of` iteration. + * + * The iteration will terminate when the internal buffer runs out, even + * if the stream has not yet terminated. + */ + [Symbol.iterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. + this[DISCARDED] = false; + let stopped = false; + const stop = () => { + this.pause(); + this.off(ERROR, stop); + this.off(DESTROYED, stop); + this.off('end', stop); + stopped = true; + return { done: true, value: undefined }; + }; + const next = () => { + if (stopped) + return stop(); + const value = this.read(); + return value === null ? stop() : { done: false, value }; + }; + this.once('end', stop); + this.once(ERROR, stop); + this.once(DESTROYED, stop); + return { + next, + throw: stop, + return: stop, + [Symbol.iterator]() { + return this; + }, + }; + } + /** + * Destroy a stream, preventing it from being used for any further purpose. + * + * If the stream has a `close()` method, then it will be called on + * destruction. + * + * After destruction, any attempt to write data, read data, or emit most + * events will be ignored. + * + * If an error argument is provided, then it will be emitted in an + * 'error' event. + */ + destroy(er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er); + else + this.emit(DESTROYED); + return this; + } + this[DESTROYED] = true; + this[DISCARDED] = true; + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0; + this[BUFFERLENGTH] = 0; + const wc = this; + if (typeof wc.close === 'function' && !this[CLOSED]) + wc.close(); + if (er) + this.emit('error', er); + // if no error to emit, still reject pending promises + else + this.emit(DESTROYED); + return this; + } + /** + * Alias for {@link isStream} + * + * Former export location, maintained for backwards compatibility. 
+ * + * @deprecated + */ + static get isStream() { + return exports.isStream; + } +} +exports.Minipass = Minipass; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/pacote/node_modules/minipass/dist/cjs/package.json b/node_modules/pacote/node_modules/minipass/dist/cjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/pacote/node_modules/minipass/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/pacote/node_modules/minipass/dist/mjs/index.js b/node_modules/pacote/node_modules/minipass/dist/mjs/index.js new file mode 100644 index 0000000000000..b5fa4513c9083 --- /dev/null +++ b/node_modules/pacote/node_modules/minipass/dist/mjs/index.js @@ -0,0 +1,1018 @@ +const proc = typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + }; +import { EventEmitter } from 'node:events'; +import Stream from 'node:stream'; +import { StringDecoder } from 'node:string_decoder'; +/** + * Return true if the argument is a Minipass stream, Node stream, or something + * else that Minipass can interact with. + */ +export const isStream = (s) => !!s && + typeof s === 'object' && + (s instanceof Minipass || + s instanceof Stream || + isReadable(s) || + isWritable(s)); +/** + * Return true if the argument is a valid {@link Minipass.Readable} + */ +export const isReadable = (s) => !!s && + typeof s === 'object' && + s instanceof EventEmitter && + typeof s.pipe === 'function' && + // node core Writable streams have a pipe() method, but it throws + s.pipe !== Stream.Writable.prototype.pipe; +/** + * Return true if the argument is a valid {@link Minipass.Writable} + */ +export const isWritable = (s) => !!s && + typeof s === 'object' && + s instanceof EventEmitter && + typeof s.write === 'function' && + typeof s.end === 'function'; +const EOF = Symbol('EOF'); +const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); +const EMITTED_END = Symbol('emittedEnd'); +const EMITTING_END = Symbol('emittingEnd'); +const EMITTED_ERROR = Symbol('emittedError'); +const CLOSED = Symbol('closed'); +const READ = Symbol('read'); +const FLUSH = Symbol('flush'); +const FLUSHCHUNK = Symbol('flushChunk'); +const ENCODING = Symbol('encoding'); +const DECODER = Symbol('decoder'); +const FLOWING = Symbol('flowing'); +const PAUSED = Symbol('paused'); +const RESUME = Symbol('resume'); +const BUFFER = Symbol('buffer'); +const PIPES = Symbol('pipes'); +const BUFFERLENGTH = Symbol('bufferLength'); +const BUFFERPUSH = Symbol('bufferPush'); +const BUFFERSHIFT = Symbol('bufferShift'); +const OBJECTMODE = Symbol('objectMode'); +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed'); +// internal event when stream has an error +const ERROR = Symbol('error'); +const EMITDATA = Symbol('emitData'); +const EMITEND = Symbol('emitEnd'); +const EMITEND2 = Symbol('emitEnd2'); +const ASYNC = Symbol('async'); +const ABORT = Symbol('abort'); +const ABORTED = Symbol('aborted'); +const SIGNAL = Symbol('signal'); +const DATALISTENERS = Symbol('dataListeners'); +const DISCARDED = Symbol('discarded'); +const defer = (fn) => Promise.resolve().then(fn); +const nodefer = (fn) => fn(); +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; +const isArrayBufferLike = (b) => b instanceof ArrayBuffer || + (!!b && + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0); +const isArrayBufferView = (b) => !Buffer.isBuffer(b) 
&& ArrayBuffer.isView(b); +/** + * Internal class representing a pipe to a destination stream. + * + * @internal + */ +class Pipe { + src; + dest; + opts; + ondrain; + constructor(src, dest, opts) { + this.src = src; + this.dest = dest; + this.opts = opts; + this.ondrain = () => src[RESUME](); + this.dest.on('drain', this.ondrain); + } + unpipe() { + this.dest.removeListener('drain', this.ondrain); + } + // only here for the prototype + /* c8 ignore start */ + proxyErrors(_er) { } + /* c8 ignore stop */ + end() { + this.unpipe(); + if (this.opts.end) + this.dest.end(); + } +} +/** + * Internal class representing a pipe to a destination stream where + * errors are proxied. + * + * @internal + */ +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors); + super.unpipe(); + } + constructor(src, dest, opts) { + super(src, dest, opts); + this.proxyErrors = er => dest.emit('error', er); + src.on('error', this.proxyErrors); + } +} +const isObjectModeOptions = (o) => !!o.objectMode; +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; +/** + * Main export, the Minipass class + * + * `RType` is the type of data emitted, defaults to Buffer + * + * `WType` is the type of data to be written, if RType is buffer or string, + * then any {@link Minipass.ContiguousData} is allowed. + * + * `Events` is the set of event handler signatures that this object + * will emit, see {@link Minipass.Events} + */ +export class Minipass extends EventEmitter { + [FLOWING] = false; + [PAUSED] = false; + [PIPES] = []; + [BUFFER] = []; + [OBJECTMODE]; + [ENCODING]; + [ASYNC]; + [DECODER]; + [EOF] = false; + [EMITTED_END] = false; + [EMITTING_END] = false; + [CLOSED] = false; + [EMITTED_ERROR] = null; + [BUFFERLENGTH] = 0; + [DESTROYED] = false; + [SIGNAL]; + [ABORTED] = false; + [DATALISTENERS] = 0; + [DISCARDED] = false; + /** + * true if the stream can be written + */ + writable = true; + /** + * true if the stream can be read + */ + readable = true; + /** + * If `RType` is Buffer, then options do not need to be provided. + * Otherwise, an options object must be provided to specify either + * {@link Minipass.SharedOptions.objectMode} or + * {@link Minipass.SharedOptions.encoding}, as appropriate. + */ + constructor(...args) { + const options = (args[0] || + {}); + super(); + if (options.objectMode && typeof options.encoding === 'string') { + throw new TypeError('Encoding and objectMode may not be used together'); + } + if (isObjectModeOptions(options)) { + this[OBJECTMODE] = true; + this[ENCODING] = null; + } + else if (isEncodingOptions(options)) { + this[ENCODING] = options.encoding; + this[OBJECTMODE] = false; + } + else { + this[OBJECTMODE] = false; + this[ENCODING] = null; + } + this[ASYNC] = !!options.async; + this[DECODER] = this[ENCODING] + ? new StringDecoder(this[ENCODING]) + : null; + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); + } + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); + } + const { signal } = options; + if (signal) { + this[SIGNAL] = signal; + if (signal.aborted) { + this[ABORT](); + } + else { + signal.addEventListener('abort', () => this[ABORT]()); + } + } + } + /** + * The amount of data stored in the buffer waiting to be read. 
+     *
+     * For Buffer streams, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior. Ie, a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything else is only allowed in object mode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes MP more suitable to write-only use cases.
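+        // For example (illustrative): an empty stream emits 'end' with no
+        // reader attached, and a late listener still gets the event:
+        //
+        //     const mp = new Minipass()
+        //     mp.end()
+        //     mp.on('end', () => console.log('ended')) // fires right away
+        //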
+ if (this[FLOWING] || !this[PAUSED]) + this[MAYBE_EMIT_END](); + return this; + } + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) + return; + if (!this[DATALISTENERS] && !this[PIPES].length) { + this[DISCARDED] = true; + } + this[PAUSED] = false; + this[FLOWING] = true; + this.emit('resume'); + if (this[BUFFER].length) + this[FLUSH](); + else if (this[EOF]) + this[MAYBE_EMIT_END](); + else + this.emit('drain'); + } + /** + * Resume the stream if it is currently in a paused state + * + * If called when there are no pipe destinations or `data` event listeners, + * this will place the stream in a "discarded" state, where all data will + * be thrown away. The discarded state is removed if a pipe destination or + * data handler is added, if pause() is called, or if any synchronous or + * asynchronous iteration is started. + */ + resume() { + return this[RESUME](); + } + /** + * Pause the stream + */ + pause() { + this[FLOWING] = false; + this[PAUSED] = true; + this[DISCARDED] = false; + } + /** + * true if the stream has been forcibly destroyed + */ + get destroyed() { + return this[DESTROYED]; + } + /** + * true if the stream is currently in a flowing state, meaning that + * any writes will be immediately emitted. + */ + get flowing() { + return this[FLOWING]; + } + /** + * true if the stream is currently in a paused state + */ + get paused() { + return this[PAUSED]; + } + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1; + else + this[BUFFERLENGTH] += chunk.length; + this[BUFFER].push(chunk); + } + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1; + else + this[BUFFERLENGTH] -= this[BUFFER][0].length; + return this[BUFFER].shift(); + } + [FLUSH](noDrain = false) { + do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && + this[BUFFER].length); + if (!noDrain && !this[BUFFER].length && !this[EOF]) + this.emit('drain'); + } + [FLUSHCHUNK](chunk) { + this.emit('data', chunk); + return this[FLOWING]; + } + /** + * Pipe all data emitted by this stream into the destination provided. + * + * Triggers the flow of data. + */ + pipe(dest, opts) { + if (this[DESTROYED]) + return dest; + this[DISCARDED] = false; + const ended = this[EMITTED_END]; + opts = opts || {}; + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false; + else + opts.end = opts.end !== false; + opts.proxyErrors = !!opts.proxyErrors; + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end(); + } + else { + // "as" here just ignores the WType, which pipes don't care about, + // since they're only consuming from us, and writing to the dest + this[PIPES].push(!opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)); + if (this[ASYNC]) + defer(() => this[RESUME]()); + else + this[RESUME](); + } + return dest; + } + /** + * Fully unhook a piped destination stream. + * + * If the destination stream was the only consumer of this stream (ie, + * there are no other piped destinations or `'data'` event listeners) + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. 
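+     *
+     * Usage sketch (illustrative; `src` and `dest` are stand-in names):
+     *
+     *     src.pipe(dest)    // starts the flow of data into dest
+     *     src.unpipe(dest)  // detaches dest; flow stops if it was the
+     *                       // only consumer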
+ */ + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest); + if (p) { + if (this[PIPES].length === 1) { + if (this[FLOWING] && this[DATALISTENERS] === 0) { + this[FLOWING] = false; + } + this[PIPES] = []; + } + else + this[PIPES].splice(this[PIPES].indexOf(p), 1); + p.unpipe(); + } + } + /** + * Alias for {@link Minipass#on} + */ + addListener(ev, handler) { + return this.on(ev, handler); + } + /** + * Mostly identical to `EventEmitter.on`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * - Adding a 'data' event handler will trigger the flow of data + * + * - Adding a 'readable' event handler when there is data waiting to be read + * will cause 'readable' to be emitted immediately. + * + * - Adding an 'endish' event handler ('end', 'finish', etc.) which has + * already passed will cause the event to be emitted immediately and all + * handlers removed. + * + * - Adding an 'error' event handler after an error has been emitted will + * cause the event to be re-emitted immediately with the error previously + * raised. + */ + on(ev, handler) { + const ret = super.on(ev, handler); + if (ev === 'data') { + this[DISCARDED] = false; + this[DATALISTENERS]++; + if (!this[PIPES].length && !this[FLOWING]) { + this[RESUME](); + } + } + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { + super.emit('readable'); + } + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev); + this.removeAllListeners(ev); + } + else if (ev === 'error' && this[EMITTED_ERROR]) { + const h = handler; + if (this[ASYNC]) + defer(() => h.call(this, this[EMITTED_ERROR])); + else + h.call(this, this[EMITTED_ERROR]); + } + return ret; + } + /** + * Alias for {@link Minipass#off} + */ + removeListener(ev, handler) { + return this.off(ev, handler); + } + /** + * Mostly identical to `EventEmitter.off` + * + * If a 'data' event handler is removed, and it was the last consumer + * (ie, there are no pipe destinations or other 'data' event listeners), + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + off(ev, handler) { + const ret = super.off(ev, handler); + // if we previously had listeners, and now we don't, and we don't + // have any pipes, then stop the flow, unless it's been explicitly + // put in a discarded flowing state via stream.resume(). + if (ev === 'data') { + this[DATALISTENERS] = this.listeners('data').length; + if (this[DATALISTENERS] === 0 && + !this[DISCARDED] && + !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * Mostly identical to `EventEmitter.removeAllListeners` + * + * If all 'data' event handlers are removed, and they were the last consumer + * (ie, there are no pipe destinations), then the flow of data will stop + * until there is another consumer or {@link Minipass#resume} is explicitly + * called. 
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ?
(defer(() => this[EMITEND2]()), true) + : this[EMITEND2](); + } + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end(); + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data); + } + if (!this[DISCARDED]) + super.emit('data', data); + } + } + for (const p of this[PIPES]) { + p.end(); + } + const ret = super.emit('end'); + this.removeAllListeners('end'); + return ret; + } + /** + * Return a Promise that resolves to an array of all emitted data once + * the stream ends. + */ + async collect() { + const buf = Object.assign([], { + dataLength: 0, + }); + if (!this[OBJECTMODE]) + buf.dataLength = 0; + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise(); + this.on('data', c => { + buf.push(c); + if (!this[OBJECTMODE]) + buf.dataLength += c.length; + }); + await p; + return buf; + } + /** + * Return a Promise that resolves to the concatenation of all emitted data + * once the stream ends. + * + * Not allowed on objectMode streams. + */ + async concat() { + if (this[OBJECTMODE]) { + throw new Error('cannot concat in objectMode'); + } + const buf = await this.collect(); + return (this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength)); + } + /** + * Return a void Promise that resolves once the stream ends. + */ + async promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))); + this.on('error', er => reject(er)); + this.on('end', () => resolve()); + }); + } + /** + * Asynchronous `for await of` iteration. + * + * This will continue emitting all chunks until the stream terminates. + */ + [Symbol.asyncIterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. + this[DISCARDED] = false; + let stopped = false; + const stop = async () => { + this.pause(); + stopped = true; + return { value: undefined, done: true }; + }; + const next = () => { + if (stopped) + return stop(); + const res = this.read(); + if (res !== null) + return Promise.resolve({ done: false, value: res }); + if (this[EOF]) + return stop(); + let resolve; + let reject; + const onerr = (er) => { + this.off('data', ondata); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + stop(); + reject(er); + }; + const ondata = (value) => { + this.off('error', onerr); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + this.pause(); + resolve({ value, done: !!this[EOF] }); + }; + const onend = () => { + this.off('error', onerr); + this.off('data', ondata); + this.off(DESTROYED, ondestroy); + stop(); + resolve({ done: true, value: undefined }); + }; + const ondestroy = () => onerr(new Error('stream destroyed')); + return new Promise((res, rej) => { + reject = rej; + resolve = res; + this.once(DESTROYED, ondestroy); + this.once('error', onerr); + this.once('end', onend); + this.once('data', ondata); + }); + }; + return { + next, + throw: stop, + return: stop, + [Symbol.asyncIterator]() { + return this; + }, + }; + } + /** + * Synchronous `for of` iteration. + * + * The iteration will terminate when the internal buffer runs out, even + * if the stream has not yet terminated. + */ + [Symbol.iterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. 
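+        //
+        // Usage sketch (illustrative): sync iteration only drains what is
+        // already buffered; it does not wait for future writes:
+        //
+        //     const mp = new Minipass({ encoding: 'utf8' })
+        //     mp.write('a')
+        //     mp.write('b')
+        //     for (const chunk of mp) console.log(chunk) // 'ab'
+        //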
+ this[DISCARDED] = false; + let stopped = false; + const stop = () => { + this.pause(); + this.off(ERROR, stop); + this.off(DESTROYED, stop); + this.off('end', stop); + stopped = true; + return { done: true, value: undefined }; + }; + const next = () => { + if (stopped) + return stop(); + const value = this.read(); + return value === null ? stop() : { done: false, value }; + }; + this.once('end', stop); + this.once(ERROR, stop); + this.once(DESTROYED, stop); + return { + next, + throw: stop, + return: stop, + [Symbol.iterator]() { + return this; + }, + }; + } + /** + * Destroy a stream, preventing it from being used for any further purpose. + * + * If the stream has a `close()` method, then it will be called on + * destruction. + * + * After destruction, any attempt to write data, read data, or emit most + * events will be ignored. + * + * If an error argument is provided, then it will be emitted in an + * 'error' event. + */ + destroy(er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er); + else + this.emit(DESTROYED); + return this; + } + this[DESTROYED] = true; + this[DISCARDED] = true; + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0; + this[BUFFERLENGTH] = 0; + const wc = this; + if (typeof wc.close === 'function' && !this[CLOSED]) + wc.close(); + if (er) + this.emit('error', er); + // if no error to emit, still reject pending promises + else + this.emit(DESTROYED); + return this; + } + /** + * Alias for {@link isStream} + * + * Former export location, maintained for backwards compatibility. + * + * @deprecated + */ + static get isStream() { + return isStream; + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/pacote/node_modules/minipass/dist/mjs/package.json b/node_modules/pacote/node_modules/minipass/dist/mjs/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/pacote/node_modules/minipass/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/pacote/node_modules/minipass/package.json b/node_modules/pacote/node_modules/minipass/package.json new file mode 100644 index 0000000000000..355501c0a10c1 --- /dev/null +++ b/node_modules/pacote/node_modules/minipass/package.json @@ -0,0 +1,82 @@ +{ + "name": "minipass", + "version": "7.0.2", + "description": "minimal implementation of a PassThrough stream", + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/cjs/index.js", + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./package.json": "./package.json" + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . 
--loglevel warn", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--enable-source-maps", + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/node": "^20.1.2", + "@types/tap": "^15.0.8", + "c8": "^7.13.0", + "prettier": "^2.6.2", + "tap": "^16.3.0", + "ts-node": "^10.9.1", + "typedoc": "^0.24.8", + "typescript": "^5.1.3", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "sync-content": "^1.0.2", + "through2": "^2.0.3" + }, + "repository": "https://github.com/isaacs/minipass", + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } +} diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json index bc8d984704af5..d9119065bfc3d 100644 --- a/node_modules/pacote/package.json +++ b/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "15.2.0", + "version": "16.0.0", "description": "JavaScript package downloader", "author": "GitHub Inc.", "bin": { @@ -27,7 +27,7 @@ "devDependencies": { "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0", "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.18.0", "hosted-git-info": "^6.0.0", "mutate-fs": "^2.1.1", "nock": "^13.2.4", @@ -50,11 +50,11 @@ "@npmcli/run-script": "^6.0.0", "cacache": "^17.0.0", "fs-minipass": "^3.0.0", - "minipass": "^5.0.0", + "minipass": "^7.0.2", "npm-package-arg": "^10.0.0", "npm-packlist": "^7.0.0", "npm-pick-manifest": "^8.0.0", - "npm-registry-fetch": "^14.0.0", + "npm-registry-fetch": "^15.0.0", "proc-log": "^3.0.0", "promise-retry": "^2.0.1", "read-package-json": "^6.0.0", @@ -64,7 +64,7 @@ "tar": "^6.1.11" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "repository": { "type": "git", @@ -72,7 +72,13 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.14.1", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ], + "version": "4.18.0", "windowsCI": false, "publish": "true" } diff --git a/package-lock.json b/package-lock.json index c0c42086d1b01..3975fe8881dd7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -138,7 +138,7 @@ "npm-user-validate": "^2.0.0", "npmlog": "^7.0.1", "p-map": "^4.0.0", - "pacote": "^15.2.0", + "pacote": "^16.0.0", "parse-conflict-json": "^3.0.1", "proc-log": "^3.0.0", "qrcode-terminal": "^0.12.0", @@ -230,7 +230,7 @@ "json-stringify-safe": "^5.0.1", "nock": "^13.3.0", "npm-package-arg": "^10.1.0", - "pacote": "^15.0.8", + "pacote": "^16.0.0", "tap": "^16.3.4" }, "engines": { @@ -2473,6 +2473,54 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch": { + "version": "14.0.5", + "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-14.0.5.tgz", + "integrity": "sha512-kIDMIo4aBm6xg7jOttupWZamsZRkAqMqwqqbVXnUqstY5+tapvv6bkH/qMR76jdgV+YljEUCyWx3hRYMrJiAgA==", + "dependencies": { + "make-fetch-happen": "^11.0.0", + "minipass": "^5.0.0", + "minipass-fetch": "^3.0.0", + "minipass-json-stream": "^1.0.1", + "minizlib": "^2.1.2", + "npm-package-arg": "^10.0.0", + "proc-log": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/pacote": { + "version": "15.2.0", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-15.2.0.tgz", + "integrity": "sha512-rJVZeIwHTUta23sIZgEIM62WYwbmGbThdbnkt81ravBplQv+HjyroqnLRNH2+sLJHcGZmLRmhPwACqhfTcOmnA==", + "dependencies": { + "@npmcli/git": "^4.0.0", + "@npmcli/installed-package-contents": "^2.0.1", + "@npmcli/promise-spawn": "^6.0.1", + "@npmcli/run-script": "^6.0.0", + "cacache": "^17.0.0", + "fs-minipass": "^3.0.0", + "minipass": "^5.0.0", + "npm-package-arg": "^10.0.0", + "npm-packlist": "^7.0.0", + "npm-pick-manifest": "^8.0.0", + "npm-registry-fetch": "^14.0.0", + "proc-log": "^3.0.0", + "promise-retry": "^2.0.1", + "read-package-json": "^6.0.0", + "read-package-json-fast": "^3.0.0", + "sigstore": "^1.3.0", + "ssri": "^10.0.0", + "tar": "^6.1.11" + }, + "bin": { + "pacote": "lib/bin.js" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/@npmcli/mock-globals": { "resolved": "mock-globals", "link": true @@ -10219,9 +10267,9 @@ } }, "node_modules/pacote": { - "version": "15.2.0", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-15.2.0.tgz", - "integrity": "sha512-rJVZeIwHTUta23sIZgEIM62WYwbmGbThdbnkt81ravBplQv+HjyroqnLRNH2+sLJHcGZmLRmhPwACqhfTcOmnA==", + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-16.0.0.tgz", + "integrity": "sha512-tXeSsl21IUIzw/zW0rzK/po2zlI5Nqbkpu0V6Pv99f3leHde7zdv+VjuP9pnVeTVV7OvaS49u+lgmvXjQ0TMJQ==", "inBundle": true, "dependencies": { "@npmcli/git": "^4.0.0", @@ -10230,11 +10278,11 @@ "@npmcli/run-script": "^6.0.0", "cacache": "^17.0.0", "fs-minipass": "^3.0.0", - "minipass": "^5.0.0", + "minipass": "^7.0.2", "npm-package-arg": "^10.0.0", "npm-packlist": "^7.0.0", "npm-pick-manifest": "^8.0.0", - "npm-registry-fetch": "^14.0.0", + "npm-registry-fetch": "^15.0.0", "proc-log": "^3.0.0", "promise-retry": "^2.0.1", "read-package-json": "^6.0.0", @@ -10247,25 +10295,16 @@ "pacote": "lib/bin.js" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" } }, - 
"node_modules/pacote/node_modules/npm-registry-fetch": { - "version": "14.0.5", - "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-14.0.5.tgz", - "integrity": "sha512-kIDMIo4aBm6xg7jOttupWZamsZRkAqMqwqqbVXnUqstY5+tapvv6bkH/qMR76jdgV+YljEUCyWx3hRYMrJiAgA==", + "node_modules/pacote/node_modules/minipass": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.2.tgz", + "integrity": "sha512-eL79dXrE1q9dBbDCLg7xfn/vl7MS4F1gvJAgjJrQli/jbQWdUttuVawphqpffoIYfRdq78LHx6GP4bU/EQ2ATA==", "inBundle": true, - "dependencies": { - "make-fetch-happen": "^11.0.0", - "minipass": "^5.0.0", - "minipass-fetch": "^3.0.0", - "minipass-json-stream": "^1.0.1", - "minizlib": "^2.1.2", - "npm-package-arg": "^10.0.0", - "proc-log": "^3.0.0" - }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": ">=16 || 14 >=14.17" } }, "node_modules/parent-module": { @@ -15712,7 +15751,7 @@ "npm-pick-manifest": "^8.0.1", "npm-registry-fetch": "^15.0.0", "npmlog": "^7.0.1", - "pacote": "^15.0.8", + "pacote": "^16.0.0", "parse-conflict-json": "^3.0.0", "proc-log": "^3.0.0", "promise-all-reject-late": "^1.0.0", @@ -15737,7 +15776,7 @@ "tcompare": "^5.0.6" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" } }, "workspaces/config": { @@ -15779,7 +15818,7 @@ "tap": "^16.3.4" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" } }, "workspaces/libnpmdiff": { @@ -15793,7 +15832,7 @@ "diff": "^5.1.0", "minimatch": "^9.0.0", "npm-package-arg": "^10.1.0", - "pacote": "^15.0.8", + "pacote": "^16.0.0", "tar": "^6.1.13" }, "devDependencies": { @@ -15814,7 +15853,7 @@ "ci-info": "^3.7.1", "npm-package-arg": "^10.1.0", "npmlog": "^7.0.1", - "pacote": "^15.0.8", + "pacote": "^16.0.0", "proc-log": "^3.0.0", "read": "^2.0.0", "read-package-json-fast": "^3.0.2", @@ -15864,7 +15903,7 @@ "tap": "^16.3.4" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" } }, "workspaces/libnpmorg": { @@ -15882,7 +15921,7 @@ "tap": "^16.3.4" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" } }, "workspaces/libnpmpack": { @@ -15892,7 +15931,7 @@ "@npmcli/arborist": "^6.3.0", "@npmcli/run-script": "^6.0.0", "npm-package-arg": "^10.1.0", - "pacote": "^15.0.8" + "pacote": "^16.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", @@ -15927,7 +15966,7 @@ "tap": "^16.3.4" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" } }, "workspaces/libnpmsearch": { @@ -15943,7 +15982,7 @@ "tap": "^16.3.4" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" } }, "workspaces/libnpmteam": { @@ -15960,7 +15999,7 @@ "tap": "^16.3.4" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" } }, "workspaces/libnpmversion": { diff --git a/package.json b/package.json index d0e0cd2e3ea5e..8ab1f82e62799 100644 --- a/package.json +++ b/package.json @@ -103,7 +103,7 @@ "npm-user-validate": "^2.0.0", "npmlog": "^7.0.1", "p-map": "^4.0.0", - "pacote": "^15.2.0", + "pacote": "^16.0.0", "parse-conflict-json": "^3.0.1", "proc-log": "^3.0.0", "qrcode-terminal": "^0.12.0", diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index 018f77b91461c..f0595898e4a5b 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -26,7 +26,7 @@ "npm-pick-manifest": "^8.0.1", 
"npm-registry-fetch": "^15.0.0", "npmlog": "^7.0.1", - "pacote": "^15.0.8", + "pacote": "^16.0.0", "parse-conflict-json": "^3.0.0", "proc-log": "^3.0.0", "promise-all-reject-late": "^1.0.0", diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json index ce6eb3531b32e..b580aaf5bff3c 100644 --- a/workspaces/libnpmdiff/package.json +++ b/workspaces/libnpmdiff/package.json @@ -53,7 +53,7 @@ "diff": "^5.1.0", "minimatch": "^9.0.0", "npm-package-arg": "^10.1.0", - "pacote": "^15.0.8", + "pacote": "^16.0.0", "tar": "^6.1.13" }, "templateOSS": { diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json index 9b86b81a998ef..9117cc54c82f4 100644 --- a/workspaces/libnpmexec/package.json +++ b/workspaces/libnpmexec/package.json @@ -64,7 +64,7 @@ "ci-info": "^3.7.1", "npm-package-arg": "^10.1.0", "npmlog": "^7.0.1", - "pacote": "^15.0.8", + "pacote": "^16.0.0", "proc-log": "^3.0.0", "read": "^2.0.0", "read-package-json-fast": "^3.0.2", diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json index d8861c337c4d9..12d1d4ea0f888 100644 --- a/workspaces/libnpmpack/package.json +++ b/workspaces/libnpmpack/package.json @@ -39,7 +39,7 @@ "@npmcli/arborist": "^6.3.0", "@npmcli/run-script": "^6.0.0", "npm-package-arg": "^10.1.0", - "pacote": "^15.0.8" + "pacote": "^16.0.0" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" From e0ced9ea359c198fef626e5ba6e82e3116cd79c4 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Fri, 11 Aug 2023 16:42:39 -0700 Subject: [PATCH 11/68] fix: drop node14 support BREAKING CHANGE: support for node 14 has been removed --- .github/workflows/ci-libnpmdiff.yml | 2 -- workspaces/libnpmdiff/package.json | 10 ++++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-libnpmdiff.yml b/.github/workflows/ci-libnpmdiff.yml index 3b6ca0d16b9bc..dc6f09e74fc2b 100644 --- a/.github/workflows/ci-libnpmdiff.yml +++ b/.github/workflows/ci-libnpmdiff.yml @@ -64,8 +64,6 @@ jobs: os: windows-latest shell: cmd node-version: - - 14.17.0 - - 14.x - 16.13.0 - 16.x - 18.0.0 diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json index b580aaf5bff3c..ced4ae5443650 100644 --- a/workspaces/libnpmdiff/package.json +++ b/workspaces/libnpmdiff/package.json @@ -13,7 +13,7 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "keywords": [ "npm", @@ -59,7 +59,13 @@ "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ From bee1a798f2b43926f7165cf98187d49607866d6b Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Fri, 11 Aug 2023 16:43:18 -0700 Subject: [PATCH 12/68] fix: drop node14 support BREAKING CHANGE: support for node 14 has been removed --- .github/workflows/ci-libnpmexec.yml | 2 -- workspaces/libnpmexec/package.json | 10 ++++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-libnpmexec.yml b/.github/workflows/ci-libnpmexec.yml index 865b587f99248..a09285039e8aa 100644 --- a/.github/workflows/ci-libnpmexec.yml +++ b/.github/workflows/ci-libnpmexec.yml @@ -64,8 +64,6 @@ jobs: os: windows-latest shell: cmd node-version: - - 14.17.0 - - 14.x - 16.13.0 - 16.x - 18.0.0 diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json index 9117cc54c82f4..90b2e6b2e74d0 100644 --- a/workspaces/libnpmexec/package.json +++ b/workspaces/libnpmexec/package.json @@ -7,7 +7,7 @@ ], "main": "lib/index.js", "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "description": "npm exec (npx) programmatic API", "repository": { @@ -74,6 +74,12 @@ "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ] } } From 936d920241de1bbf490a6fb537a1b59acba25aa2 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Fri, 11 Aug 2023 16:43:56 -0700 Subject: [PATCH 13/68] fix: drop node14 support BREAKING CHANGE: support for node 14 has been removed --- .github/workflows/ci-libnpmpack.yml | 2 -- workspaces/libnpmpack/package.json | 10 ++++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-libnpmpack.yml b/.github/workflows/ci-libnpmpack.yml index d5e9092de9031..db8b0b496c146 100644 --- a/.github/workflows/ci-libnpmpack.yml +++ b/.github/workflows/ci-libnpmpack.yml @@ -64,8 +64,6 @@ jobs: os: windows-latest shell: cmd node-version: - - 14.17.0 - - 14.x - 16.13.0 - 16.x - 18.0.0 diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json index 12d1d4ea0f888..e9b915466acc4 100644 --- a/workspaces/libnpmpack/package.json +++ b/workspaces/libnpmpack/package.json @@ -42,12 +42,18 @@ "pacote": "^16.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "version": "4.18.0", - "content": "../../scripts/template-oss/index.js" + "content": "../../scripts/template-oss/index.js", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ From 3d3f819e0e33e35e3b9f6b46f0e684521f998fff Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Fri, 11 Aug 2023 16:45:38 -0700 Subject: [PATCH 14/68] chore: drop node14 support in private mock-registry workspace --- .github/workflows/ci-npmcli-mock-registry.yml | 2 -- mock-registry/package.json | 10 ++++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-npmcli-mock-registry.yml b/.github/workflows/ci-npmcli-mock-registry.yml index f95e25fca898b..4e8510eb59612 100644 --- a/.github/workflows/ci-npmcli-mock-registry.yml +++ b/.github/workflows/ci-npmcli-mock-registry.yml @@ -64,8 +64,6 @@ jobs: os: windows-latest shell: cmd node-version: - - 14.17.0 - - 14.x - 16.13.0 - 16.x - 18.0.0 diff --git a/mock-registry/package.json b/mock-registry/package.json index 8187bbe52b722..c7624d6d25579 100644 --- a/mock-registry/package.json +++ b/mock-registry/package.json @@ -30,11 +30,17 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.18.0" + "version": "4.18.0", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "no-coverage": true, From e25a0662da6f270a14d8e15ff60e8f8365ef1565 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Fri, 11 Aug 2023 16:46:39 -0700 Subject: [PATCH 15/68] chore: set workspaces with changing engines to prerelease --- release-please-config.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/release-please-config.json b/release-please-config.json index a539f69faef81..6062068715df7 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -44,10 +44,10 @@ "prerelease": true }, "workspaces/libnpmdiff": { - "prerelease": false + "prerelease": true }, "workspaces/libnpmexec": { - "prerelease": false + "prerelease": true }, "workspaces/libnpmfund": { "prerelease": false @@ -59,7 +59,7 @@ "prerelease": true }, "workspaces/libnpmpack": { - "prerelease": false + "prerelease": true }, "workspaces/libnpmpublish": { "prerelease": true From c52824548ee0d2d69cb113a2c7cc22c6d282912a Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Mon, 14 Aug 2023 12:02:30 -0700 Subject: [PATCH 16/68] deps: npm-profile@8.0.0 --- node_modules/.gitignore | 3 - .../npm-registry-fetch/LICENSE.md | 20 -- .../npm-registry-fetch/lib/auth.js | 145 ---------- .../npm-registry-fetch/lib/check-response.js | 100 ------- .../npm-registry-fetch/lib/clean-url.js | 27 -- .../npm-registry-fetch/lib/default-opts.js | 19 -- .../npm-registry-fetch/lib/errors.js | 80 ------ .../npm-registry-fetch/lib/index.js | 247 ------------------ .../npm-registry-fetch/package.json | 67 ----- node_modules/npm-profile/package.json | 17 +- package-lock.json | 38 +-- package.json | 2 +- 12 files changed, 23 insertions(+), 742 deletions(-) delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/clean-url.js delete 
mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/package.json diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 069ee05e38167..cb5082cb8bf74 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -198,9 +198,6 @@ !/npm-packlist !/npm-pick-manifest !/npm-profile -!/npm-profile/node_modules/ -/npm-profile/node_modules/* -!/npm-profile/node_modules/npm-registry-fetch !/npm-registry-fetch !/npm-registry-fetch/node_modules/ /npm-registry-fetch/node_modules/* diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md deleted file mode 100644 index 5fc208ff122e0..0000000000000 --- a/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md +++ /dev/null @@ -1,20 +0,0 @@ - - -ISC License - -Copyright npm, Inc. - -Permission to use, copy, modify, and/or distribute this -software for any purpose with or without fee is hereby -granted, provided that the above copyright notice and this -permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL -WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO -EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js deleted file mode 100644 index 870ce0d923cd0..0000000000000 --- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js +++ /dev/null @@ -1,145 +0,0 @@ -'use strict' -const fs = require('fs') -const npa = require('npm-package-arg') -const { URL } = require('url') - -// Find the longest registry key that is used for some kind of auth -// in the options. -const regKeyFromURI = (uri, opts) => { - const parsed = new URL(uri) - // try to find a config key indicating we have auth for this registry - // can be one of :_authToken, :_auth, :_password and :username, or - // :certfile and :keyfile - // We walk up the "path" until we're left with just //[:], - // stopping when we reach '//'. - let regKey = `//${parsed.host}${parsed.pathname}` - while (regKey.length > '//'.length) { - // got some auth for this URI - if (hasAuth(regKey, opts)) { - return regKey - } - - // can be either //host/some/path/:_auth or //host/some/path:_auth - // walk up by removing EITHER what's after the slash OR the slash itself - regKey = regKey.replace(/([^/]+|\/)$/, '') - } -} - -const hasAuth = (regKey, opts) => ( - opts[`${regKey}:_authToken`] || - opts[`${regKey}:_auth`] || - opts[`${regKey}:username`] && opts[`${regKey}:_password`] || - opts[`${regKey}:certfile`] && opts[`${regKey}:keyfile`] -) - -const sameHost = (a, b) => { - const parsedA = new URL(a) - const parsedB = new URL(b) - return parsedA.host === parsedB.host -} - -const getRegistry = opts => { - const { spec } = opts - const { scope: specScope, subSpec } = spec ? 
npa(spec) : {} - const subSpecScope = subSpec && subSpec.scope - const scope = subSpec ? subSpecScope : specScope - const scopeReg = scope && opts[`${scope}:registry`] - return scopeReg || opts.registry -} - -const maybeReadFile = file => { - try { - return fs.readFileSync(file, 'utf8') - } catch (er) { - if (er.code !== 'ENOENT') { - throw er - } - return null - } -} - -const getAuth = (uri, opts = {}) => { - const { forceAuth } = opts - if (!uri) { - throw new Error('URI is required') - } - const regKey = regKeyFromURI(uri, forceAuth || opts) - - // we are only allowed to use what's in forceAuth if specified - if (forceAuth && !regKey) { - return new Auth({ - scopeAuthKey: null, - token: forceAuth._authToken || forceAuth.token, - username: forceAuth.username, - password: forceAuth._password || forceAuth.password, - auth: forceAuth._auth || forceAuth.auth, - certfile: forceAuth.certfile, - keyfile: forceAuth.keyfile, - }) - } - - // no auth for this URI, but might have it for the registry - if (!regKey) { - const registry = getRegistry(opts) - if (registry && uri !== registry && sameHost(uri, registry)) { - return getAuth(registry, opts) - } else if (registry !== opts.registry) { - // If making a tarball request to a different base URI than the - // registry where we logged in, but the same auth SHOULD be sent - // to that artifact host, then we track where it was coming in from, - // and warn the user if we get a 4xx error on it. - const scopeAuthKey = regKeyFromURI(registry, opts) - return new Auth({ scopeAuthKey }) - } - } - - const { - [`${regKey}:_authToken`]: token, - [`${regKey}:username`]: username, - [`${regKey}:_password`]: password, - [`${regKey}:_auth`]: auth, - [`${regKey}:certfile`]: certfile, - [`${regKey}:keyfile`]: keyfile, - } = opts - - return new Auth({ - scopeAuthKey: null, - token, - auth, - username, - password, - certfile, - keyfile, - }) -} - -class Auth { - constructor ({ token, auth, username, password, scopeAuthKey, certfile, keyfile }) { - this.scopeAuthKey = scopeAuthKey - this.token = null - this.auth = null - this.isBasicAuth = false - this.cert = null - this.key = null - if (token) { - this.token = token - } else if (auth) { - this.auth = auth - } else if (username && password) { - const p = Buffer.from(password, 'base64').toString('utf8') - this.auth = Buffer.from(`${username}:${p}`, 'utf8').toString('base64') - this.isBasicAuth = true - } - // mTLS may be used in conjunction with another auth method above - if (certfile && keyfile) { - const cert = maybeReadFile(certfile, 'utf-8') - const key = maybeReadFile(keyfile, 'utf-8') - if (cert && key) { - this.cert = cert - this.key = key - } - } - } -} - -module.exports = getAuth diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js deleted file mode 100644 index 066ac3c32420f..0000000000000 --- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js +++ /dev/null @@ -1,100 +0,0 @@ -'use strict' - -const errors = require('./errors.js') -const { Response } = require('minipass-fetch') -const defaultOpts = require('./default-opts.js') -const log = require('proc-log') -const cleanUrl = require('./clean-url.js') - -/* eslint-disable-next-line max-len */ -const moreInfoUrl = 'https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry' -const checkResponse = - async ({ method, uri, res, startTime, auth, opts }) => { - opts = { ...defaultOpts, 
...opts } - if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) { - log.notice('', res.headers.get('npm-notice')) - } - - if (res.status >= 400) { - logRequest(method, res, startTime) - if (auth && auth.scopeAuthKey && !auth.token && !auth.auth) { - // we didn't have auth for THIS request, but we do have auth for - // requests to the registry indicated by the spec's scope value. - // Warn the user. - log.warn('registry', `No auth for URI, but auth present for scoped registry. - -URI: ${uri} -Scoped Registry Key: ${auth.scopeAuthKey} - -More info here: ${moreInfoUrl}`) - } - return checkErrors(method, res, startTime, opts) - } else { - res.body.on('end', () => logRequest(method, res, startTime, opts)) - if (opts.ignoreBody) { - res.body.resume() - return new Response(null, res) - } - return res - } - } -module.exports = checkResponse - -function logRequest (method, res, startTime) { - const elapsedTime = Date.now() - startTime - const attempt = res.headers.get('x-fetch-attempts') - const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : '' - const cacheStatus = res.headers.get('x-local-cache-status') - const cacheStr = cacheStatus ? ` (cache ${cacheStatus})` : '' - const urlStr = cleanUrl(res.url) - - log.http( - 'fetch', - `${method.toUpperCase()} ${res.status} ${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}` - ) -} - -function checkErrors (method, res, startTime, opts) { - return res.buffer() - .catch(() => null) - .then(body => { - let parsed = body - try { - parsed = JSON.parse(body.toString('utf8')) - } catch { - // ignore errors - } - if (res.status === 401 && res.headers.get('www-authenticate')) { - const auth = res.headers.get('www-authenticate') - .split(/,\s*/) - .map(s => s.toLowerCase()) - if (auth.indexOf('ipaddress') !== -1) { - throw new errors.HttpErrorAuthIPAddress( - method, res, parsed, opts.spec - ) - } else if (auth.indexOf('otp') !== -1) { - throw new errors.HttpErrorAuthOTP( - method, res, parsed, opts.spec - ) - } else { - throw new errors.HttpErrorAuthUnknown( - method, res, parsed, opts.spec - ) - } - } else if ( - res.status === 401 && - body != null && - /one-time pass/.test(body.toString('utf8')) - ) { - // Heuristic for malformed OTP responses that don't include the - // www-authenticate header. 
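The 401 handling above distills to a small precedence rule: the www-authenticate header is authoritative when present, and the body regex is only a fallback for registries that omit it. A minimal sketch of that rule as a pure function (classify401 is a hypothetical name, not part of this module):

const classify401 = (wwwAuthenticate, bodyText) => {
  if (wwwAuthenticate) {
    // header present: trust it, exactly as checkErrors() does
    const methods = wwwAuthenticate.split(/,\s*/).map(s => s.toLowerCase())
    if (methods.includes('ipaddress')) return 'HttpErrorAuthIPAddress'
    if (methods.includes('otp')) return 'HttpErrorAuthOTP'
    return 'HttpErrorAuthUnknown'
  }
  // header absent: fall back to the body heuristic
  return /one-time pass/.test(bodyText) ? 'HttpErrorAuthOTP' : 'HttpErrorGeneral'
}

console.log(classify401('OTP', ''))                       // HttpErrorAuthOTP
console.log(classify401(null, 'Enter a one-time pass.'))  // HttpErrorAuthOTP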
- throw new errors.HttpErrorAuthOTP( - method, res, parsed, opts.spec - ) - } else { - throw new errors.HttpErrorGeneral( - method, res, parsed, opts.spec - ) - } - }) -} diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/clean-url.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/clean-url.js deleted file mode 100644 index 0c2656b5653a0..0000000000000 --- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/clean-url.js +++ /dev/null @@ -1,27 +0,0 @@ -const { URL } = require('url') - -const replace = '***' -const tokenRegex = /\bnpm_[a-zA-Z0-9]{36}\b/g -const guidRegex = /\b[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\b/g - -const cleanUrl = (str) => { - if (typeof str !== 'string' || !str) { - return str - } - - try { - const url = new URL(str) - if (url.password) { - url.password = replace - str = url.toString() - } - } catch { - // ignore errors - } - - return str - .replace(tokenRegex, `npm_${replace}`) - .replace(guidRegex, `npm_${replace}`) -} - -module.exports = cleanUrl diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js deleted file mode 100644 index f0847f0b507e2..0000000000000 --- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js +++ /dev/null @@ -1,19 +0,0 @@ -const pkg = require('../package.json') -module.exports = { - maxSockets: 12, - method: 'GET', - registry: 'https://registry.npmjs.org/', - timeout: 5 * 60 * 1000, // 5 minutes - strictSSL: true, - noProxy: process.env.NOPROXY, - userAgent: `${pkg.name - }@${ - pkg.version - }/node@${ - process.version - }+${ - process.arch - } (${ - process.platform - })`, -} diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js deleted file mode 100644 index cf5ddba6f300c..0000000000000 --- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js +++ /dev/null @@ -1,80 +0,0 @@ -'use strict' - -const url = require('url') - -function packageName (href) { - try { - let basePath = new url.URL(href).pathname.slice(1) - if (!basePath.match(/^-/)) { - basePath = basePath.split('/') - var index = basePath.indexOf('_rewrite') - if (index === -1) { - index = basePath.length - 1 - } else { - index++ - } - return decodeURIComponent(basePath[index]) - } - } catch (_) { - // this is ok - } -} - -class HttpErrorBase extends Error { - constructor (method, res, body, spec) { - super() - this.name = this.constructor.name - this.headers = res.headers.raw() - this.statusCode = res.status - this.code = `E${res.status}` - this.method = method - this.uri = res.url - this.body = body - this.pkgid = spec ? spec.toString() : packageName(res.url) - } -} -module.exports.HttpErrorBase = HttpErrorBase - -class HttpErrorGeneral extends HttpErrorBase { - constructor (method, res, body, spec) { - super(method, res, body, spec) - this.message = `${res.status} ${res.statusText} - ${ - this.method.toUpperCase() - } ${ - this.spec || this.uri - }${ - (body && body.error) ? 
' - ' + body.error : '' - }` - Error.captureStackTrace(this, HttpErrorGeneral) - } -} -module.exports.HttpErrorGeneral = HttpErrorGeneral - -class HttpErrorAuthOTP extends HttpErrorBase { - constructor (method, res, body, spec) { - super(method, res, body, spec) - this.message = 'OTP required for authentication' - this.code = 'EOTP' - Error.captureStackTrace(this, HttpErrorAuthOTP) - } -} -module.exports.HttpErrorAuthOTP = HttpErrorAuthOTP - -class HttpErrorAuthIPAddress extends HttpErrorBase { - constructor (method, res, body, spec) { - super(method, res, body, spec) - this.message = 'Login is not allowed from your IP address' - this.code = 'EAUTHIP' - Error.captureStackTrace(this, HttpErrorAuthIPAddress) - } -} -module.exports.HttpErrorAuthIPAddress = HttpErrorAuthIPAddress - -class HttpErrorAuthUnknown extends HttpErrorBase { - constructor (method, res, body, spec) { - super(method, res, body, spec) - this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate') - Error.captureStackTrace(this, HttpErrorAuthUnknown) - } -} -module.exports.HttpErrorAuthUnknown = HttpErrorAuthUnknown diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js deleted file mode 100644 index 23e349c5c5b96..0000000000000 --- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js +++ /dev/null @@ -1,247 +0,0 @@ -'use strict' - -const { HttpErrorAuthOTP } = require('./errors.js') -const checkResponse = require('./check-response.js') -const getAuth = require('./auth.js') -const fetch = require('make-fetch-happen') -const JSONStream = require('minipass-json-stream') -const npa = require('npm-package-arg') -const qs = require('querystring') -const url = require('url') -const zlib = require('minizlib') -const { Minipass } = require('minipass') - -const defaultOpts = require('./default-opts.js') - -// WhatWG URL throws if it's not fully resolved -const urlIsValid = u => { - try { - return !!new url.URL(u) - } catch (_) { - return false - } -} - -module.exports = regFetch -function regFetch (uri, /* istanbul ignore next */ opts_ = {}) { - const opts = { - ...defaultOpts, - ...opts_, - } - - // if we did not get a fully qualified URI, then we look at the registry - // config or relevant scope to resolve it. 
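The resolution step below amounts to joining the registry base URL to the partial path, trimming at most one slash on either side. A minimal sketch (resolveUri is a hypothetical helper name, not part of this module):

// strip a trailing slash from the base and a leading slash from the path,
// then join with exactly one separator
const resolveUri = (registry, path) =>
  `${registry.trim().replace(/\/?$/, '')}/${path.trim().replace(/^\//, '')}`

console.log(resolveUri('https://registry.npmjs.org/', '/-/npm/v1/user'))
// https://registry.npmjs.org/-/npm/v1/user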
- const uriValid = urlIsValid(uri) - let registry = opts.registry || defaultOpts.registry - if (!uriValid) { - registry = opts.registry = ( - (opts.spec && pickRegistry(opts.spec, opts)) || - opts.registry || - registry - ) - uri = `${ - registry.trim().replace(/\/?$/g, '') - }/${ - uri.trim().replace(/^\//, '') - }` - // asserts that this is now valid - new url.URL(uri) - } - - const method = opts.method || 'GET' - - // through that takes into account the scope, the prefix of `uri`, etc - const startTime = Date.now() - const auth = getAuth(uri, opts) - const headers = getHeaders(uri, auth, opts) - let body = opts.body - const bodyIsStream = Minipass.isStream(body) - const bodyIsPromise = body && - typeof body === 'object' && - typeof body.then === 'function' - - if ( - body && !bodyIsStream && !bodyIsPromise && typeof body !== 'string' && !Buffer.isBuffer(body) - ) { - headers['content-type'] = headers['content-type'] || 'application/json' - body = JSON.stringify(body) - } else if (body && !headers['content-type']) { - headers['content-type'] = 'application/octet-stream' - } - - if (opts.gzip) { - headers['content-encoding'] = 'gzip' - if (bodyIsStream) { - const gz = new zlib.Gzip() - body.on('error', /* istanbul ignore next: unlikely and hard to test */ - err => gz.emit('error', err)) - body = body.pipe(gz) - } else if (!bodyIsPromise) { - body = new zlib.Gzip().end(body).concat() - } - } - - const parsed = new url.URL(uri) - - if (opts.query) { - const q = typeof opts.query === 'string' ? qs.parse(opts.query) - : opts.query - - Object.keys(q).forEach(key => { - if (q[key] !== undefined) { - parsed.searchParams.set(key, q[key]) - } - }) - uri = url.format(parsed) - } - - if (parsed.searchParams.get('write') === 'true' && method === 'GET') { - // do not cache, because this GET is fetching a rev that will be - // used for a subsequent PUT or DELETE, so we need to conditionally - // update cache. - opts.offline = false - opts.preferOffline = false - opts.preferOnline = true - } - - const doFetch = async fetchBody => { - const p = fetch(uri, { - agent: opts.agent, - algorithms: opts.algorithms, - body: fetchBody, - cache: getCacheMode(opts), - cachePath: opts.cache, - ca: opts.ca, - cert: auth.cert || opts.cert, - headers, - integrity: opts.integrity, - key: auth.key || opts.key, - localAddress: opts.localAddress, - maxSockets: opts.maxSockets, - memoize: opts.memoize, - method: method, - noProxy: opts.noProxy, - proxy: opts.httpsProxy || opts.proxy, - retry: opts.retry ? 
opts.retry : { - retries: opts.fetchRetries, - factor: opts.fetchRetryFactor, - minTimeout: opts.fetchRetryMintimeout, - maxTimeout: opts.fetchRetryMaxtimeout, - }, - strictSSL: opts.strictSSL, - timeout: opts.timeout || 30 * 1000, - }).then(res => checkResponse({ - method, - uri, - res, - registry, - startTime, - auth, - opts, - })) - - if (typeof opts.otpPrompt === 'function') { - return p.catch(async er => { - if (er instanceof HttpErrorAuthOTP) { - let otp - // if otp fails to complete, we fail with that failure - try { - otp = await opts.otpPrompt() - } catch (_) { - // ignore this error - } - // if no otp provided, or otpPrompt errored, throw the original HTTP error - if (!otp) { - throw er - } - return regFetch(uri, { ...opts, otp }) - } - throw er - }) - } else { - return p - } - } - - return Promise.resolve(body).then(doFetch) -} - -module.exports.json = fetchJSON -function fetchJSON (uri, opts) { - return regFetch(uri, opts).then(res => res.json()) -} - -module.exports.json.stream = fetchJSONStream -function fetchJSONStream (uri, jsonPath, - /* istanbul ignore next */ opts_ = {}) { - const opts = { ...defaultOpts, ...opts_ } - const parser = JSONStream.parse(jsonPath, opts.mapJSON) - regFetch(uri, opts).then(res => - res.body.on('error', - /* istanbul ignore next: unlikely and difficult to test */ - er => parser.emit('error', er)).pipe(parser) - ).catch(er => parser.emit('error', er)) - return parser -} - -module.exports.pickRegistry = pickRegistry -function pickRegistry (spec, opts = {}) { - spec = npa(spec) - let registry = spec.scope && - opts[spec.scope.replace(/^@?/, '@') + ':registry'] - - if (!registry && opts.scope) { - registry = opts[opts.scope.replace(/^@?/, '@') + ':registry'] - } - - if (!registry) { - registry = opts.registry || defaultOpts.registry - } - - return registry -} - -function getCacheMode (opts) { - return opts.offline ? 'only-if-cached' - : opts.preferOffline ? 'force-cache' - : opts.preferOnline ? 
'no-cache' - : 'default' -} - -function getHeaders (uri, auth, opts) { - const headers = Object.assign({ - 'user-agent': opts.userAgent, - }, opts.headers || {}) - - if (opts.authType) { - headers['npm-auth-type'] = opts.authType - } - - if (opts.scope) { - headers['npm-scope'] = opts.scope - } - - if (opts.npmSession) { - headers['npm-session'] = opts.npmSession - } - - if (opts.npmCommand) { - headers['npm-command'] = opts.npmCommand - } - - // If a tarball is hosted on a different place than the manifest, only send - // credentials on `alwaysAuth` - if (auth.token) { - headers.authorization = `Bearer ${auth.token}` - } else if (auth.auth) { - headers.authorization = `Basic ${auth.auth}` - } - - if (opts.otp) { - headers['npm-otp'] = opts.otp - } - - return headers -} - -module.exports.cleanUrl = require('./clean-url.js') diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json b/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json deleted file mode 100644 index 63a44725886cc..0000000000000 --- a/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "name": "npm-registry-fetch", - "version": "14.0.5", - "description": "Fetch-based http client for use with npm registry APIs", - "main": "lib", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "eslint": "eslint", - "lint": "eslint \"**/*.js\"", - "lintfix": "npm run lint -- --fix", - "test": "tap", - "posttest": "npm run lint", - "npmclilint": "npmcli-lint", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "snap": "tap", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/npm-registry-fetch.git" - }, - "keywords": [ - "npm", - "registry", - "fetch" - ], - "author": "GitHub Inc.", - "license": "ISC", - "dependencies": { - "make-fetch-happen": "^11.0.0", - "minipass": "^5.0.0", - "minipass-fetch": "^3.0.0", - "minipass-json-stream": "^1.0.1", - "minizlib": "^2.1.2", - "npm-package-arg": "^10.0.0", - "proc-log": "^3.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", - "cacache": "^17.0.0", - "nock": "^13.2.4", - "require-inject": "^1.4.4", - "ssri": "^10.0.0", - "tap": "^16.0.1" - }, - "tap": { - "check-coverage": true, - "test-ignore": "test[\\\\/](util|cache)[\\\\/]", - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.14.1", - "publish": "true" - } -} diff --git a/node_modules/npm-profile/package.json b/node_modules/npm-profile/package.json index 9c0b77b8a6dd5..c3b9bc2a8dbb2 100644 --- a/node_modules/npm-profile/package.json +++ b/node_modules/npm-profile/package.json @@ -1,12 +1,12 @@ { "name": "npm-profile", - "version": "7.0.1", + "version": "8.0.0", "description": "Library for updating an npmjs.com profile", "keywords": [], "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "npm-registry-fetch": "^14.0.0", + "npm-registry-fetch": "^15.0.0", "proc-log": "^3.0.0" }, "main": "./lib/index.js", @@ -20,7 +20,7 @@ ], "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.5.1", + "@npmcli/template-oss": "4.18.0", "nock": "^13.2.4", "tap": "^16.0.1" }, @@ -41,10 +41,17 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.5.1" + "version": "4.18.0", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ], + "publish": true } } diff --git a/package-lock.json b/package-lock.json index 3975fe8881dd7..973a521e474df 100644 --- a/package-lock.json +++ b/package-lock.json @@ -133,7 +133,7 @@ "npm-install-checks": "^6.1.1", "npm-package-arg": "^10.1.0", "npm-pick-manifest": "^8.0.1", - "npm-profile": "^7.0.1", + "npm-profile": "^8.0.0", "npm-registry-fetch": "^15.0.0", "npm-user-validate": "^2.0.0", "npmlog": "^7.0.1", @@ -234,7 +234,7 @@ "tap": "^16.3.4" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" } }, "node_modules/@actions/core": { @@ -9654,34 +9654,16 @@ } }, "node_modules/npm-profile": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-7.0.1.tgz", - "integrity": "sha512-VReArOY/fCx5dWL66cbJ2OMogTQAVVQA//8jjmjkarboki3V7UJ0XbGFW+khRwiAJFQjuH0Bqr/yF7Y5RZdkMQ==", - "inBundle": true, - "dependencies": { - "npm-registry-fetch": "^14.0.0", - "proc-log": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-profile/node_modules/npm-registry-fetch": { - "version": "14.0.5", - "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-14.0.5.tgz", - "integrity": "sha512-kIDMIo4aBm6xg7jOttupWZamsZRkAqMqwqqbVXnUqstY5+tapvv6bkH/qMR76jdgV+YljEUCyWx3hRYMrJiAgA==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-8.0.0.tgz", + "integrity": "sha512-3I/URYO4xI4PBRE9OMsxrTPT357n4ygEb5KqjZC31DU2tbdkOPBHCjRY5Xj7SXlPYvsx83tY3ia86EZ3LKkMzw==", "inBundle": true, "dependencies": { - "make-fetch-happen": "^11.0.0", - "minipass": "^5.0.0", - "minipass-fetch": "^3.0.0", - "minipass-json-stream": "^1.0.1", - "minizlib": "^2.1.2", - "npm-package-arg": "^10.0.0", + "npm-registry-fetch": "^15.0.0", "proc-log": "^3.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" } }, "node_modules/npm-registry-fetch": { @@ -15841,7 +15823,7 @@ "tap": "^16.3.4" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" } }, "workspaces/libnpmexec": { @@ -15871,7 +15853,7 @@ "tap": "^16.3.4" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" } }, "workspaces/libnpmfund": { @@ -15941,7 +15923,7 @@ "tap": "^16.3.4" }, "engines": { - "node": "^14.17.0 || 
^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" } }, "workspaces/libnpmpublish": { diff --git a/package.json b/package.json index 8ab1f82e62799..76ff7b31596cd 100644 --- a/package.json +++ b/package.json @@ -98,7 +98,7 @@ "npm-install-checks": "^6.1.1", "npm-package-arg": "^10.1.0", "npm-pick-manifest": "^8.0.1", - "npm-profile": "^7.0.1", + "npm-profile": "^8.0.0", "npm-registry-fetch": "^15.0.0", "npm-user-validate": "^2.0.0", "npmlog": "^7.0.1", From 57df8bb61fe4a0f427d9b07ce9c57e98d3b7df2b Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Mon, 14 Aug 2023 12:05:36 -0700 Subject: [PATCH 17/68] deps: @npmcli/metavuln-calculator@6.0.0 --- .../@npmcli/metavuln-calculator/package.json | 16 +++++++++++----- package-lock.json | 10 +++++----- workspaces/arborist/package.json | 2 +- 3 files changed, 17 insertions(+), 11 deletions(-) diff --git a/node_modules/@npmcli/metavuln-calculator/package.json b/node_modules/@npmcli/metavuln-calculator/package.json index 18ebb68c4bfd1..105254e168ee8 100644 --- a/node_modules/@npmcli/metavuln-calculator/package.json +++ b/node_modules/@npmcli/metavuln-calculator/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/metavuln-calculator", - "version": "5.0.1", + "version": "6.0.0", "main": "lib/index.js", "files": [ "bin/", @@ -34,7 +34,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.13.0", + "@npmcli/template-oss": "4.18.0", "require-inject": "^1.4.4", "tap": "^16.0.1" }, @@ -45,11 +45,17 @@ "semver": "^7.3.5" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.13.0", - "publish": "true" + "version": "4.18.0", + "publish": "true", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ] } } diff --git a/package-lock.json b/package-lock.json index 973a521e474df..0ce1bd1f5959f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2460,9 +2460,9 @@ } }, "node_modules/@npmcli/metavuln-calculator": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-5.0.1.tgz", - "integrity": "sha512-qb8Q9wIIlEPj3WeA1Lba91R4ZboPL0uspzV0F9uwP+9AYMVB2zOoa7Pbk12g6D2NHAinSbHh6QYmGuRyHZ874Q==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-6.0.0.tgz", + "integrity": "sha512-h3zA2YSo7H3ZV1W4ZvlDTLaAbBwyOs6HEYhxrhl25Wtl49P7dLb8V2uFUb3dFZ8e4Ic+iF1cRMMWq9ATriYVqg==", "dependencies": { "cacache": "^17.0.0", "json-parse-even-better-errors": "^3.0.0", @@ -2470,7 +2470,7 @@ "semver": "^7.3.5" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" } }, "node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch": { @@ -15714,7 +15714,7 @@ "@npmcli/fs": "^3.1.0", "@npmcli/installed-package-contents": "^2.0.2", "@npmcli/map-workspaces": "^3.0.2", - "@npmcli/metavuln-calculator": "^5.0.0", + "@npmcli/metavuln-calculator": "^6.0.0", "@npmcli/name-from-folder": "^2.0.0", "@npmcli/node-gyp": "^3.0.0", "@npmcli/package-json": "^4.0.0", diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index f0595898e4a5b..07ce5a75fb8aa 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -7,7 +7,7 @@ "@npmcli/fs": "^3.1.0", "@npmcli/installed-package-contents": "^2.0.2", "@npmcli/map-workspaces": "^3.0.2", - 
"@npmcli/metavuln-calculator": "^5.0.0", + "@npmcli/metavuln-calculator": "^6.0.0", "@npmcli/name-from-folder": "^2.0.0", "@npmcli/node-gyp": "^3.0.0", "@npmcli/package-json": "^4.0.0", From 028fa819c7b9b744d4d12a18d99491052f60f4db Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Mon, 14 Aug 2023 12:06:32 -0700 Subject: [PATCH 18/68] deps: cacache@17.1.4 --- node_modules/.gitignore | 3 + .../cacache/node_modules/minipass/LICENSE | 15 + .../node_modules/minipass/dist/cjs/index.js | 1028 +++++++++++++++++ .../minipass/dist/cjs/package.json | 3 + .../node_modules/minipass/dist/mjs/index.js | 1018 ++++++++++++++++ .../minipass/dist/mjs/package.json | 3 + .../node_modules/minipass/package.json | 82 ++ node_modules/cacache/package.json | 8 +- package-lock.json | 21 +- package.json | 2 +- workspaces/arborist/package.json | 2 +- 11 files changed, 2173 insertions(+), 12 deletions(-) create mode 100644 node_modules/cacache/node_modules/minipass/LICENSE create mode 100644 node_modules/cacache/node_modules/minipass/dist/cjs/index.js create mode 100644 node_modules/cacache/node_modules/minipass/dist/cjs/package.json create mode 100644 node_modules/cacache/node_modules/minipass/dist/mjs/index.js create mode 100644 node_modules/cacache/node_modules/minipass/dist/mjs/package.json create mode 100644 node_modules/cacache/node_modules/minipass/package.json diff --git a/node_modules/.gitignore b/node_modules/.gitignore index cb5082cb8bf74..b2f88a3100367 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -67,6 +67,9 @@ !/buffer !/builtins !/cacache +!/cacache/node_modules/ +/cacache/node_modules/* +!/cacache/node_modules/minipass !/chalk !/chownr !/ci-info diff --git a/node_modules/cacache/node_modules/minipass/LICENSE b/node_modules/cacache/node_modules/minipass/LICENSE new file mode 100644 index 0000000000000..97f8e32ed82e4 --- /dev/null +++ b/node_modules/cacache/node_modules/minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/cacache/node_modules/minipass/dist/cjs/index.js b/node_modules/cacache/node_modules/minipass/dist/cjs/index.js new file mode 100644 index 0000000000000..b6cdae8eb514b --- /dev/null +++ b/node_modules/cacache/node_modules/minipass/dist/cjs/index.js @@ -0,0 +1,1028 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0; +const proc = typeof process === 'object' && process + ? 
process + : { + stdout: null, + stderr: null, + }; +const events_1 = require("events"); +const stream_1 = __importDefault(require("stream")); +const string_decoder_1 = require("string_decoder"); +/** + * Return true if the argument is a Minipass stream, Node stream, or something + * else that Minipass can interact with. + */ +const isStream = (s) => !!s && + typeof s === 'object' && + (s instanceof Minipass || + s instanceof stream_1.default || + (0, exports.isReadable)(s) || + (0, exports.isWritable)(s)); +exports.isStream = isStream; +/** + * Return true if the argument is a valid {@link Minipass.Readable} + */ +const isReadable = (s) => !!s && + typeof s === 'object' && + s instanceof events_1.EventEmitter && + typeof s.pipe === 'function' && + // node core Writable streams have a pipe() method, but it throws + s.pipe !== stream_1.default.Writable.prototype.pipe; +exports.isReadable = isReadable; +/** + * Return true if the argument is a valid {@link Minipass.Writable} + */ +const isWritable = (s) => !!s && + typeof s === 'object' && + s instanceof events_1.EventEmitter && + typeof s.write === 'function' && + typeof s.end === 'function'; +exports.isWritable = isWritable; +const EOF = Symbol('EOF'); +const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); +const EMITTED_END = Symbol('emittedEnd'); +const EMITTING_END = Symbol('emittingEnd'); +const EMITTED_ERROR = Symbol('emittedError'); +const CLOSED = Symbol('closed'); +const READ = Symbol('read'); +const FLUSH = Symbol('flush'); +const FLUSHCHUNK = Symbol('flushChunk'); +const ENCODING = Symbol('encoding'); +const DECODER = Symbol('decoder'); +const FLOWING = Symbol('flowing'); +const PAUSED = Symbol('paused'); +const RESUME = Symbol('resume'); +const BUFFER = Symbol('buffer'); +const PIPES = Symbol('pipes'); +const BUFFERLENGTH = Symbol('bufferLength'); +const BUFFERPUSH = Symbol('bufferPush'); +const BUFFERSHIFT = Symbol('bufferShift'); +const OBJECTMODE = Symbol('objectMode'); +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed'); +// internal event when stream has an error +const ERROR = Symbol('error'); +const EMITDATA = Symbol('emitData'); +const EMITEND = Symbol('emitEnd'); +const EMITEND2 = Symbol('emitEnd2'); +const ASYNC = Symbol('async'); +const ABORT = Symbol('abort'); +const ABORTED = Symbol('aborted'); +const SIGNAL = Symbol('signal'); +const DATALISTENERS = Symbol('dataListeners'); +const DISCARDED = Symbol('discarded'); +const defer = (fn) => Promise.resolve().then(fn); +const nodefer = (fn) => fn(); +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; +const isArrayBufferLike = (b) => b instanceof ArrayBuffer || + (!!b && + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0); +const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); +/** + * Internal class representing a pipe to a destination stream. + * + * @internal + */ +class Pipe { + src; + dest; + opts; + ondrain; + constructor(src, dest, opts) { + this.src = src; + this.dest = dest; + this.opts = opts; + this.ondrain = () => src[RESUME](); + this.dest.on('drain', this.ondrain); + } + unpipe() { + this.dest.removeListener('drain', this.ondrain); + } + // only here for the prototype + /* c8 ignore start */ + proxyErrors(_er) { } + /* c8 ignore stop */ + end() { + this.unpipe(); + if (this.opts.end) + this.dest.end(); + } +} +/** + * Internal class representing a pipe to a destination stream where + * errors are proxied. 
+ * + * @internal + */ +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors); + super.unpipe(); + } + constructor(src, dest, opts) { + super(src, dest, opts); + this.proxyErrors = er => dest.emit('error', er); + src.on('error', this.proxyErrors); + } +} +const isObjectModeOptions = (o) => !!o.objectMode; +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; +/** + * Main export, the Minipass class + * + * `RType` is the type of data emitted, defaults to Buffer + * + * `WType` is the type of data to be written, if RType is buffer or string, + * then any {@link Minipass.ContiguousData} is allowed. + * + * `Events` is the set of event handler signatures that this object + * will emit, see {@link Minipass.Events} + */ +class Minipass extends events_1.EventEmitter { + [FLOWING] = false; + [PAUSED] = false; + [PIPES] = []; + [BUFFER] = []; + [OBJECTMODE]; + [ENCODING]; + [ASYNC]; + [DECODER]; + [EOF] = false; + [EMITTED_END] = false; + [EMITTING_END] = false; + [CLOSED] = false; + [EMITTED_ERROR] = null; + [BUFFERLENGTH] = 0; + [DESTROYED] = false; + [SIGNAL]; + [ABORTED] = false; + [DATALISTENERS] = 0; + [DISCARDED] = false; + /** + * true if the stream can be written + */ + writable = true; + /** + * true if the stream can be read + */ + readable = true; + /** + * If `RType` is Buffer, then options do not need to be provided. + * Otherwise, an options object must be provided to specify either + * {@link Minipass.SharedOptions.objectMode} or + * {@link Minipass.SharedOptions.encoding}, as appropriate. + */ + constructor(...args) { + const options = (args[0] || + {}); + super(); + if (options.objectMode && typeof options.encoding === 'string') { + throw new TypeError('Encoding and objectMode may not be used together'); + } + if (isObjectModeOptions(options)) { + this[OBJECTMODE] = true; + this[ENCODING] = null; + } + else if (isEncodingOptions(options)) { + this[ENCODING] = options.encoding; + this[OBJECTMODE] = false; + } + else { + this[OBJECTMODE] = false; + this[ENCODING] = null; + } + this[ASYNC] = !!options.async; + this[DECODER] = this[ENCODING] + ? new string_decoder_1.StringDecoder(this[ENCODING]) + : null; + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); + } + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); + } + const { signal } = options; + if (signal) { + this[SIGNAL] = signal; + if (signal.aborted) { + this[ABORT](); + } + else { + signal.addEventListener('abort', () => this[ABORT]()); + } + } + } + /** + * The amount of data stored in the buffer waiting to be read. + * + * For Buffer strings, this will be the total byte length. + * For string encoding streams, this will be the string character length, + * according to JavaScript's `string.length` logic. + * For objectMode streams, this is a count of the items waiting to be + * emitted. 
+ */ + get bufferLength() { + return this[BUFFERLENGTH]; + } + /** + * The `BufferEncoding` currently in use, or `null` + */ + get encoding() { + return this[ENCODING]; + } + /** + * @deprecated - This is a read only property + */ + set encoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * @deprecated - Encoding may only be set at instantiation time + */ + setEncoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * True if this is an objectMode stream + */ + get objectMode() { + return this[OBJECTMODE]; + } + /** + * @deprecated - This is a read-only property + */ + set objectMode(_om) { + throw new Error('objectMode must be set at instantiation time'); + } + /** + * true if this is an async stream + */ + get ['async']() { + return this[ASYNC]; + } + /** + * Set to true to make this stream async. + * + * Once set, it cannot be unset, as this would potentially cause incorrect + * behavior. Ie, a sync stream can be made async, but an async stream + * cannot be safely made sync. + */ + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a; + } + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true; + this.emit('abort', this[SIGNAL]?.reason); + this.destroy(this[SIGNAL]?.reason); + } + /** + * True if the stream has been aborted. + */ + get aborted() { + return this[ABORTED]; + } + /** + * No-op setter. Stream aborted status is set via the AbortSignal provided + * in the constructor options. + */ + set aborted(_) { } + write(chunk, encoding, cb) { + if (this[ABORTED]) + return false; + if (this[EOF]) + throw new Error('write after end'); + if (this[DESTROYED]) { + this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); + return true; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (!encoding) + encoding = 'utf8'; + const fn = this[ASYNC] ? defer : nodefer; + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything is only allowed if in object mode, so throw + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); + } + else if (isArrayBufferLike(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk); + } + else if (typeof chunk !== 'string') { + throw new Error('Non-contiguous data written to non-objectMode stream'); + } + } + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + // maybe impossible? 
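In objectMode the internal buffer counts items rather than bytes, which is what lets the branch below skip the string/Buffer handling entirely. A short usage sketch, assuming the published minipass package:

const { Minipass } = require('minipass')

const mp = new Minipass({ objectMode: true })
mp.write({ hello: 'world' })  // buffered; flow has not started yet
console.log(mp.bufferLength)  // 1 -- one item, not a byte count
console.log(mp.read())        // { hello: 'world' }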
+ /* c8 ignore start */ + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + /* c8 ignore stop */ + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { + //@ts-ignore - sinful unsafe type change + chunk = Buffer.from(chunk, encoding); + } + if (Buffer.isBuffer(chunk) && this[ENCODING]) { + //@ts-ignore - sinful unsafe type change + chunk = this[DECODER].write(chunk); + } + // Note: flushing CAN potentially switch us into not-flowing mode + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + /** + * Low-level explicit read method. + * + * In objectMode, the argument is ignored, and one item is returned if + * available. + * + * `n` is the number of bytes (or in the case of encoding streams, + * characters) to consume. If `n` is not provided, then the entire buffer + * is returned, or `null` is returned if no data is available. + * + * If `n` is greater that the amount of data in the internal buffer, + * then `null` is returned. + */ + read(n) { + if (this[DESTROYED]) + return null; + this[DISCARDED] = false; + if (this[BUFFERLENGTH] === 0 || + n === 0 || + (n && n > this[BUFFERLENGTH])) { + this[MAYBE_EMIT_END](); + return null; + } + if (this[OBJECTMODE]) + n = null; + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + // not object mode, so if we have an encoding, then RType is string + // otherwise, must be Buffer + this[BUFFER] = [ + (this[ENCODING] + ? this[BUFFER].join('') + : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), + ]; + } + const ret = this[READ](n || null, this[BUFFER][0]); + this[MAYBE_EMIT_END](); + return ret; + } + [READ](n, chunk) { + if (this[OBJECTMODE]) + this[BUFFERSHIFT](); + else { + const c = chunk; + if (n === c.length || n === null) + this[BUFFERSHIFT](); + else if (typeof c === 'string') { + this[BUFFER][0] = c.slice(n); + chunk = c.slice(0, n); + this[BUFFERLENGTH] -= n; + } + else { + this[BUFFER][0] = c.subarray(n); + chunk = c.subarray(0, n); + this[BUFFERLENGTH] -= n; + } + } + this.emit('data', chunk); + if (!this[BUFFER].length && !this[EOF]) + this.emit('drain'); + return chunk; + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (chunk !== undefined) + this.write(chunk, encoding); + if (cb) + this.once('end', cb); + this[EOF] = true; + this.writable = false; + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
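That write-only friendliness is observable from outside: a consumer attached after end() has already been called still receives the buffered data, and 'end' is re-emitted for late listeners. A short usage sketch, assuming the published minipass package:

const { Minipass } = require('minipass')

const sink = new Minipass({ encoding: 'utf8' })
sink.end('all done')  // nothing has been read yet
sink.on('data', chunk => console.log('data:', chunk)) // data: all done
sink.on('end', () => console.log('end'))              // end (re-emitted)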
+ if (this[FLOWING] || !this[PAUSED]) + this[MAYBE_EMIT_END](); + return this; + } + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) + return; + if (!this[DATALISTENERS] && !this[PIPES].length) { + this[DISCARDED] = true; + } + this[PAUSED] = false; + this[FLOWING] = true; + this.emit('resume'); + if (this[BUFFER].length) + this[FLUSH](); + else if (this[EOF]) + this[MAYBE_EMIT_END](); + else + this.emit('drain'); + } + /** + * Resume the stream if it is currently in a paused state + * + * If called when there are no pipe destinations or `data` event listeners, + * this will place the stream in a "discarded" state, where all data will + * be thrown away. The discarded state is removed if a pipe destination or + * data handler is added, if pause() is called, or if any synchronous or + * asynchronous iteration is started. + */ + resume() { + return this[RESUME](); + } + /** + * Pause the stream + */ + pause() { + this[FLOWING] = false; + this[PAUSED] = true; + this[DISCARDED] = false; + } + /** + * true if the stream has been forcibly destroyed + */ + get destroyed() { + return this[DESTROYED]; + } + /** + * true if the stream is currently in a flowing state, meaning that + * any writes will be immediately emitted. + */ + get flowing() { + return this[FLOWING]; + } + /** + * true if the stream is currently in a paused state + */ + get paused() { + return this[PAUSED]; + } + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1; + else + this[BUFFERLENGTH] += chunk.length; + this[BUFFER].push(chunk); + } + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1; + else + this[BUFFERLENGTH] -= this[BUFFER][0].length; + return this[BUFFER].shift(); + } + [FLUSH](noDrain = false) { + do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && + this[BUFFER].length); + if (!noDrain && !this[BUFFER].length && !this[EOF]) + this.emit('drain'); + } + [FLUSHCHUNK](chunk) { + this.emit('data', chunk); + return this[FLOWING]; + } + /** + * Pipe all data emitted by this stream into the destination provided. + * + * Triggers the flow of data. + */ + pipe(dest, opts) { + if (this[DESTROYED]) + return dest; + this[DISCARDED] = false; + const ended = this[EMITTED_END]; + opts = opts || {}; + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false; + else + opts.end = opts.end !== false; + opts.proxyErrors = !!opts.proxyErrors; + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end(); + } + else { + // "as" here just ignores the WType, which pipes don't care about, + // since they're only consuming from us, and writing to the dest + this[PIPES].push(!opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)); + if (this[ASYNC]) + defer(() => this[RESUME]()); + else + this[RESUME](); + } + return dest; + } + /** + * Fully unhook a piped destination stream. + * + * If the destination stream was the only consumer of this stream (ie, + * there are no other piped destinations or `'data'` event listeners) + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. 
+ */ + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest); + if (p) { + if (this[PIPES].length === 1) { + if (this[FLOWING] && this[DATALISTENERS] === 0) { + this[FLOWING] = false; + } + this[PIPES] = []; + } + else + this[PIPES].splice(this[PIPES].indexOf(p), 1); + p.unpipe(); + } + } + /** + * Alias for {@link Minipass#on} + */ + addListener(ev, handler) { + return this.on(ev, handler); + } + /** + * Mostly identical to `EventEmitter.on`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * - Adding a 'data' event handler will trigger the flow of data + * + * - Adding a 'readable' event handler when there is data waiting to be read + * will cause 'readable' to be emitted immediately. + * + * - Adding an 'endish' event handler ('end', 'finish', etc.) which has + * already passed will cause the event to be emitted immediately and all + * handlers removed. + * + * - Adding an 'error' event handler after an error has been emitted will + * cause the event to be re-emitted immediately with the error previously + * raised. + */ + on(ev, handler) { + const ret = super.on(ev, handler); + if (ev === 'data') { + this[DISCARDED] = false; + this[DATALISTENERS]++; + if (!this[PIPES].length && !this[FLOWING]) { + this[RESUME](); + } + } + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { + super.emit('readable'); + } + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev); + this.removeAllListeners(ev); + } + else if (ev === 'error' && this[EMITTED_ERROR]) { + const h = handler; + if (this[ASYNC]) + defer(() => h.call(this, this[EMITTED_ERROR])); + else + h.call(this, this[EMITTED_ERROR]); + } + return ret; + } + /** + * Alias for {@link Minipass#off} + */ + removeListener(ev, handler) { + return this.off(ev, handler); + } + /** + * Mostly identical to `EventEmitter.off` + * + * If a 'data' event handler is removed, and it was the last consumer + * (ie, there are no pipe destinations or other 'data' event listeners), + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + off(ev, handler) { + const ret = super.off(ev, handler); + // if we previously had listeners, and now we don't, and we don't + // have any pipes, then stop the flow, unless it's been explicitly + // put in a discarded flowing state via stream.resume(). + if (ev === 'data') { + this[DATALISTENERS] = this.listeners('data').length; + if (this[DATALISTENERS] === 0 && + !this[DISCARDED] && + !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * Mostly identical to `EventEmitter.removeAllListeners` + * + * If all 'data' event handlers are removed, and they were the last consumer + * (ie, there are no pipe destinations), then the flow of data will stop + * until there is another consumer or {@link Minipass#resume} is explicitly + * called. 
+ */ + removeAllListeners(ev) { + const ret = super.removeAllListeners(ev); + if (ev === 'data' || ev === undefined) { + this[DATALISTENERS] = 0; + if (!this[DISCARDED] && !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * true if the 'end' event has been emitted + */ + get emittedEnd() { + return this[EMITTED_END]; + } + [MAYBE_EMIT_END]() { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF]) { + this[EMITTING_END] = true; + this.emit('end'); + this.emit('prefinish'); + this.emit('finish'); + if (this[CLOSED]) + this.emit('close'); + this[EMITTING_END] = false; + } + } + /** + * Mostly identical to `EventEmitter.emit`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * If the stream has been destroyed, and the event is something other + * than 'close' or 'error', then `false` is returned and no handlers + * are called. + * + * If the event is 'end', and has already been emitted, then the event + * is ignored. If the stream is in a paused or non-flowing state, then + * the event will be deferred until data flow resumes. If the stream is + * async, then handlers will be called on the next tick rather than + * immediately. + * + * If the event is 'close', and 'end' has not yet been emitted, then + * the event will be deferred until after 'end' is emitted. + * + * If the event is 'error', and an AbortSignal was provided for the stream, + * and there are no listeners, then the event is ignored, matching the + * behavior of node core streams in the presense of an AbortSignal. + * + * If the event is 'finish' or 'prefinish', then all listeners will be + * removed after emitting the event, to prevent double-firing. + */ + emit(ev, ...args) { + const data = args[0]; + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && + ev !== 'close' && + ev !== DESTROYED && + this[DESTROYED]) { + return false; + } + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? (defer(() => this[EMITDATA](data)), true) + : this[EMITDATA](data); + } + else if (ev === 'end') { + return this[EMITEND](); + } + else if (ev === 'close') { + this[CLOSED] = true; + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return false; + const ret = super.emit('close'); + this.removeAllListeners('close'); + return ret; + } + else if (ev === 'error') { + this[EMITTED_ERROR] = data; + super.emit(ERROR, data); + const ret = !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false; + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'resume') { + const ret = super.emit('resume'); + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev); + this.removeAllListeners(ev); + return ret; + } + // Some other unknown event + const ret = super.emit(ev, ...args); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) + this.pause(); + } + const ret = this[DISCARDED] ? false : super.emit('data', data); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITEND]() { + if (this[EMITTED_END]) + return false; + this[EMITTED_END] = true; + this.readable = false; + return this[ASYNC] + ? 
(defer(() => this[EMITEND2]()), true) + : this[EMITEND2](); + } + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end(); + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data); + } + if (!this[DISCARDED]) + super.emit('data', data); + } + } + for (const p of this[PIPES]) { + p.end(); + } + const ret = super.emit('end'); + this.removeAllListeners('end'); + return ret; + } + /** + * Return a Promise that resolves to an array of all emitted data once + * the stream ends. + */ + async collect() { + const buf = Object.assign([], { + dataLength: 0, + }); + if (!this[OBJECTMODE]) + buf.dataLength = 0; + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise(); + this.on('data', c => { + buf.push(c); + if (!this[OBJECTMODE]) + buf.dataLength += c.length; + }); + await p; + return buf; + } + /** + * Return a Promise that resolves to the concatenation of all emitted data + * once the stream ends. + * + * Not allowed on objectMode streams. + */ + async concat() { + if (this[OBJECTMODE]) { + throw new Error('cannot concat in objectMode'); + } + const buf = await this.collect(); + return (this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength)); + } + /** + * Return a void Promise that resolves once the stream ends. + */ + async promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))); + this.on('error', er => reject(er)); + this.on('end', () => resolve()); + }); + } + /** + * Asynchronous `for await of` iteration. + * + * This will continue emitting all chunks until the stream terminates. + */ + [Symbol.asyncIterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. + this[DISCARDED] = false; + let stopped = false; + const stop = async () => { + this.pause(); + stopped = true; + return { value: undefined, done: true }; + }; + const next = () => { + if (stopped) + return stop(); + const res = this.read(); + if (res !== null) + return Promise.resolve({ done: false, value: res }); + if (this[EOF]) + return stop(); + let resolve; + let reject; + const onerr = (er) => { + this.off('data', ondata); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + stop(); + reject(er); + }; + const ondata = (value) => { + this.off('error', onerr); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + this.pause(); + resolve({ value, done: !!this[EOF] }); + }; + const onend = () => { + this.off('error', onerr); + this.off('data', ondata); + this.off(DESTROYED, ondestroy); + stop(); + resolve({ done: true, value: undefined }); + }; + const ondestroy = () => onerr(new Error('stream destroyed')); + return new Promise((res, rej) => { + reject = rej; + resolve = res; + this.once(DESTROYED, ondestroy); + this.once('error', onerr); + this.once('end', onend); + this.once('data', ondata); + }); + }; + return { + next, + throw: stop, + return: stop, + [Symbol.asyncIterator]() { + return this; + }, + }; + } + /** + * Synchronous `for of` iteration. + * + * The iteration will terminate when the internal buffer runs out, even + * if the stream has not yet terminated. + */ + [Symbol.iterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. 
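Both iterators drain the same internal buffer via read(), which concatenates whatever contiguous chunks are waiting. A short for-await sketch, assuming the published minipass package:

const { Minipass } = require('minipass')

const main = async () => {
  const src = new Minipass({ encoding: 'utf8' })
  src.write('a')
  src.write('b')
  src.end()
  for await (const chunk of src) {
    console.log(chunk) // 'ab' -- both buffered writes come out as one chunk
  }
}
main()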
+ this[DISCARDED] = false; + let stopped = false; + const stop = () => { + this.pause(); + this.off(ERROR, stop); + this.off(DESTROYED, stop); + this.off('end', stop); + stopped = true; + return { done: true, value: undefined }; + }; + const next = () => { + if (stopped) + return stop(); + const value = this.read(); + return value === null ? stop() : { done: false, value }; + }; + this.once('end', stop); + this.once(ERROR, stop); + this.once(DESTROYED, stop); + return { + next, + throw: stop, + return: stop, + [Symbol.iterator]() { + return this; + }, + }; + } + /** + * Destroy a stream, preventing it from being used for any further purpose. + * + * If the stream has a `close()` method, then it will be called on + * destruction. + * + * After destruction, any attempt to write data, read data, or emit most + * events will be ignored. + * + * If an error argument is provided, then it will be emitted in an + * 'error' event. + */ + destroy(er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er); + else + this.emit(DESTROYED); + return this; + } + this[DESTROYED] = true; + this[DISCARDED] = true; + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0; + this[BUFFERLENGTH] = 0; + const wc = this; + if (typeof wc.close === 'function' && !this[CLOSED]) + wc.close(); + if (er) + this.emit('error', er); + // if no error to emit, still reject pending promises + else + this.emit(DESTROYED); + return this; + } + /** + * Alias for {@link isStream} + * + * Former export location, maintained for backwards compatibility. + * + * @deprecated + */ + static get isStream() { + return exports.isStream; + } +} +exports.Minipass = Minipass; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/cacache/node_modules/minipass/dist/cjs/package.json b/node_modules/cacache/node_modules/minipass/dist/cjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/cacache/node_modules/minipass/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/cacache/node_modules/minipass/dist/mjs/index.js b/node_modules/cacache/node_modules/minipass/dist/mjs/index.js new file mode 100644 index 0000000000000..b65fafbae43a4 --- /dev/null +++ b/node_modules/cacache/node_modules/minipass/dist/mjs/index.js @@ -0,0 +1,1018 @@ +const proc = typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + }; +import { EventEmitter } from 'events'; +import Stream from 'stream'; +import { StringDecoder } from 'string_decoder'; +/** + * Return true if the argument is a Minipass stream, Node stream, or something + * else that Minipass can interact with. 
+ */ +export const isStream = (s) => !!s && + typeof s === 'object' && + (s instanceof Minipass || + s instanceof Stream || + isReadable(s) || + isWritable(s)); +/** + * Return true if the argument is a valid {@link Minipass.Readable} + */ +export const isReadable = (s) => !!s && + typeof s === 'object' && + s instanceof EventEmitter && + typeof s.pipe === 'function' && + // node core Writable streams have a pipe() method, but it throws + s.pipe !== Stream.Writable.prototype.pipe; +/** + * Return true if the argument is a valid {@link Minipass.Writable} + */ +export const isWritable = (s) => !!s && + typeof s === 'object' && + s instanceof EventEmitter && + typeof s.write === 'function' && + typeof s.end === 'function'; +const EOF = Symbol('EOF'); +const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); +const EMITTED_END = Symbol('emittedEnd'); +const EMITTING_END = Symbol('emittingEnd'); +const EMITTED_ERROR = Symbol('emittedError'); +const CLOSED = Symbol('closed'); +const READ = Symbol('read'); +const FLUSH = Symbol('flush'); +const FLUSHCHUNK = Symbol('flushChunk'); +const ENCODING = Symbol('encoding'); +const DECODER = Symbol('decoder'); +const FLOWING = Symbol('flowing'); +const PAUSED = Symbol('paused'); +const RESUME = Symbol('resume'); +const BUFFER = Symbol('buffer'); +const PIPES = Symbol('pipes'); +const BUFFERLENGTH = Symbol('bufferLength'); +const BUFFERPUSH = Symbol('bufferPush'); +const BUFFERSHIFT = Symbol('bufferShift'); +const OBJECTMODE = Symbol('objectMode'); +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed'); +// internal event when stream has an error +const ERROR = Symbol('error'); +const EMITDATA = Symbol('emitData'); +const EMITEND = Symbol('emitEnd'); +const EMITEND2 = Symbol('emitEnd2'); +const ASYNC = Symbol('async'); +const ABORT = Symbol('abort'); +const ABORTED = Symbol('aborted'); +const SIGNAL = Symbol('signal'); +const DATALISTENERS = Symbol('dataListeners'); +const DISCARDED = Symbol('discarded'); +const defer = (fn) => Promise.resolve().then(fn); +const nodefer = (fn) => fn(); +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; +const isArrayBufferLike = (b) => b instanceof ArrayBuffer || + (!!b && + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0); +const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); +/** + * Internal class representing a pipe to a destination stream. + * + * @internal + */ +class Pipe { + src; + dest; + opts; + ondrain; + constructor(src, dest, opts) { + this.src = src; + this.dest = dest; + this.opts = opts; + this.ondrain = () => src[RESUME](); + this.dest.on('drain', this.ondrain); + } + unpipe() { + this.dest.removeListener('drain', this.ondrain); + } + // only here for the prototype + /* c8 ignore start */ + proxyErrors(_er) { } + /* c8 ignore stop */ + end() { + this.unpipe(); + if (this.opts.end) + this.dest.end(); + } +} +/** + * Internal class representing a pipe to a destination stream where + * errors are proxied. 
+ * + * @internal + */ +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors); + super.unpipe(); + } + constructor(src, dest, opts) { + super(src, dest, opts); + this.proxyErrors = er => dest.emit('error', er); + src.on('error', this.proxyErrors); + } +} +const isObjectModeOptions = (o) => !!o.objectMode; +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; +/** + * Main export, the Minipass class + * + * `RType` is the type of data emitted, defaults to Buffer + * + * `WType` is the type of data to be written, if RType is buffer or string, + * then any {@link Minipass.ContiguousData} is allowed. + * + * `Events` is the set of event handler signatures that this object + * will emit, see {@link Minipass.Events} + */ +export class Minipass extends EventEmitter { + [FLOWING] = false; + [PAUSED] = false; + [PIPES] = []; + [BUFFER] = []; + [OBJECTMODE]; + [ENCODING]; + [ASYNC]; + [DECODER]; + [EOF] = false; + [EMITTED_END] = false; + [EMITTING_END] = false; + [CLOSED] = false; + [EMITTED_ERROR] = null; + [BUFFERLENGTH] = 0; + [DESTROYED] = false; + [SIGNAL]; + [ABORTED] = false; + [DATALISTENERS] = 0; + [DISCARDED] = false; + /** + * true if the stream can be written + */ + writable = true; + /** + * true if the stream can be read + */ + readable = true; + /** + * If `RType` is Buffer, then options do not need to be provided. + * Otherwise, an options object must be provided to specify either + * {@link Minipass.SharedOptions.objectMode} or + * {@link Minipass.SharedOptions.encoding}, as appropriate. + */ + constructor(...args) { + const options = (args[0] || + {}); + super(); + if (options.objectMode && typeof options.encoding === 'string') { + throw new TypeError('Encoding and objectMode may not be used together'); + } + if (isObjectModeOptions(options)) { + this[OBJECTMODE] = true; + this[ENCODING] = null; + } + else if (isEncodingOptions(options)) { + this[ENCODING] = options.encoding; + this[OBJECTMODE] = false; + } + else { + this[OBJECTMODE] = false; + this[ENCODING] = null; + } + this[ASYNC] = !!options.async; + this[DECODER] = this[ENCODING] + ? new StringDecoder(this[ENCODING]) + : null; + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); + } + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); + } + const { signal } = options; + if (signal) { + this[SIGNAL] = signal; + if (signal.aborted) { + this[ABORT](); + } + else { + signal.addEventListener('abort', () => this[ABORT]()); + } + } + } + /** + * The amount of data stored in the buffer waiting to be read. + * + * For Buffer strings, this will be the total byte length. + * For string encoding streams, this will be the string character length, + * according to JavaScript's `string.length` logic. + * For objectMode streams, this is a count of the items waiting to be + * emitted. 
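+ *
+ * Editorial sketch (not upstream): with an encoding set, the count is in
+ * characters rather than bytes:
+ *
+ *     const mp = new Minipass({ encoding: 'utf8' })
+ *     mp.write('hello')
+ *     mp.bufferLength // 5, buffered until something reads it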
+ */ + get bufferLength() { + return this[BUFFERLENGTH]; + } + /** + * The `BufferEncoding` currently in use, or `null` + */ + get encoding() { + return this[ENCODING]; + } + /** + * @deprecated - This is a read only property + */ + set encoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * @deprecated - Encoding may only be set at instantiation time + */ + setEncoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * True if this is an objectMode stream + */ + get objectMode() { + return this[OBJECTMODE]; + } + /** + * @deprecated - This is a read-only property + */ + set objectMode(_om) { + throw new Error('objectMode must be set at instantiation time'); + } + /** + * true if this is an async stream + */ + get ['async']() { + return this[ASYNC]; + } + /** + * Set to true to make this stream async. + * + * Once set, it cannot be unset, as this would potentially cause incorrect + * behavior. Ie, a sync stream can be made async, but an async stream + * cannot be safely made sync. + */ + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a; + } + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true; + this.emit('abort', this[SIGNAL]?.reason); + this.destroy(this[SIGNAL]?.reason); + } + /** + * True if the stream has been aborted. + */ + get aborted() { + return this[ABORTED]; + } + /** + * No-op setter. Stream aborted status is set via the AbortSignal provided + * in the constructor options. + */ + set aborted(_) { } + write(chunk, encoding, cb) { + if (this[ABORTED]) + return false; + if (this[EOF]) + throw new Error('write after end'); + if (this[DESTROYED]) { + this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); + return true; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (!encoding) + encoding = 'utf8'; + const fn = this[ASYNC] ? defer : nodefer; + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything is only allowed if in object mode, so throw + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); + } + else if (isArrayBufferLike(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk); + } + else if (typeof chunk !== 'string') { + throw new Error('Non-contiguous data written to non-objectMode stream'); + } + } + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + // maybe impossible? 
+ /* c8 ignore start */ + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + /* c8 ignore stop */ + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { + //@ts-ignore - sinful unsafe type change + chunk = Buffer.from(chunk, encoding); + } + if (Buffer.isBuffer(chunk) && this[ENCODING]) { + //@ts-ignore - sinful unsafe type change + chunk = this[DECODER].write(chunk); + } + // Note: flushing CAN potentially switch us into not-flowing mode + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + /** + * Low-level explicit read method. + * + * In objectMode, the argument is ignored, and one item is returned if + * available. + * + * `n` is the number of bytes (or in the case of encoding streams, + * characters) to consume. If `n` is not provided, then the entire buffer + * is returned, or `null` is returned if no data is available. + * + * If `n` is greater that the amount of data in the internal buffer, + * then `null` is returned. + */ + read(n) { + if (this[DESTROYED]) + return null; + this[DISCARDED] = false; + if (this[BUFFERLENGTH] === 0 || + n === 0 || + (n && n > this[BUFFERLENGTH])) { + this[MAYBE_EMIT_END](); + return null; + } + if (this[OBJECTMODE]) + n = null; + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + // not object mode, so if we have an encoding, then RType is string + // otherwise, must be Buffer + this[BUFFER] = [ + (this[ENCODING] + ? this[BUFFER].join('') + : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), + ]; + } + const ret = this[READ](n || null, this[BUFFER][0]); + this[MAYBE_EMIT_END](); + return ret; + } + [READ](n, chunk) { + if (this[OBJECTMODE]) + this[BUFFERSHIFT](); + else { + const c = chunk; + if (n === c.length || n === null) + this[BUFFERSHIFT](); + else if (typeof c === 'string') { + this[BUFFER][0] = c.slice(n); + chunk = c.slice(0, n); + this[BUFFERLENGTH] -= n; + } + else { + this[BUFFER][0] = c.subarray(n); + chunk = c.subarray(0, n); + this[BUFFERLENGTH] -= n; + } + } + this.emit('data', chunk); + if (!this[BUFFER].length && !this[EOF]) + this.emit('drain'); + return chunk; + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (chunk !== undefined) + this.write(chunk, encoding); + if (cb) + this.once('end', cb); + this[EOF] = true; + this.writable = false; + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
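+ // Editorial sketch (not upstream): new Minipass().end() fires 'end'
+ // with no reader attached, and a listener added afterwards is still
+ // called, since endish events re-emit for late subscribers (see on()).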
+ if (this[FLOWING] || !this[PAUSED]) + this[MAYBE_EMIT_END](); + return this; + } + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) + return; + if (!this[DATALISTENERS] && !this[PIPES].length) { + this[DISCARDED] = true; + } + this[PAUSED] = false; + this[FLOWING] = true; + this.emit('resume'); + if (this[BUFFER].length) + this[FLUSH](); + else if (this[EOF]) + this[MAYBE_EMIT_END](); + else + this.emit('drain'); + } + /** + * Resume the stream if it is currently in a paused state + * + * If called when there are no pipe destinations or `data` event listeners, + * this will place the stream in a "discarded" state, where all data will + * be thrown away. The discarded state is removed if a pipe destination or + * data handler is added, if pause() is called, or if any synchronous or + * asynchronous iteration is started. + */ + resume() { + return this[RESUME](); + } + /** + * Pause the stream + */ + pause() { + this[FLOWING] = false; + this[PAUSED] = true; + this[DISCARDED] = false; + } + /** + * true if the stream has been forcibly destroyed + */ + get destroyed() { + return this[DESTROYED]; + } + /** + * true if the stream is currently in a flowing state, meaning that + * any writes will be immediately emitted. + */ + get flowing() { + return this[FLOWING]; + } + /** + * true if the stream is currently in a paused state + */ + get paused() { + return this[PAUSED]; + } + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1; + else + this[BUFFERLENGTH] += chunk.length; + this[BUFFER].push(chunk); + } + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1; + else + this[BUFFERLENGTH] -= this[BUFFER][0].length; + return this[BUFFER].shift(); + } + [FLUSH](noDrain = false) { + do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && + this[BUFFER].length); + if (!noDrain && !this[BUFFER].length && !this[EOF]) + this.emit('drain'); + } + [FLUSHCHUNK](chunk) { + this.emit('data', chunk); + return this[FLOWING]; + } + /** + * Pipe all data emitted by this stream into the destination provided. + * + * Triggers the flow of data. + */ + pipe(dest, opts) { + if (this[DESTROYED]) + return dest; + this[DISCARDED] = false; + const ended = this[EMITTED_END]; + opts = opts || {}; + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false; + else + opts.end = opts.end !== false; + opts.proxyErrors = !!opts.proxyErrors; + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end(); + } + else { + // "as" here just ignores the WType, which pipes don't care about, + // since they're only consuming from us, and writing to the dest + this[PIPES].push(!opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)); + if (this[ASYNC]) + defer(() => this[RESUME]()); + else + this[RESUME](); + } + return dest; + } + /** + * Fully unhook a piped destination stream. + * + * If the destination stream was the only consumer of this stream (ie, + * there are no other piped destinations or `'data'` event listeners) + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. 
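+ *
+ * Editorial sketch (not upstream; src and dest are hypothetical):
+ *
+ *     src.pipe(dest)    // starts the flow
+ *     src.unpipe(dest)  // flow stops if dest was the only consumer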
+ */ + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest); + if (p) { + if (this[PIPES].length === 1) { + if (this[FLOWING] && this[DATALISTENERS] === 0) { + this[FLOWING] = false; + } + this[PIPES] = []; + } + else + this[PIPES].splice(this[PIPES].indexOf(p), 1); + p.unpipe(); + } + } + /** + * Alias for {@link Minipass#on} + */ + addListener(ev, handler) { + return this.on(ev, handler); + } + /** + * Mostly identical to `EventEmitter.on`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * - Adding a 'data' event handler will trigger the flow of data + * + * - Adding a 'readable' event handler when there is data waiting to be read + * will cause 'readable' to be emitted immediately. + * + * - Adding an 'endish' event handler ('end', 'finish', etc.) which has + * already passed will cause the event to be emitted immediately and all + * handlers removed. + * + * - Adding an 'error' event handler after an error has been emitted will + * cause the event to be re-emitted immediately with the error previously + * raised. + */ + on(ev, handler) { + const ret = super.on(ev, handler); + if (ev === 'data') { + this[DISCARDED] = false; + this[DATALISTENERS]++; + if (!this[PIPES].length && !this[FLOWING]) { + this[RESUME](); + } + } + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { + super.emit('readable'); + } + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev); + this.removeAllListeners(ev); + } + else if (ev === 'error' && this[EMITTED_ERROR]) { + const h = handler; + if (this[ASYNC]) + defer(() => h.call(this, this[EMITTED_ERROR])); + else + h.call(this, this[EMITTED_ERROR]); + } + return ret; + } + /** + * Alias for {@link Minipass#off} + */ + removeListener(ev, handler) { + return this.off(ev, handler); + } + /** + * Mostly identical to `EventEmitter.off` + * + * If a 'data' event handler is removed, and it was the last consumer + * (ie, there are no pipe destinations or other 'data' event listeners), + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + off(ev, handler) { + const ret = super.off(ev, handler); + // if we previously had listeners, and now we don't, and we don't + // have any pipes, then stop the flow, unless it's been explicitly + // put in a discarded flowing state via stream.resume(). + if (ev === 'data') { + this[DATALISTENERS] = this.listeners('data').length; + if (this[DATALISTENERS] === 0 && + !this[DISCARDED] && + !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * Mostly identical to `EventEmitter.removeAllListeners` + * + * If all 'data' event handlers are removed, and they were the last consumer + * (ie, there are no pipe destinations), then the flow of data will stop + * until there is another consumer or {@link Minipass#resume} is explicitly + * called. 
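+ *
+ * Editorial sketch (not upstream; mp and handler are hypothetical):
+ *
+ *     mp.on('data', handler)        // triggers the flow of data
+ *     mp.removeAllListeners('data') // flow stops; writes buffer again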
+ */ + removeAllListeners(ev) { + const ret = super.removeAllListeners(ev); + if (ev === 'data' || ev === undefined) { + this[DATALISTENERS] = 0; + if (!this[DISCARDED] && !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * true if the 'end' event has been emitted + */ + get emittedEnd() { + return this[EMITTED_END]; + } + [MAYBE_EMIT_END]() { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF]) { + this[EMITTING_END] = true; + this.emit('end'); + this.emit('prefinish'); + this.emit('finish'); + if (this[CLOSED]) + this.emit('close'); + this[EMITTING_END] = false; + } + } + /** + * Mostly identical to `EventEmitter.emit`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * If the stream has been destroyed, and the event is something other + * than 'close' or 'error', then `false` is returned and no handlers + * are called. + * + * If the event is 'end', and has already been emitted, then the event + * is ignored. If the stream is in a paused or non-flowing state, then + * the event will be deferred until data flow resumes. If the stream is + * async, then handlers will be called on the next tick rather than + * immediately. + * + * If the event is 'close', and 'end' has not yet been emitted, then + * the event will be deferred until after 'end' is emitted. + * + * If the event is 'error', and an AbortSignal was provided for the stream, + * and there are no listeners, then the event is ignored, matching the + * behavior of node core streams in the presense of an AbortSignal. + * + * If the event is 'finish' or 'prefinish', then all listeners will be + * removed after emitting the event, to prevent double-firing. + */ + emit(ev, ...args) { + const data = args[0]; + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && + ev !== 'close' && + ev !== DESTROYED && + this[DESTROYED]) { + return false; + } + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? (defer(() => this[EMITDATA](data)), true) + : this[EMITDATA](data); + } + else if (ev === 'end') { + return this[EMITEND](); + } + else if (ev === 'close') { + this[CLOSED] = true; + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return false; + const ret = super.emit('close'); + this.removeAllListeners('close'); + return ret; + } + else if (ev === 'error') { + this[EMITTED_ERROR] = data; + super.emit(ERROR, data); + const ret = !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false; + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'resume') { + const ret = super.emit('resume'); + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev); + this.removeAllListeners(ev); + return ret; + } + // Some other unknown event + const ret = super.emit(ev, ...args); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) + this.pause(); + } + const ret = this[DISCARDED] ? false : super.emit('data', data); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITEND]() { + if (this[EMITTED_END]) + return false; + this[EMITTED_END] = true; + this.readable = false; + return this[ASYNC] + ? 
(defer(() => this[EMITEND2]()), true) + : this[EMITEND2](); + } + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end(); + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data); + } + if (!this[DISCARDED]) + super.emit('data', data); + } + } + for (const p of this[PIPES]) { + p.end(); + } + const ret = super.emit('end'); + this.removeAllListeners('end'); + return ret; + } + /** + * Return a Promise that resolves to an array of all emitted data once + * the stream ends. + */ + async collect() { + const buf = Object.assign([], { + dataLength: 0, + }); + if (!this[OBJECTMODE]) + buf.dataLength = 0; + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise(); + this.on('data', c => { + buf.push(c); + if (!this[OBJECTMODE]) + buf.dataLength += c.length; + }); + await p; + return buf; + } + /** + * Return a Promise that resolves to the concatenation of all emitted data + * once the stream ends. + * + * Not allowed on objectMode streams. + */ + async concat() { + if (this[OBJECTMODE]) { + throw new Error('cannot concat in objectMode'); + } + const buf = await this.collect(); + return (this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength)); + } + /** + * Return a void Promise that resolves once the stream ends. + */ + async promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))); + this.on('error', er => reject(er)); + this.on('end', () => resolve()); + }); + } + /** + * Asynchronous `for await of` iteration. + * + * This will continue emitting all chunks until the stream terminates. + */ + [Symbol.asyncIterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. + this[DISCARDED] = false; + let stopped = false; + const stop = async () => { + this.pause(); + stopped = true; + return { value: undefined, done: true }; + }; + const next = () => { + if (stopped) + return stop(); + const res = this.read(); + if (res !== null) + return Promise.resolve({ done: false, value: res }); + if (this[EOF]) + return stop(); + let resolve; + let reject; + const onerr = (er) => { + this.off('data', ondata); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + stop(); + reject(er); + }; + const ondata = (value) => { + this.off('error', onerr); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + this.pause(); + resolve({ value, done: !!this[EOF] }); + }; + const onend = () => { + this.off('error', onerr); + this.off('data', ondata); + this.off(DESTROYED, ondestroy); + stop(); + resolve({ done: true, value: undefined }); + }; + const ondestroy = () => onerr(new Error('stream destroyed')); + return new Promise((res, rej) => { + reject = rej; + resolve = res; + this.once(DESTROYED, ondestroy); + this.once('error', onerr); + this.once('end', onend); + this.once('data', ondata); + }); + }; + return { + next, + throw: stop, + return: stop, + [Symbol.asyncIterator]() { + return this; + }, + }; + } + /** + * Synchronous `for of` iteration. + * + * The iteration will terminate when the internal buffer runs out, even + * if the stream has not yet terminated. + */ + [Symbol.iterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. 
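+ // Editorial sketch (not upstream), shown in objectMode:
+ //   const mp = new Minipass({ objectMode: true })
+ //   mp.write(1); mp.write(2)
+ //   [...mp] // [1, 2] -- stops as soon as the buffer runs dry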
+ this[DISCARDED] = false; + let stopped = false; + const stop = () => { + this.pause(); + this.off(ERROR, stop); + this.off(DESTROYED, stop); + this.off('end', stop); + stopped = true; + return { done: true, value: undefined }; + }; + const next = () => { + if (stopped) + return stop(); + const value = this.read(); + return value === null ? stop() : { done: false, value }; + }; + this.once('end', stop); + this.once(ERROR, stop); + this.once(DESTROYED, stop); + return { + next, + throw: stop, + return: stop, + [Symbol.iterator]() { + return this; + }, + }; + } + /** + * Destroy a stream, preventing it from being used for any further purpose. + * + * If the stream has a `close()` method, then it will be called on + * destruction. + * + * After destruction, any attempt to write data, read data, or emit most + * events will be ignored. + * + * If an error argument is provided, then it will be emitted in an + * 'error' event. + */ + destroy(er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er); + else + this.emit(DESTROYED); + return this; + } + this[DESTROYED] = true; + this[DISCARDED] = true; + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0; + this[BUFFERLENGTH] = 0; + const wc = this; + if (typeof wc.close === 'function' && !this[CLOSED]) + wc.close(); + if (er) + this.emit('error', er); + // if no error to emit, still reject pending promises + else + this.emit(DESTROYED); + return this; + } + /** + * Alias for {@link isStream} + * + * Former export location, maintained for backwards compatibility. + * + * @deprecated + */ + static get isStream() { + return isStream; + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/cacache/node_modules/minipass/dist/mjs/package.json b/node_modules/cacache/node_modules/minipass/dist/mjs/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/cacache/node_modules/minipass/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/cacache/node_modules/minipass/package.json b/node_modules/cacache/node_modules/minipass/package.json new file mode 100644 index 0000000000000..6faaa247a5bc6 --- /dev/null +++ b/node_modules/cacache/node_modules/minipass/package.json @@ -0,0 +1,82 @@ +{ + "name": "minipass", + "version": "7.0.3", + "description": "minimal implementation of a PassThrough stream", + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/cjs/index.js", + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./package.json": "./package.json" + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . 
--loglevel warn", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--enable-source-maps", + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/node": "^20.1.2", + "@types/tap": "^15.0.8", + "c8": "^7.13.0", + "prettier": "^2.6.2", + "tap": "^16.3.0", + "ts-node": "^10.9.1", + "typedoc": "^0.24.8", + "typescript": "^5.1.3", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "sync-content": "^1.0.2", + "through2": "^2.0.3" + }, + "repository": "https://github.com/isaacs/minipass", + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } +} diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json index a6f6f9bdfc465..ab58cb8b7c50f 100644 --- a/node_modules/cacache/package.json +++ b/node_modules/cacache/package.json @@ -1,6 +1,6 @@ { "name": "cacache", - "version": "17.1.3", + "version": "17.1.4", "cache-version": { "content": "2", "index": "5" @@ -49,7 +49,7 @@ "fs-minipass": "^3.0.0", "glob": "^10.2.2", "lru-cache": "^7.7.1", - "minipass": "^5.0.0", + "minipass": "^7.0.3", "minipass-collect": "^1.0.2", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", @@ -60,7 +60,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.15.1", + "@npmcli/template-oss": "4.18.0", "tap": "^16.0.0" }, "engines": { @@ -69,7 +69,7 @@ "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "windowsCI": false, - "version": "4.15.1", + "version": "4.18.0", "publish": "true" }, "author": "GitHub Inc.", diff --git a/package-lock.json b/package-lock.json index 0ce1bd1f5959f..b142b69964523 100644 --- a/package-lock.json +++ b/package-lock.json @@ -96,7 +96,7 @@ "@npmcli/run-script": "^6.0.2", "abbrev": "^2.0.0", "archy": "~1.0.0", - "cacache": "^17.1.3", + "cacache": "^17.1.4", "chalk": "^5.3.0", "ci-info": "^3.8.0", "cli-columns": "^4.0.0", @@ -3544,16 +3544,16 @@ } }, "node_modules/cacache": { - "version": "17.1.3", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.3.tgz", - "integrity": "sha512-jAdjGxmPxZh0IipMdR7fK/4sDSrHMLUV0+GvVUsjwyGNKHsh79kW/otg+GkbXwl6Uzvy9wsvHOX4nUoWldeZMg==", + "version": "17.1.4", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz", + "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==", "inBundle": true, "dependencies": { "@npmcli/fs": "^3.1.0", "fs-minipass": "^3.0.0", "glob": "^10.2.2", "lru-cache": "^7.7.1", - "minipass": "^5.0.0", + "minipass": "^7.0.3", "minipass-collect": "^1.0.2", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", @@ -3566,6 +3566,15 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/cacache/node_modules/minipass": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz", + "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==", + "inBundle": true, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, "node_modules/caching-transform": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz", @@ -15721,7 +15730,7 @@ "@npmcli/query": "^3.0.0", "@npmcli/run-script": "^6.0.0", "bin-links": "^4.0.1", - "cacache": "^17.0.4", + "cacache": "^17.1.4", "common-ancestor-path": "^1.0.1", "hosted-git-info": "^6.1.1", "json-parse-even-better-errors": "^3.0.0", diff --git a/package.json b/package.json index 76ff7b31596cd..5284f7147b545 100644 --- a/package.json +++ b/package.json @@ -61,7 +61,7 @@ "@npmcli/run-script": "^6.0.2", "abbrev": "^2.0.0", "archy": "~1.0.0", - "cacache": "^17.1.3", + "cacache": "^17.1.4", "chalk": "^5.3.0", "ci-info": "^3.8.0", "cli-columns": "^4.0.0", diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index 07ce5a75fb8aa..80aa885c0857b 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -14,7 +14,7 @@ "@npmcli/query": "^3.0.0", "@npmcli/run-script": "^6.0.0", "bin-links": "^4.0.1", - "cacache": "^17.0.4", + "cacache": "^17.1.4", "common-ancestor-path": "^1.0.1", "hosted-git-info": "^6.1.1", "json-parse-even-better-errors": "^3.0.0", From 39b4043e749636c6a71c610f9bb132d4b5534c9d Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Mon, 14 Aug 2023 12:07:07 -0700 Subject: [PATCH 19/68] deps: fs-minipass@3.0.3 --- node_modules/.gitignore | 3 + .../fs-minipass/node_modules/minipass/LICENSE | 15 + .../node_modules/minipass/dist/cjs/index.js | 1028 +++++++++++++++++ .../minipass/dist/cjs/package.json | 3 + .../node_modules/minipass/dist/mjs/index.js | 1018 ++++++++++++++++ .../minipass/dist/mjs/package.json | 3 + .../node_modules/minipass/package.json | 82 ++ node_modules/fs-minipass/package.json | 8 +- package-lock.json | 19 +- package.json | 2 +- 10 files changed, 2171 insertions(+), 10 deletions(-) create mode 100644 
node_modules/fs-minipass/node_modules/minipass/LICENSE create mode 100644 node_modules/fs-minipass/node_modules/minipass/dist/cjs/index.js create mode 100644 node_modules/fs-minipass/node_modules/minipass/dist/cjs/package.json create mode 100644 node_modules/fs-minipass/node_modules/minipass/dist/mjs/index.js create mode 100644 node_modules/fs-minipass/node_modules/minipass/dist/mjs/package.json create mode 100644 node_modules/fs-minipass/node_modules/minipass/package.json diff --git a/node_modules/.gitignore b/node_modules/.gitignore index b2f88a3100367..d0134e49ddd93 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -110,6 +110,9 @@ !/fastest-levenshtein !/foreground-child !/fs-minipass +!/fs-minipass/node_modules/ +/fs-minipass/node_modules/* +!/fs-minipass/node_modules/minipass !/fs.realpath !/function-bind !/gauge diff --git a/node_modules/fs-minipass/node_modules/minipass/LICENSE b/node_modules/fs-minipass/node_modules/minipass/LICENSE new file mode 100644 index 0000000000000..97f8e32ed82e4 --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fs-minipass/node_modules/minipass/dist/cjs/index.js b/node_modules/fs-minipass/node_modules/minipass/dist/cjs/index.js new file mode 100644 index 0000000000000..b6cdae8eb514b --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/dist/cjs/index.js @@ -0,0 +1,1028 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0; +const proc = typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + }; +const events_1 = require("events"); +const stream_1 = __importDefault(require("stream")); +const string_decoder_1 = require("string_decoder"); +/** + * Return true if the argument is a Minipass stream, Node stream, or something + * else that Minipass can interact with. 
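+ *
+ * Editorial sketch (not upstream): this CommonJS build is what require()
+ * resolves to through the package's exports map:
+ *
+ *     const { isStream } = require('minipass')
+ *     isStream(require('fs').createReadStream(__filename)) // true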
+ */ +const isStream = (s) => !!s && + typeof s === 'object' && + (s instanceof Minipass || + s instanceof stream_1.default || + (0, exports.isReadable)(s) || + (0, exports.isWritable)(s)); +exports.isStream = isStream; +/** + * Return true if the argument is a valid {@link Minipass.Readable} + */ +const isReadable = (s) => !!s && + typeof s === 'object' && + s instanceof events_1.EventEmitter && + typeof s.pipe === 'function' && + // node core Writable streams have a pipe() method, but it throws + s.pipe !== stream_1.default.Writable.prototype.pipe; +exports.isReadable = isReadable; +/** + * Return true if the argument is a valid {@link Minipass.Writable} + */ +const isWritable = (s) => !!s && + typeof s === 'object' && + s instanceof events_1.EventEmitter && + typeof s.write === 'function' && + typeof s.end === 'function'; +exports.isWritable = isWritable; +const EOF = Symbol('EOF'); +const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); +const EMITTED_END = Symbol('emittedEnd'); +const EMITTING_END = Symbol('emittingEnd'); +const EMITTED_ERROR = Symbol('emittedError'); +const CLOSED = Symbol('closed'); +const READ = Symbol('read'); +const FLUSH = Symbol('flush'); +const FLUSHCHUNK = Symbol('flushChunk'); +const ENCODING = Symbol('encoding'); +const DECODER = Symbol('decoder'); +const FLOWING = Symbol('flowing'); +const PAUSED = Symbol('paused'); +const RESUME = Symbol('resume'); +const BUFFER = Symbol('buffer'); +const PIPES = Symbol('pipes'); +const BUFFERLENGTH = Symbol('bufferLength'); +const BUFFERPUSH = Symbol('bufferPush'); +const BUFFERSHIFT = Symbol('bufferShift'); +const OBJECTMODE = Symbol('objectMode'); +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed'); +// internal event when stream has an error +const ERROR = Symbol('error'); +const EMITDATA = Symbol('emitData'); +const EMITEND = Symbol('emitEnd'); +const EMITEND2 = Symbol('emitEnd2'); +const ASYNC = Symbol('async'); +const ABORT = Symbol('abort'); +const ABORTED = Symbol('aborted'); +const SIGNAL = Symbol('signal'); +const DATALISTENERS = Symbol('dataListeners'); +const DISCARDED = Symbol('discarded'); +const defer = (fn) => Promise.resolve().then(fn); +const nodefer = (fn) => fn(); +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; +const isArrayBufferLike = (b) => b instanceof ArrayBuffer || + (!!b && + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0); +const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); +/** + * Internal class representing a pipe to a destination stream. + * + * @internal + */ +class Pipe { + src; + dest; + opts; + ondrain; + constructor(src, dest, opts) { + this.src = src; + this.dest = dest; + this.opts = opts; + this.ondrain = () => src[RESUME](); + this.dest.on('drain', this.ondrain); + } + unpipe() { + this.dest.removeListener('drain', this.ondrain); + } + // only here for the prototype + /* c8 ignore start */ + proxyErrors(_er) { } + /* c8 ignore stop */ + end() { + this.unpipe(); + if (this.opts.end) + this.dest.end(); + } +} +/** + * Internal class representing a pipe to a destination stream where + * errors are proxied. 
+ * + * @internal + */ +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors); + super.unpipe(); + } + constructor(src, dest, opts) { + super(src, dest, opts); + this.proxyErrors = er => dest.emit('error', er); + src.on('error', this.proxyErrors); + } +} +const isObjectModeOptions = (o) => !!o.objectMode; +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; +/** + * Main export, the Minipass class + * + * `RType` is the type of data emitted, defaults to Buffer + * + * `WType` is the type of data to be written, if RType is buffer or string, + * then any {@link Minipass.ContiguousData} is allowed. + * + * `Events` is the set of event handler signatures that this object + * will emit, see {@link Minipass.Events} + */ +class Minipass extends events_1.EventEmitter { + [FLOWING] = false; + [PAUSED] = false; + [PIPES] = []; + [BUFFER] = []; + [OBJECTMODE]; + [ENCODING]; + [ASYNC]; + [DECODER]; + [EOF] = false; + [EMITTED_END] = false; + [EMITTING_END] = false; + [CLOSED] = false; + [EMITTED_ERROR] = null; + [BUFFERLENGTH] = 0; + [DESTROYED] = false; + [SIGNAL]; + [ABORTED] = false; + [DATALISTENERS] = 0; + [DISCARDED] = false; + /** + * true if the stream can be written + */ + writable = true; + /** + * true if the stream can be read + */ + readable = true; + /** + * If `RType` is Buffer, then options do not need to be provided. + * Otherwise, an options object must be provided to specify either + * {@link Minipass.SharedOptions.objectMode} or + * {@link Minipass.SharedOptions.encoding}, as appropriate. + */ + constructor(...args) { + const options = (args[0] || + {}); + super(); + if (options.objectMode && typeof options.encoding === 'string') { + throw new TypeError('Encoding and objectMode may not be used together'); + } + if (isObjectModeOptions(options)) { + this[OBJECTMODE] = true; + this[ENCODING] = null; + } + else if (isEncodingOptions(options)) { + this[ENCODING] = options.encoding; + this[OBJECTMODE] = false; + } + else { + this[OBJECTMODE] = false; + this[ENCODING] = null; + } + this[ASYNC] = !!options.async; + this[DECODER] = this[ENCODING] + ? new string_decoder_1.StringDecoder(this[ENCODING]) + : null; + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); + } + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); + } + const { signal } = options; + if (signal) { + this[SIGNAL] = signal; + if (signal.aborted) { + this[ABORT](); + } + else { + signal.addEventListener('abort', () => this[ABORT]()); + } + } + } + /** + * The amount of data stored in the buffer waiting to be read. + * + * For Buffer strings, this will be the total byte length. + * For string encoding streams, this will be the string character length, + * according to JavaScript's `string.length` logic. + * For objectMode streams, this is a count of the items waiting to be + * emitted. 
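+ *
+ * Editorial sketch (not upstream): in objectMode the count is in items:
+ *
+ *     const mp = new Minipass({ objectMode: true })
+ *     mp.write({ a: 1 }); mp.write({ b: 2 })
+ *     mp.bufferLength // 2 items waiting to be read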
+ */ + get bufferLength() { + return this[BUFFERLENGTH]; + } + /** + * The `BufferEncoding` currently in use, or `null` + */ + get encoding() { + return this[ENCODING]; + } + /** + * @deprecated - This is a read only property + */ + set encoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * @deprecated - Encoding may only be set at instantiation time + */ + setEncoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * True if this is an objectMode stream + */ + get objectMode() { + return this[OBJECTMODE]; + } + /** + * @deprecated - This is a read-only property + */ + set objectMode(_om) { + throw new Error('objectMode must be set at instantiation time'); + } + /** + * true if this is an async stream + */ + get ['async']() { + return this[ASYNC]; + } + /** + * Set to true to make this stream async. + * + * Once set, it cannot be unset, as this would potentially cause incorrect + * behavior. Ie, a sync stream can be made async, but an async stream + * cannot be safely made sync. + */ + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a; + } + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true; + this.emit('abort', this[SIGNAL]?.reason); + this.destroy(this[SIGNAL]?.reason); + } + /** + * True if the stream has been aborted. + */ + get aborted() { + return this[ABORTED]; + } + /** + * No-op setter. Stream aborted status is set via the AbortSignal provided + * in the constructor options. + */ + set aborted(_) { } + write(chunk, encoding, cb) { + if (this[ABORTED]) + return false; + if (this[EOF]) + throw new Error('write after end'); + if (this[DESTROYED]) { + this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); + return true; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (!encoding) + encoding = 'utf8'; + const fn = this[ASYNC] ? defer : nodefer; + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything is only allowed if in object mode, so throw + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); + } + else if (isArrayBufferLike(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk); + } + else if (typeof chunk !== 'string') { + throw new Error('Non-contiguous data written to non-objectMode stream'); + } + } + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + // maybe impossible? 
+ /* c8 ignore start */ + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + /* c8 ignore stop */ + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { + //@ts-ignore - sinful unsafe type change + chunk = Buffer.from(chunk, encoding); + } + if (Buffer.isBuffer(chunk) && this[ENCODING]) { + //@ts-ignore - sinful unsafe type change + chunk = this[DECODER].write(chunk); + } + // Note: flushing CAN potentially switch us into not-flowing mode + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + /** + * Low-level explicit read method. + * + * In objectMode, the argument is ignored, and one item is returned if + * available. + * + * `n` is the number of bytes (or in the case of encoding streams, + * characters) to consume. If `n` is not provided, then the entire buffer + * is returned, or `null` is returned if no data is available. + * + * If `n` is greater that the amount of data in the internal buffer, + * then `null` is returned. + */ + read(n) { + if (this[DESTROYED]) + return null; + this[DISCARDED] = false; + if (this[BUFFERLENGTH] === 0 || + n === 0 || + (n && n > this[BUFFERLENGTH])) { + this[MAYBE_EMIT_END](); + return null; + } + if (this[OBJECTMODE]) + n = null; + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + // not object mode, so if we have an encoding, then RType is string + // otherwise, must be Buffer + this[BUFFER] = [ + (this[ENCODING] + ? this[BUFFER].join('') + : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), + ]; + } + const ret = this[READ](n || null, this[BUFFER][0]); + this[MAYBE_EMIT_END](); + return ret; + } + [READ](n, chunk) { + if (this[OBJECTMODE]) + this[BUFFERSHIFT](); + else { + const c = chunk; + if (n === c.length || n === null) + this[BUFFERSHIFT](); + else if (typeof c === 'string') { + this[BUFFER][0] = c.slice(n); + chunk = c.slice(0, n); + this[BUFFERLENGTH] -= n; + } + else { + this[BUFFER][0] = c.subarray(n); + chunk = c.subarray(0, n); + this[BUFFERLENGTH] -= n; + } + } + this.emit('data', chunk); + if (!this[BUFFER].length && !this[EOF]) + this.emit('drain'); + return chunk; + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (chunk !== undefined) + this.write(chunk, encoding); + if (cb) + this.once('end', cb); + this[EOF] = true; + this.writable = false; + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
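+ // Editorial sketch (not upstream; mp and cb are hypothetical):
+ // mp.end('bye', cb) writes the final chunk and invokes cb once the
+ // buffered data has been consumed and 'end' actually fires.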
+ if (this[FLOWING] || !this[PAUSED]) + this[MAYBE_EMIT_END](); + return this; + } + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) + return; + if (!this[DATALISTENERS] && !this[PIPES].length) { + this[DISCARDED] = true; + } + this[PAUSED] = false; + this[FLOWING] = true; + this.emit('resume'); + if (this[BUFFER].length) + this[FLUSH](); + else if (this[EOF]) + this[MAYBE_EMIT_END](); + else + this.emit('drain'); + } + /** + * Resume the stream if it is currently in a paused state + * + * If called when there are no pipe destinations or `data` event listeners, + * this will place the stream in a "discarded" state, where all data will + * be thrown away. The discarded state is removed if a pipe destination or + * data handler is added, if pause() is called, or if any synchronous or + * asynchronous iteration is started. + */ + resume() { + return this[RESUME](); + } + /** + * Pause the stream + */ + pause() { + this[FLOWING] = false; + this[PAUSED] = true; + this[DISCARDED] = false; + } + /** + * true if the stream has been forcibly destroyed + */ + get destroyed() { + return this[DESTROYED]; + } + /** + * true if the stream is currently in a flowing state, meaning that + * any writes will be immediately emitted. + */ + get flowing() { + return this[FLOWING]; + } + /** + * true if the stream is currently in a paused state + */ + get paused() { + return this[PAUSED]; + } + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1; + else + this[BUFFERLENGTH] += chunk.length; + this[BUFFER].push(chunk); + } + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1; + else + this[BUFFERLENGTH] -= this[BUFFER][0].length; + return this[BUFFER].shift(); + } + [FLUSH](noDrain = false) { + do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && + this[BUFFER].length); + if (!noDrain && !this[BUFFER].length && !this[EOF]) + this.emit('drain'); + } + [FLUSHCHUNK](chunk) { + this.emit('data', chunk); + return this[FLOWING]; + } + /** + * Pipe all data emitted by this stream into the destination provided. + * + * Triggers the flow of data. + */ + pipe(dest, opts) { + if (this[DESTROYED]) + return dest; + this[DISCARDED] = false; + const ended = this[EMITTED_END]; + opts = opts || {}; + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false; + else + opts.end = opts.end !== false; + opts.proxyErrors = !!opts.proxyErrors; + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end(); + } + else { + // "as" here just ignores the WType, which pipes don't care about, + // since they're only consuming from us, and writing to the dest + this[PIPES].push(!opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)); + if (this[ASYNC]) + defer(() => this[RESUME]()); + else + this[RESUME](); + } + return dest; + } + /** + * Fully unhook a piped destination stream. + * + * If the destination stream was the only consumer of this stream (ie, + * there are no other piped destinations or `'data'` event listeners) + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. 
+ */ + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest); + if (p) { + if (this[PIPES].length === 1) { + if (this[FLOWING] && this[DATALISTENERS] === 0) { + this[FLOWING] = false; + } + this[PIPES] = []; + } + else + this[PIPES].splice(this[PIPES].indexOf(p), 1); + p.unpipe(); + } + } + /** + * Alias for {@link Minipass#on} + */ + addListener(ev, handler) { + return this.on(ev, handler); + } + /** + * Mostly identical to `EventEmitter.on`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * - Adding a 'data' event handler will trigger the flow of data + * + * - Adding a 'readable' event handler when there is data waiting to be read + * will cause 'readable' to be emitted immediately. + * + * - Adding an 'endish' event handler ('end', 'finish', etc.) which has + * already passed will cause the event to be emitted immediately and all + * handlers removed. + * + * - Adding an 'error' event handler after an error has been emitted will + * cause the event to be re-emitted immediately with the error previously + * raised. + */ + on(ev, handler) { + const ret = super.on(ev, handler); + if (ev === 'data') { + this[DISCARDED] = false; + this[DATALISTENERS]++; + if (!this[PIPES].length && !this[FLOWING]) { + this[RESUME](); + } + } + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { + super.emit('readable'); + } + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev); + this.removeAllListeners(ev); + } + else if (ev === 'error' && this[EMITTED_ERROR]) { + const h = handler; + if (this[ASYNC]) + defer(() => h.call(this, this[EMITTED_ERROR])); + else + h.call(this, this[EMITTED_ERROR]); + } + return ret; + } + /** + * Alias for {@link Minipass#off} + */ + removeListener(ev, handler) { + return this.off(ev, handler); + } + /** + * Mostly identical to `EventEmitter.off` + * + * If a 'data' event handler is removed, and it was the last consumer + * (ie, there are no pipe destinations or other 'data' event listeners), + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + off(ev, handler) { + const ret = super.off(ev, handler); + // if we previously had listeners, and now we don't, and we don't + // have any pipes, then stop the flow, unless it's been explicitly + // put in a discarded flowing state via stream.resume(). + if (ev === 'data') { + this[DATALISTENERS] = this.listeners('data').length; + if (this[DATALISTENERS] === 0 && + !this[DISCARDED] && + !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * Mostly identical to `EventEmitter.removeAllListeners` + * + * If all 'data' event handlers are removed, and they were the last consumer + * (ie, there are no pipe destinations), then the flow of data will stop + * until there is another consumer or {@link Minipass#resume} is explicitly + * called. 
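+ *
+ * Editorial sketch (not upstream; mp is hypothetical): calling resume()
+ * at that point discards instead of buffering:
+ *
+ *     mp.removeAllListeners('data')
+ *     mp.resume() // subsequent writes are thrown away, not buffered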
+ */ + removeAllListeners(ev) { + const ret = super.removeAllListeners(ev); + if (ev === 'data' || ev === undefined) { + this[DATALISTENERS] = 0; + if (!this[DISCARDED] && !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * true if the 'end' event has been emitted + */ + get emittedEnd() { + return this[EMITTED_END]; + } + [MAYBE_EMIT_END]() { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF]) { + this[EMITTING_END] = true; + this.emit('end'); + this.emit('prefinish'); + this.emit('finish'); + if (this[CLOSED]) + this.emit('close'); + this[EMITTING_END] = false; + } + } + /** + * Mostly identical to `EventEmitter.emit`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * If the stream has been destroyed, and the event is something other + * than 'close' or 'error', then `false` is returned and no handlers + * are called. + * + * If the event is 'end', and has already been emitted, then the event + * is ignored. If the stream is in a paused or non-flowing state, then + * the event will be deferred until data flow resumes. If the stream is + * async, then handlers will be called on the next tick rather than + * immediately. + * + * If the event is 'close', and 'end' has not yet been emitted, then + * the event will be deferred until after 'end' is emitted. + * + * If the event is 'error', and an AbortSignal was provided for the stream, + * and there are no listeners, then the event is ignored, matching the + * behavior of node core streams in the presense of an AbortSignal. + * + * If the event is 'finish' or 'prefinish', then all listeners will be + * removed after emitting the event, to prevent double-firing. + */ + emit(ev, ...args) { + const data = args[0]; + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && + ev !== 'close' && + ev !== DESTROYED && + this[DESTROYED]) { + return false; + } + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? (defer(() => this[EMITDATA](data)), true) + : this[EMITDATA](data); + } + else if (ev === 'end') { + return this[EMITEND](); + } + else if (ev === 'close') { + this[CLOSED] = true; + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return false; + const ret = super.emit('close'); + this.removeAllListeners('close'); + return ret; + } + else if (ev === 'error') { + this[EMITTED_ERROR] = data; + super.emit(ERROR, data); + const ret = !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false; + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'resume') { + const ret = super.emit('resume'); + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev); + this.removeAllListeners(ev); + return ret; + } + // Some other unknown event + const ret = super.emit(ev, ...args); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) + this.pause(); + } + const ret = this[DISCARDED] ? false : super.emit('data', data); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITEND]() { + if (this[EMITTED_END]) + return false; + this[EMITTED_END] = true; + this.readable = false; + return this[ASYNC] + ? 
(defer(() => this[EMITEND2]()), true) + : this[EMITEND2](); + } + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end(); + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data); + } + if (!this[DISCARDED]) + super.emit('data', data); + } + } + for (const p of this[PIPES]) { + p.end(); + } + const ret = super.emit('end'); + this.removeAllListeners('end'); + return ret; + } + /** + * Return a Promise that resolves to an array of all emitted data once + * the stream ends. + */ + async collect() { + const buf = Object.assign([], { + dataLength: 0, + }); + if (!this[OBJECTMODE]) + buf.dataLength = 0; + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise(); + this.on('data', c => { + buf.push(c); + if (!this[OBJECTMODE]) + buf.dataLength += c.length; + }); + await p; + return buf; + } + /** + * Return a Promise that resolves to the concatenation of all emitted data + * once the stream ends. + * + * Not allowed on objectMode streams. + */ + async concat() { + if (this[OBJECTMODE]) { + throw new Error('cannot concat in objectMode'); + } + const buf = await this.collect(); + return (this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength)); + } + /** + * Return a void Promise that resolves once the stream ends. + */ + async promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))); + this.on('error', er => reject(er)); + this.on('end', () => resolve()); + }); + } + /** + * Asynchronous `for await of` iteration. + * + * This will continue emitting all chunks until the stream terminates. + */ + [Symbol.asyncIterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. + this[DISCARDED] = false; + let stopped = false; + const stop = async () => { + this.pause(); + stopped = true; + return { value: undefined, done: true }; + }; + const next = () => { + if (stopped) + return stop(); + const res = this.read(); + if (res !== null) + return Promise.resolve({ done: false, value: res }); + if (this[EOF]) + return stop(); + let resolve; + let reject; + const onerr = (er) => { + this.off('data', ondata); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + stop(); + reject(er); + }; + const ondata = (value) => { + this.off('error', onerr); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + this.pause(); + resolve({ value, done: !!this[EOF] }); + }; + const onend = () => { + this.off('error', onerr); + this.off('data', ondata); + this.off(DESTROYED, ondestroy); + stop(); + resolve({ done: true, value: undefined }); + }; + const ondestroy = () => onerr(new Error('stream destroyed')); + return new Promise((res, rej) => { + reject = rej; + resolve = res; + this.once(DESTROYED, ondestroy); + this.once('error', onerr); + this.once('end', onend); + this.once('data', ondata); + }); + }; + return { + next, + throw: stop, + return: stop, + [Symbol.asyncIterator]() { + return this; + }, + }; + } + /** + * Synchronous `for of` iteration. + * + * The iteration will terminate when the internal buffer runs out, even + * if the stream has not yet terminated. + */ + [Symbol.iterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. 
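+ // Editorial sketch (not upstream; mp is hypothetical): contrast with
+ // the async iterator above, which keeps waiting for new chunks:
+ //   for await (const chunk of mp) { /* runs until 'end' */ }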
+ this[DISCARDED] = false; + let stopped = false; + const stop = () => { + this.pause(); + this.off(ERROR, stop); + this.off(DESTROYED, stop); + this.off('end', stop); + stopped = true; + return { done: true, value: undefined }; + }; + const next = () => { + if (stopped) + return stop(); + const value = this.read(); + return value === null ? stop() : { done: false, value }; + }; + this.once('end', stop); + this.once(ERROR, stop); + this.once(DESTROYED, stop); + return { + next, + throw: stop, + return: stop, + [Symbol.iterator]() { + return this; + }, + }; + } + /** + * Destroy a stream, preventing it from being used for any further purpose. + * + * If the stream has a `close()` method, then it will be called on + * destruction. + * + * After destruction, any attempt to write data, read data, or emit most + * events will be ignored. + * + * If an error argument is provided, then it will be emitted in an + * 'error' event. + */ + destroy(er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er); + else + this.emit(DESTROYED); + return this; + } + this[DESTROYED] = true; + this[DISCARDED] = true; + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0; + this[BUFFERLENGTH] = 0; + const wc = this; + if (typeof wc.close === 'function' && !this[CLOSED]) + wc.close(); + if (er) + this.emit('error', er); + // if no error to emit, still reject pending promises + else + this.emit(DESTROYED); + return this; + } + /** + * Alias for {@link isStream} + * + * Former export location, maintained for backwards compatibility. + * + * @deprecated + */ + static get isStream() { + return exports.isStream; + } +} +exports.Minipass = Minipass; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/fs-minipass/node_modules/minipass/dist/cjs/package.json b/node_modules/fs-minipass/node_modules/minipass/dist/cjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/fs-minipass/node_modules/minipass/dist/mjs/index.js b/node_modules/fs-minipass/node_modules/minipass/dist/mjs/index.js new file mode 100644 index 0000000000000..b65fafbae43a4 --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/dist/mjs/index.js @@ -0,0 +1,1018 @@ +const proc = typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + }; +import { EventEmitter } from 'events'; +import Stream from 'stream'; +import { StringDecoder } from 'string_decoder'; +/** + * Return true if the argument is a Minipass stream, Node stream, or something + * else that Minipass can interact with. 
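+ *
+ * Illustrative examples (assumed usage; not from the original source):
+ * `isStream(new Minipass())` and `isStream(process.stdout)` are true,
+ * while `isStream({})` is false.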
+ */ +export const isStream = (s) => !!s && + typeof s === 'object' && + (s instanceof Minipass || + s instanceof Stream || + isReadable(s) || + isWritable(s)); +/** + * Return true if the argument is a valid {@link Minipass.Readable} + */ +export const isReadable = (s) => !!s && + typeof s === 'object' && + s instanceof EventEmitter && + typeof s.pipe === 'function' && + // node core Writable streams have a pipe() method, but it throws + s.pipe !== Stream.Writable.prototype.pipe; +/** + * Return true if the argument is a valid {@link Minipass.Writable} + */ +export const isWritable = (s) => !!s && + typeof s === 'object' && + s instanceof EventEmitter && + typeof s.write === 'function' && + typeof s.end === 'function'; +const EOF = Symbol('EOF'); +const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); +const EMITTED_END = Symbol('emittedEnd'); +const EMITTING_END = Symbol('emittingEnd'); +const EMITTED_ERROR = Symbol('emittedError'); +const CLOSED = Symbol('closed'); +const READ = Symbol('read'); +const FLUSH = Symbol('flush'); +const FLUSHCHUNK = Symbol('flushChunk'); +const ENCODING = Symbol('encoding'); +const DECODER = Symbol('decoder'); +const FLOWING = Symbol('flowing'); +const PAUSED = Symbol('paused'); +const RESUME = Symbol('resume'); +const BUFFER = Symbol('buffer'); +const PIPES = Symbol('pipes'); +const BUFFERLENGTH = Symbol('bufferLength'); +const BUFFERPUSH = Symbol('bufferPush'); +const BUFFERSHIFT = Symbol('bufferShift'); +const OBJECTMODE = Symbol('objectMode'); +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed'); +// internal event when stream has an error +const ERROR = Symbol('error'); +const EMITDATA = Symbol('emitData'); +const EMITEND = Symbol('emitEnd'); +const EMITEND2 = Symbol('emitEnd2'); +const ASYNC = Symbol('async'); +const ABORT = Symbol('abort'); +const ABORTED = Symbol('aborted'); +const SIGNAL = Symbol('signal'); +const DATALISTENERS = Symbol('dataListeners'); +const DISCARDED = Symbol('discarded'); +const defer = (fn) => Promise.resolve().then(fn); +const nodefer = (fn) => fn(); +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; +const isArrayBufferLike = (b) => b instanceof ArrayBuffer || + (!!b && + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0); +const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); +/** + * Internal class representing a pipe to a destination stream. + * + * @internal + */ +class Pipe { + src; + dest; + opts; + ondrain; + constructor(src, dest, opts) { + this.src = src; + this.dest = dest; + this.opts = opts; + this.ondrain = () => src[RESUME](); + this.dest.on('drain', this.ondrain); + } + unpipe() { + this.dest.removeListener('drain', this.ondrain); + } + // only here for the prototype + /* c8 ignore start */ + proxyErrors(_er) { } + /* c8 ignore stop */ + end() { + this.unpipe(); + if (this.opts.end) + this.dest.end(); + } +} +/** + * Internal class representing a pipe to a destination stream where + * errors are proxied. 
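+ *
+ * For illustration (assumed usage; not from the original source): this is
+ * the pipe implementation selected by `src.pipe(dest, { proxyErrors: true })`,
+ * so that an 'error' emitted on `src` is re-emitted on `dest` as well.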
+ * + * @internal + */ +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors); + super.unpipe(); + } + constructor(src, dest, opts) { + super(src, dest, opts); + this.proxyErrors = er => dest.emit('error', er); + src.on('error', this.proxyErrors); + } +} +const isObjectModeOptions = (o) => !!o.objectMode; +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; +/** + * Main export, the Minipass class + * + * `RType` is the type of data emitted, defaults to Buffer + * + * `WType` is the type of data to be written, if RType is buffer or string, + * then any {@link Minipass.ContiguousData} is allowed. + * + * `Events` is the set of event handler signatures that this object + * will emit, see {@link Minipass.Events} + */ +export class Minipass extends EventEmitter { + [FLOWING] = false; + [PAUSED] = false; + [PIPES] = []; + [BUFFER] = []; + [OBJECTMODE]; + [ENCODING]; + [ASYNC]; + [DECODER]; + [EOF] = false; + [EMITTED_END] = false; + [EMITTING_END] = false; + [CLOSED] = false; + [EMITTED_ERROR] = null; + [BUFFERLENGTH] = 0; + [DESTROYED] = false; + [SIGNAL]; + [ABORTED] = false; + [DATALISTENERS] = 0; + [DISCARDED] = false; + /** + * true if the stream can be written + */ + writable = true; + /** + * true if the stream can be read + */ + readable = true; + /** + * If `RType` is Buffer, then options do not need to be provided. + * Otherwise, an options object must be provided to specify either + * {@link Minipass.SharedOptions.objectMode} or + * {@link Minipass.SharedOptions.encoding}, as appropriate. + */ + constructor(...args) { + const options = (args[0] || + {}); + super(); + if (options.objectMode && typeof options.encoding === 'string') { + throw new TypeError('Encoding and objectMode may not be used together'); + } + if (isObjectModeOptions(options)) { + this[OBJECTMODE] = true; + this[ENCODING] = null; + } + else if (isEncodingOptions(options)) { + this[ENCODING] = options.encoding; + this[OBJECTMODE] = false; + } + else { + this[OBJECTMODE] = false; + this[ENCODING] = null; + } + this[ASYNC] = !!options.async; + this[DECODER] = this[ENCODING] + ? new StringDecoder(this[ENCODING]) + : null; + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); + } + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); + } + const { signal } = options; + if (signal) { + this[SIGNAL] = signal; + if (signal.aborted) { + this[ABORT](); + } + else { + signal.addEventListener('abort', () => this[ABORT]()); + } + } + } + /** + * The amount of data stored in the buffer waiting to be read. + * + * For Buffer strings, this will be the total byte length. + * For string encoding streams, this will be the string character length, + * according to JavaScript's `string.length` logic. + * For objectMode streams, this is a count of the items waiting to be + * emitted. 
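+ *
+ * Sketch of the accounting (assumed usage; not from the original source):
+ * after `new Minipass().write('abc')`, `bufferLength` is 3 (bytes),
+ * while an objectMode stream counts each written item as 1.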
+ */ + get bufferLength() { + return this[BUFFERLENGTH]; + } + /** + * The `BufferEncoding` currently in use, or `null` + */ + get encoding() { + return this[ENCODING]; + } + /** + * @deprecated - This is a read only property + */ + set encoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * @deprecated - Encoding may only be set at instantiation time + */ + setEncoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * True if this is an objectMode stream + */ + get objectMode() { + return this[OBJECTMODE]; + } + /** + * @deprecated - This is a read-only property + */ + set objectMode(_om) { + throw new Error('objectMode must be set at instantiation time'); + } + /** + * true if this is an async stream + */ + get ['async']() { + return this[ASYNC]; + } + /** + * Set to true to make this stream async. + * + * Once set, it cannot be unset, as this would potentially cause incorrect + * behavior. Ie, a sync stream can be made async, but an async stream + * cannot be safely made sync. + */ + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a; + } + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true; + this.emit('abort', this[SIGNAL]?.reason); + this.destroy(this[SIGNAL]?.reason); + } + /** + * True if the stream has been aborted. + */ + get aborted() { + return this[ABORTED]; + } + /** + * No-op setter. Stream aborted status is set via the AbortSignal provided + * in the constructor options. + */ + set aborted(_) { } + write(chunk, encoding, cb) { + if (this[ABORTED]) + return false; + if (this[EOF]) + throw new Error('write after end'); + if (this[DESTROYED]) { + this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); + return true; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (!encoding) + encoding = 'utf8'; + const fn = this[ASYNC] ? defer : nodefer; + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything is only allowed if in object mode, so throw + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); + } + else if (isArrayBufferLike(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk); + } + else if (typeof chunk !== 'string') { + throw new Error('Non-contiguous data written to non-objectMode stream'); + } + } + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + // maybe impossible? 
+ /* c8 ignore start */ + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + /* c8 ignore stop */ + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { + //@ts-ignore - sinful unsafe type change + chunk = Buffer.from(chunk, encoding); + } + if (Buffer.isBuffer(chunk) && this[ENCODING]) { + //@ts-ignore - sinful unsafe type change + chunk = this[DECODER].write(chunk); + } + // Note: flushing CAN potentially switch us into not-flowing mode + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + /** + * Low-level explicit read method. + * + * In objectMode, the argument is ignored, and one item is returned if + * available. + * + * `n` is the number of bytes (or in the case of encoding streams, + * characters) to consume. If `n` is not provided, then the entire buffer + * is returned, or `null` is returned if no data is available. + * + * If `n` is greater than the amount of data in the internal buffer, + * then `null` is returned. + */ + read(n) { + if (this[DESTROYED]) + return null; + this[DISCARDED] = false; + if (this[BUFFERLENGTH] === 0 || + n === 0 || + (n && n > this[BUFFERLENGTH])) { + this[MAYBE_EMIT_END](); + return null; + } + if (this[OBJECTMODE]) + n = null; + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + // not object mode, so if we have an encoding, then RType is string + // otherwise, must be Buffer + this[BUFFER] = [ + (this[ENCODING] + ? this[BUFFER].join('') + : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), + ]; + } + const ret = this[READ](n || null, this[BUFFER][0]); + this[MAYBE_EMIT_END](); + return ret; + } + [READ](n, chunk) { + if (this[OBJECTMODE]) + this[BUFFERSHIFT](); + else { + const c = chunk; + if (n === c.length || n === null) + this[BUFFERSHIFT](); + else if (typeof c === 'string') { + this[BUFFER][0] = c.slice(n); + chunk = c.slice(0, n); + this[BUFFERLENGTH] -= n; + } + else { + this[BUFFER][0] = c.subarray(n); + chunk = c.subarray(0, n); + this[BUFFERLENGTH] -= n; + } + } + this.emit('data', chunk); + if (!this[BUFFER].length && !this[EOF]) + this.emit('drain'); + return chunk; + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (chunk !== undefined) + this.write(chunk, encoding); + if (cb) + this.once('end', cb); + this[EOF] = true; + this.writable = false; + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases.
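+        // Illustrative sketch (assumed usage; not part of upstream minipass):
+        //   const mp = new Minipass({ encoding: 'utf8' })
+        //   mp.end() // nothing buffered, so 'end' may fire right away
+        //   mp.on('end', () => console.log('done')) // late listener re-fires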
+ if (this[FLOWING] || !this[PAUSED]) + this[MAYBE_EMIT_END](); + return this; + } + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) + return; + if (!this[DATALISTENERS] && !this[PIPES].length) { + this[DISCARDED] = true; + } + this[PAUSED] = false; + this[FLOWING] = true; + this.emit('resume'); + if (this[BUFFER].length) + this[FLUSH](); + else if (this[EOF]) + this[MAYBE_EMIT_END](); + else + this.emit('drain'); + } + /** + * Resume the stream if it is currently in a paused state + * + * If called when there are no pipe destinations or `data` event listeners, + * this will place the stream in a "discarded" state, where all data will + * be thrown away. The discarded state is removed if a pipe destination or + * data handler is added, if pause() is called, or if any synchronous or + * asynchronous iteration is started. + */ + resume() { + return this[RESUME](); + } + /** + * Pause the stream + */ + pause() { + this[FLOWING] = false; + this[PAUSED] = true; + this[DISCARDED] = false; + } + /** + * true if the stream has been forcibly destroyed + */ + get destroyed() { + return this[DESTROYED]; + } + /** + * true if the stream is currently in a flowing state, meaning that + * any writes will be immediately emitted. + */ + get flowing() { + return this[FLOWING]; + } + /** + * true if the stream is currently in a paused state + */ + get paused() { + return this[PAUSED]; + } + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1; + else + this[BUFFERLENGTH] += chunk.length; + this[BUFFER].push(chunk); + } + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1; + else + this[BUFFERLENGTH] -= this[BUFFER][0].length; + return this[BUFFER].shift(); + } + [FLUSH](noDrain = false) { + do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && + this[BUFFER].length); + if (!noDrain && !this[BUFFER].length && !this[EOF]) + this.emit('drain'); + } + [FLUSHCHUNK](chunk) { + this.emit('data', chunk); + return this[FLOWING]; + } + /** + * Pipe all data emitted by this stream into the destination provided. + * + * Triggers the flow of data. + */ + pipe(dest, opts) { + if (this[DESTROYED]) + return dest; + this[DISCARDED] = false; + const ended = this[EMITTED_END]; + opts = opts || {}; + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false; + else + opts.end = opts.end !== false; + opts.proxyErrors = !!opts.proxyErrors; + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end(); + } + else { + // "as" here just ignores the WType, which pipes don't care about, + // since they're only consuming from us, and writing to the dest + this[PIPES].push(!opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)); + if (this[ASYNC]) + defer(() => this[RESUME]()); + else + this[RESUME](); + } + return dest; + } + /** + * Fully unhook a piped destination stream. + * + * If the destination stream was the only consumer of this stream (ie, + * there are no other piped destinations or `'data'` event listeners) + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. 
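+ *
+ * Illustrative sketch (assumed usage): after `src.pipe(dest)`, calling
+ * `src.unpipe(dest)` removes the only consumer, so `src` stops flowing
+ * until another consumer appears or `resume()` is called.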
+ */ + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest); + if (p) { + if (this[PIPES].length === 1) { + if (this[FLOWING] && this[DATALISTENERS] === 0) { + this[FLOWING] = false; + } + this[PIPES] = []; + } + else + this[PIPES].splice(this[PIPES].indexOf(p), 1); + p.unpipe(); + } + } + /** + * Alias for {@link Minipass#on} + */ + addListener(ev, handler) { + return this.on(ev, handler); + } + /** + * Mostly identical to `EventEmitter.on`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * - Adding a 'data' event handler will trigger the flow of data + * + * - Adding a 'readable' event handler when there is data waiting to be read + * will cause 'readable' to be emitted immediately. + * + * - Adding an 'endish' event handler ('end', 'finish', etc.) which has + * already passed will cause the event to be emitted immediately and all + * handlers removed. + * + * - Adding an 'error' event handler after an error has been emitted will + * cause the event to be re-emitted immediately with the error previously + * raised. + */ + on(ev, handler) { + const ret = super.on(ev, handler); + if (ev === 'data') { + this[DISCARDED] = false; + this[DATALISTENERS]++; + if (!this[PIPES].length && !this[FLOWING]) { + this[RESUME](); + } + } + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { + super.emit('readable'); + } + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev); + this.removeAllListeners(ev); + } + else if (ev === 'error' && this[EMITTED_ERROR]) { + const h = handler; + if (this[ASYNC]) + defer(() => h.call(this, this[EMITTED_ERROR])); + else + h.call(this, this[EMITTED_ERROR]); + } + return ret; + } + /** + * Alias for {@link Minipass#off} + */ + removeListener(ev, handler) { + return this.off(ev, handler); + } + /** + * Mostly identical to `EventEmitter.off` + * + * If a 'data' event handler is removed, and it was the last consumer + * (ie, there are no pipe destinations or other 'data' event listeners), + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + off(ev, handler) { + const ret = super.off(ev, handler); + // if we previously had listeners, and now we don't, and we don't + // have any pipes, then stop the flow, unless it's been explicitly + // put in a discarded flowing state via stream.resume(). + if (ev === 'data') { + this[DATALISTENERS] = this.listeners('data').length; + if (this[DATALISTENERS] === 0 && + !this[DISCARDED] && + !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * Mostly identical to `EventEmitter.removeAllListeners` + * + * If all 'data' event handlers are removed, and they were the last consumer + * (ie, there are no pipe destinations), then the flow of data will stop + * until there is another consumer or {@link Minipass#resume} is explicitly + * called. 
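+ *
+ * For example (assumed usage): `stream.removeAllListeners('data')` on a
+ * stream with no pipe destinations drops it back out of flowing mode.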
+ */ + removeAllListeners(ev) { + const ret = super.removeAllListeners(ev); + if (ev === 'data' || ev === undefined) { + this[DATALISTENERS] = 0; + if (!this[DISCARDED] && !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * true if the 'end' event has been emitted + */ + get emittedEnd() { + return this[EMITTED_END]; + } + [MAYBE_EMIT_END]() { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF]) { + this[EMITTING_END] = true; + this.emit('end'); + this.emit('prefinish'); + this.emit('finish'); + if (this[CLOSED]) + this.emit('close'); + this[EMITTING_END] = false; + } + } + /** + * Mostly identical to `EventEmitter.emit`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * If the stream has been destroyed, and the event is something other + * than 'close' or 'error', then `false` is returned and no handlers + * are called. + * + * If the event is 'end', and has already been emitted, then the event + * is ignored. If the stream is in a paused or non-flowing state, then + * the event will be deferred until data flow resumes. If the stream is + * async, then handlers will be called on the next tick rather than + * immediately. + * + * If the event is 'close', and 'end' has not yet been emitted, then + * the event will be deferred until after 'end' is emitted. + * + * If the event is 'error', and an AbortSignal was provided for the stream, + * and there are no listeners, then the event is ignored, matching the + * behavior of node core streams in the presence of an AbortSignal. + * + * If the event is 'finish' or 'prefinish', then all listeners will be + * removed after emitting the event, to prevent double-firing. + */ + emit(ev, ...args) { + const data = args[0]; + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && + ev !== 'close' && + ev !== DESTROYED && + this[DESTROYED]) { + return false; + } + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? (defer(() => this[EMITDATA](data)), true) + : this[EMITDATA](data); + } + else if (ev === 'end') { + return this[EMITEND](); + } + else if (ev === 'close') { + this[CLOSED] = true; + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return false; + const ret = super.emit('close'); + this.removeAllListeners('close'); + return ret; + } + else if (ev === 'error') { + this[EMITTED_ERROR] = data; + super.emit(ERROR, data); + const ret = !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false; + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'resume') { + const ret = super.emit('resume'); + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev); + this.removeAllListeners(ev); + return ret; + } + // Some other unknown event + const ret = super.emit(ev, ...args); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) + this.pause(); + } + const ret = this[DISCARDED] ? false : super.emit('data', data); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITEND]() { + if (this[EMITTED_END]) + return false; + this[EMITTED_END] = true; + this.readable = false; + return this[ASYNC] + ?
(defer(() => this[EMITEND2]()), true) + : this[EMITEND2](); + } + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end(); + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data); + } + if (!this[DISCARDED]) + super.emit('data', data); + } + } + for (const p of this[PIPES]) { + p.end(); + } + const ret = super.emit('end'); + this.removeAllListeners('end'); + return ret; + } + /** + * Return a Promise that resolves to an array of all emitted data once + * the stream ends. + */ + async collect() { + const buf = Object.assign([], { + dataLength: 0, + }); + if (!this[OBJECTMODE]) + buf.dataLength = 0; + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise(); + this.on('data', c => { + buf.push(c); + if (!this[OBJECTMODE]) + buf.dataLength += c.length; + }); + await p; + return buf; + } + /** + * Return a Promise that resolves to the concatenation of all emitted data + * once the stream ends. + * + * Not allowed on objectMode streams. + */ + async concat() { + if (this[OBJECTMODE]) { + throw new Error('cannot concat in objectMode'); + } + const buf = await this.collect(); + return (this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength)); + } + /** + * Return a void Promise that resolves once the stream ends. + */ + async promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))); + this.on('error', er => reject(er)); + this.on('end', () => resolve()); + }); + } + /** + * Asynchronous `for await of` iteration. + * + * This will continue emitting all chunks until the stream terminates. + */ + [Symbol.asyncIterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. + this[DISCARDED] = false; + let stopped = false; + const stop = async () => { + this.pause(); + stopped = true; + return { value: undefined, done: true }; + }; + const next = () => { + if (stopped) + return stop(); + const res = this.read(); + if (res !== null) + return Promise.resolve({ done: false, value: res }); + if (this[EOF]) + return stop(); + let resolve; + let reject; + const onerr = (er) => { + this.off('data', ondata); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + stop(); + reject(er); + }; + const ondata = (value) => { + this.off('error', onerr); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + this.pause(); + resolve({ value, done: !!this[EOF] }); + }; + const onend = () => { + this.off('error', onerr); + this.off('data', ondata); + this.off(DESTROYED, ondestroy); + stop(); + resolve({ done: true, value: undefined }); + }; + const ondestroy = () => onerr(new Error('stream destroyed')); + return new Promise((res, rej) => { + reject = rej; + resolve = res; + this.once(DESTROYED, ondestroy); + this.once('error', onerr); + this.once('end', onend); + this.once('data', ondata); + }); + }; + return { + next, + throw: stop, + return: stop, + [Symbol.asyncIterator]() { + return this; + }, + }; + } + /** + * Synchronous `for of` iteration. + * + * The iteration will terminate when the internal buffer runs out, even + * if the stream has not yet terminated. + */ + [Symbol.iterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. 
+ this[DISCARDED] = false; + let stopped = false; + const stop = () => { + this.pause(); + this.off(ERROR, stop); + this.off(DESTROYED, stop); + this.off('end', stop); + stopped = true; + return { done: true, value: undefined }; + }; + const next = () => { + if (stopped) + return stop(); + const value = this.read(); + return value === null ? stop() : { done: false, value }; + }; + this.once('end', stop); + this.once(ERROR, stop); + this.once(DESTROYED, stop); + return { + next, + throw: stop, + return: stop, + [Symbol.iterator]() { + return this; + }, + }; + } + /** + * Destroy a stream, preventing it from being used for any further purpose. + * + * If the stream has a `close()` method, then it will be called on + * destruction. + * + * After destruction, any attempt to write data, read data, or emit most + * events will be ignored. + * + * If an error argument is provided, then it will be emitted in an + * 'error' event. + */ + destroy(er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er); + else + this.emit(DESTROYED); + return this; + } + this[DESTROYED] = true; + this[DISCARDED] = true; + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0; + this[BUFFERLENGTH] = 0; + const wc = this; + if (typeof wc.close === 'function' && !this[CLOSED]) + wc.close(); + if (er) + this.emit('error', er); + // if no error to emit, still reject pending promises + else + this.emit(DESTROYED); + return this; + } + /** + * Alias for {@link isStream} + * + * Former export location, maintained for backwards compatibility. + * + * @deprecated + */ + static get isStream() { + return isStream; + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/fs-minipass/node_modules/minipass/dist/mjs/package.json b/node_modules/fs-minipass/node_modules/minipass/dist/mjs/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/fs-minipass/node_modules/minipass/package.json b/node_modules/fs-minipass/node_modules/minipass/package.json new file mode 100644 index 0000000000000..6faaa247a5bc6 --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/package.json @@ -0,0 +1,82 @@ +{ + "name": "minipass", + "version": "7.0.3", + "description": "minimal implementation of a PassThrough stream", + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/cjs/index.js", + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./package.json": "./package.json" + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . 
--loglevel warn", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--enable-source-maps", + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/node": "^20.1.2", + "@types/tap": "^15.0.8", + "c8": "^7.13.0", + "prettier": "^2.6.2", + "tap": "^16.3.0", + "ts-node": "^10.9.1", + "typedoc": "^0.24.8", + "typescript": "^5.1.3", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "sync-content": "^1.0.2", + "through2": "^2.0.3" + }, + "repository": "https://github.com/isaacs/minipass", + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } +} diff --git a/node_modules/fs-minipass/package.json b/node_modules/fs-minipass/package.json index 3d1fa3dbc11e4..e501e6474294d 100644 --- a/node_modules/fs-minipass/package.json +++ b/node_modules/fs-minipass/package.json @@ -1,6 +1,6 @@ { "name": "fs-minipass", - "version": "3.0.2", + "version": "3.0.3", "main": "lib/index.js", "scripts": { "test": "tap", @@ -24,11 +24,11 @@ "homepage": "https://github.com/npm/fs-minipass#readme", "description": "fs read and write streams based on minipass", "dependencies": { - "minipass": "^5.0.0" + "minipass": "^7.0.3" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.1", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.18.0", "mutate-fs": "^2.1.1", "tap": "^16.3.2" }, @@ -48,7 +48,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.14.1", + "version": "4.18.0", "publish": "true" } } diff --git a/package-lock.json b/package-lock.json index b142b69964523..b0439e2edec9f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -103,7 +103,7 @@ "cli-table3": "^0.6.3", "columnify": "^1.6.0", "fastest-levenshtein": "^1.0.16", - "fs-minipass": "^3.0.2", + "fs-minipass": "^3.0.3", "glob": "^10.2.7", "graceful-fs": "^4.2.11", "hosted-git-info": "^6.1.1", @@ -5842,17 +5842,26 @@ } }, "node_modules/fs-minipass": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-3.0.2.tgz", - "integrity": "sha512-2GAfyfoaCDRrM6jaOS3UsBts8yJ55VioXdWcOL7dK9zdAuKT71+WBA4ifnNYqVjYv+4SsPxjK0JT4yIIn4cA/g==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-3.0.3.tgz", + "integrity": "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==", "inBundle": true, "dependencies": { - "minipass": "^5.0.0" + "minipass": "^7.0.3" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/fs-minipass/node_modules/minipass": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz", + "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==", + "inBundle": true, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", diff --git a/package.json b/package.json index 5284f7147b545..1ec72091c4c11 100644 --- a/package.json +++ b/package.json @@ -68,7 +68,7 @@ "cli-table3": "^0.6.3", "columnify": "^1.6.0", "fastest-levenshtein": "^1.0.16", - "fs-minipass": "^3.0.2", + "fs-minipass": "^3.0.3", "glob": "^10.2.7", "graceful-fs": "^4.2.11", "hosted-git-info": "^6.1.1", From 80844e94301d5309da4fa2f62fe4fdd93655de24 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Mon, 14 Aug 2023 12:09:17 -0700 Subject: [PATCH 20/68] deps: make-fetch-happen@12.0.0 --- DEPENDENCIES.md | 8 +- node_modules/.gitignore | 13 +- .../node_modules/make-fetch-happen/LICENSE | 0 .../make-fetch-happen/lib/agent.js | 0 .../make-fetch-happen/lib/cache/entry.js | 0 .../make-fetch-happen/lib/cache/errors.js | 0 .../make-fetch-happen/lib/cache/index.js | 0 .../make-fetch-happen/lib/cache/key.js | 0 .../make-fetch-happen/lib/cache/policy.js | 0 .../make-fetch-happen/lib/dns.js | 0 .../make-fetch-happen/lib/fetch.js | 0 .../make-fetch-happen/lib/index.js | 0 .../make-fetch-happen/lib/options.js | 0 .../make-fetch-happen/lib/pipeline.js | 0 .../make-fetch-happen/lib/remote.js | 10 +- .../make-fetch-happen/package.json | 22 +- node_modules/agentkeepalive/History.md | 12 + node_modules/agentkeepalive/lib/agent.js | 8 +- .../agentkeepalive/lib/https_agent.js | 4 +- node_modules/agentkeepalive/package.json | 6 +- node_modules/depd/History.md | 103 -- node_modules/depd/LICENSE | 22 - node_modules/depd/index.js | 538 --------- node_modules/depd/lib/browser/index.js | 77 -- node_modules/depd/package.json | 45 - node_modules/make-fetch-happen/lib/remote.js | 10 +- .../node_modules/minipass/LICENSE | 15 + .../node_modules/minipass/dist/cjs/index.js | 1028 +++++++++++++++++ .../minipass/dist/cjs/package.json | 3 + .../node_modules/minipass/dist/mjs/index.js | 1018 ++++++++++++++++ .../minipass/dist/mjs/package.json | 3 + .../node_modules/minipass/package.json | 82 ++ node_modules/make-fetch-happen/package.json | 22 +- 
.../node_modules/make-fetch-happen/LICENSE | 16 + .../make-fetch-happen/lib/agent.js | 214 ++++ .../make-fetch-happen/lib/cache/entry.js | 469 ++++++++ .../make-fetch-happen/lib/cache/errors.js | 11 + .../make-fetch-happen/lib/cache/index.js | 49 + .../make-fetch-happen/lib/cache/key.js | 17 + .../make-fetch-happen/lib/cache/policy.js | 161 +++ .../node_modules/make-fetch-happen/lib/dns.js | 49 + .../make-fetch-happen/lib/fetch.js | 118 ++ .../make-fetch-happen/lib/index.js | 41 + .../make-fetch-happen/lib/options.js | 54 + .../make-fetch-happen/lib/pipeline.js | 41 + .../make-fetch-happen/lib/remote.js | 121 ++ .../make-fetch-happen/package.json | 78 ++ .../node_modules/make-fetch-happen/LICENSE | 16 + .../make-fetch-happen/lib/agent.js | 214 ++++ .../make-fetch-happen/lib/cache/entry.js | 469 ++++++++ .../make-fetch-happen/lib/cache/errors.js | 11 + .../make-fetch-happen/lib/cache/index.js | 49 + .../make-fetch-happen/lib/cache/key.js | 17 + .../make-fetch-happen/lib/cache/policy.js | 161 +++ .../node_modules/make-fetch-happen/lib/dns.js | 49 + .../make-fetch-happen/lib/fetch.js | 118 ++ .../make-fetch-happen/lib/index.js | 41 + .../make-fetch-happen/lib/options.js | 54 + .../make-fetch-happen/lib/pipeline.js | 41 + .../make-fetch-happen/lib/remote.js | 121 ++ .../make-fetch-happen/package.json | 78 ++ .../node_modules/make-fetch-happen/LICENSE | 16 + .../make-fetch-happen/lib/agent.js | 214 ++++ .../make-fetch-happen/lib/cache/entry.js | 469 ++++++++ .../make-fetch-happen/lib/cache/errors.js | 11 + .../make-fetch-happen/lib/cache/index.js | 49 + .../make-fetch-happen/lib/cache/key.js | 17 + .../make-fetch-happen/lib/cache/policy.js | 161 +++ .../node_modules/make-fetch-happen/lib/dns.js | 49 + .../make-fetch-happen/lib/fetch.js | 118 ++ .../make-fetch-happen/lib/index.js | 41 + .../make-fetch-happen/lib/options.js | 54 + .../make-fetch-happen/lib/pipeline.js | 41 + .../make-fetch-happen/lib/remote.js | 121 ++ .../make-fetch-happen/package.json | 78 ++ package-lock.json | 169 ++- package.json | 2 +- 77 files changed, 6659 insertions(+), 878 deletions(-) rename node_modules/{npm-registry-fetch => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/LICENSE (100%) rename node_modules/{ => @npmcli/metavuln-calculator/node_modules}/make-fetch-happen/lib/agent.js (100%) rename node_modules/{npm-registry-fetch => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/cache/entry.js (100%) rename node_modules/{npm-registry-fetch => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/cache/errors.js (100%) rename node_modules/{npm-registry-fetch => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/cache/index.js (100%) rename node_modules/{npm-registry-fetch => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/cache/key.js (100%) rename node_modules/{npm-registry-fetch => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/cache/policy.js (100%) rename node_modules/{ => @npmcli/metavuln-calculator/node_modules}/make-fetch-happen/lib/dns.js (100%) rename node_modules/{npm-registry-fetch => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/fetch.js (100%) rename node_modules/{npm-registry-fetch => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/index.js (100%) rename node_modules/{npm-registry-fetch => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/options.js (100%) rename node_modules/{npm-registry-fetch => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/pipeline.js 
(100%) rename node_modules/{npm-registry-fetch => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/lib/remote.js (93%) rename node_modules/{npm-registry-fetch => @npmcli/metavuln-calculator}/node_modules/make-fetch-happen/package.json (82%) delete mode 100644 node_modules/depd/History.md delete mode 100644 node_modules/depd/LICENSE delete mode 100644 node_modules/depd/index.js delete mode 100644 node_modules/depd/lib/browser/index.js delete mode 100644 node_modules/depd/package.json create mode 100644 node_modules/make-fetch-happen/node_modules/minipass/LICENSE create mode 100644 node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/index.js create mode 100644 node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/package.json create mode 100644 node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/index.js create mode 100644 node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/package.json create mode 100644 node_modules/make-fetch-happen/node_modules/minipass/package.json create mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE create mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js create mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js create mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js create mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js create mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js create mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js create mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js create mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js create mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js create mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js create mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js create mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js create mode 100644 node_modules/node-gyp/node_modules/make-fetch-happen/package.json create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/LICENSE create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/agent.js create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/entry.js create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/errors.js create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/index.js create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/key.js create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/policy.js create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/dns.js create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/fetch.js create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/index.js create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/options.js create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/pipeline.js create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/remote.js create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/package.json create mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE create 
mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/agent.js create mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js create mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js create mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js create mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js create mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js create mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/dns.js create mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js create mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js create mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js create mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js create mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js create mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/package.json diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md index e687d7c02baa9..eeeea398ab182 100644 --- a/DEPENDENCIES.md +++ b/DEPENDENCIES.md @@ -82,6 +82,7 @@ graph LR; libnpmversion-->semver; make-fetch-happen-->cacache; make-fetch-happen-->minipass-fetch; + make-fetch-happen-->npmcli-agent["@npmcli/agent"]; make-fetch-happen-->ssri; nopt-->abbrev; normalize-package-data-->hosted-git-info; @@ -259,8 +260,6 @@ graph LR; ```mermaid graph LR; agent-base-->debug; - agentkeepalive-->debug; - agentkeepalive-->depd; agentkeepalive-->humanize-ms; aggregate-error-->clean-stack; aggregate-error-->indent-string; @@ -460,6 +459,7 @@ graph LR; make-fetch-happen-->minipass-pipeline; make-fetch-happen-->minipass; make-fetch-happen-->negotiator; + make-fetch-happen-->npmcli-agent["@npmcli/agent"]; make-fetch-happen-->promise-retry; make-fetch-happen-->socks-proxy-agent; make-fetch-happen-->ssri; @@ -598,6 +598,8 @@ graph LR; npm-registry-fetch-->minizlib; npm-registry-fetch-->npm-package-arg; npm-registry-fetch-->proc-log; + npmcli-agent-->lru-cache; + npmcli-agent-->socks; npmcli-arborist-->benchmark; npmcli-arborist-->bin-links; npmcli-arborist-->cacache; @@ -828,4 +830,4 @@ packages higher up the chain. 
- @npmcli/git, make-fetch-happen, @npmcli/config, init-package-json - @npmcli/installed-package-contents, @npmcli/map-workspaces, cacache, npm-pick-manifest, @npmcli/run-script, read-package-json, promzard - @npmcli/docs, @npmcli/fs, npm-bundled, read-package-json-fast, unique-filename, npm-install-checks, npm-package-arg, npm-packlist, normalize-package-data, bin-links, nopt, npmlog, parse-conflict-json, @npmcli/mock-globals, read - - @npmcli/eslint-config, @npmcli/template-oss, ignore-walk, semver, npm-normalize-package-bin, @npmcli/name-from-folder, json-parse-even-better-errors, fs-minipass, ssri, unique-slug, @npmcli/promise-spawn, hosted-git-info, proc-log, validate-npm-package-name, @npmcli/node-gyp, minipass-fetch, @npmcli/query, cmd-shim, read-cmd-shim, write-file-atomic, abbrev, are-we-there-yet, gauge, minify-registry-metadata, ini, @npmcli/disparity-colors, mute-stream, npm-audit-report, npm-user-validate + - @npmcli/eslint-config, @npmcli/template-oss, ignore-walk, semver, npm-normalize-package-bin, @npmcli/name-from-folder, json-parse-even-better-errors, fs-minipass, ssri, unique-slug, @npmcli/promise-spawn, hosted-git-info, proc-log, validate-npm-package-name, @npmcli/node-gyp, minipass-fetch, @npmcli/query, cmd-shim, read-cmd-shim, write-file-atomic, abbrev, are-we-there-yet, gauge, minify-registry-metadata, ini, @npmcli/disparity-colors, mute-stream, @npmcli/agent, npm-audit-report, npm-user-validate diff --git a/node_modules/.gitignore b/node_modules/.gitignore index d0134e49ddd93..52d4301bed174 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -27,6 +27,7 @@ !/@npmcli/metavuln-calculator !/@npmcli/metavuln-calculator/node_modules/ /@npmcli/metavuln-calculator/node_modules/* +!/@npmcli/metavuln-calculator/node_modules/make-fetch-happen !/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch !/@npmcli/metavuln-calculator/node_modules/pacote !/@npmcli/name-from-folder @@ -97,7 +98,6 @@ !/debug/node_modules/ms !/defaults !/delegates -!/depd !/diff !/eastasianwidth !/emoji-regex @@ -149,6 +149,9 @@ !/just-diff !/lru-cache !/make-fetch-happen +!/make-fetch-happen/node_modules/ +/make-fetch-happen/node_modules/* +!/make-fetch-happen/node_modules/minipass !/minimatch !/minipass-collect !/minipass-collect/node_modules/ @@ -188,6 +191,7 @@ !/node-gyp/node_modules/brace-expansion !/node-gyp/node_modules/gauge !/node-gyp/node_modules/glob +!/node-gyp/node_modules/make-fetch-happen !/node-gyp/node_modules/minimatch !/node-gyp/node_modules/nopt !/node-gyp/node_modules/npmlog @@ -207,7 +211,6 @@ !/npm-registry-fetch !/npm-registry-fetch/node_modules/ /npm-registry-fetch/node_modules/* -!/npm-registry-fetch/node_modules/make-fetch-happen !/npm-registry-fetch/node_modules/minipass !/npm-user-validate !/npmlog @@ -256,6 +259,9 @@ !/shebang-regex !/signal-exit !/sigstore +!/sigstore/node_modules/ +/sigstore/node_modules/* +!/sigstore/node_modules/make-fetch-happen !/smart-buffer !/socks-proxy-agent !/socks @@ -281,6 +287,9 @@ !/tiny-relative-date !/treeverse !/tuf-js +!/tuf-js/node_modules/ +/tuf-js/node_modules/* +!/tuf-js/node_modules/make-fetch-happen !/unique-filename !/unique-slug !/util-deprecate diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/LICENSE similarity index 100% rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/LICENSE diff 
--git a/node_modules/make-fetch-happen/lib/agent.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/agent.js similarity index 100% rename from node_modules/make-fetch-happen/lib/agent.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/agent.js diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/entry.js similarity index 100% rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/entry.js diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/errors.js similarity index 100% rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/errors.js diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/index.js similarity index 100% rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/index.js diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/key.js similarity index 100% rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/key.js diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/policy.js similarity index 100% rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/policy.js diff --git a/node_modules/make-fetch-happen/lib/dns.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/dns.js similarity index 100% rename from node_modules/make-fetch-happen/lib/dns.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/dns.js diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/fetch.js similarity index 100% rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/fetch.js diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/index.js similarity index 100% rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/index.js diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js 
b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/options.js similarity index 100% rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/options.js diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/pipeline.js similarity index 100% rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/pipeline.js diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/remote.js similarity index 93% rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/remote.js index 2aef9f8f969b0..bdbcc79cad908 100644 --- a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/remote.js @@ -4,7 +4,7 @@ const promiseRetry = require('promise-retry') const ssri = require('ssri') const CachingMinipassPipeline = require('./pipeline.js') -const { getAgent } = require('@npmcli/agent') +const getAgent = require('./agent.js') const pkg = require('../package.json') const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` @@ -14,15 +14,9 @@ const RETRY_ERRORS = [ 'ECONNREFUSED', // remote host refused to open connection 'EADDRINUSE', // failed to bind to a local port (proxy?) 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW - // from @npmcli/agent - 'ECONNECTIONTIMEOUT', - 'EIDLETIMEOUT', - 'ERESPONSETIMEOUT', - 'ETRANSFERTIMEOUT', + 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive // Known codes we do NOT retry on: // ENOTFOUND (getaddrinfo failure. 
Either bad hostname, or offline) - // EINVALIDPROXY // invalid protocol from @npmcli/agent - // EINVALIDRESPONSE // invalid status code from @npmcli/agent ] const RETRY_TYPES = [ diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/package.json similarity index 82% rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/package.json index 419db8fbb1289..fd415dc9966fa 100644 --- a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/package.json @@ -1,6 +1,6 @@ { "name": "make-fetch-happen", - "version": "12.0.0", + "version": "11.1.1", "description": "Opinionated, caching, retrying fetch client", "main": "lib/index.js", "files": [ @@ -33,28 +33,32 @@ "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "@npmcli/agent": "^1.1.0", + "agentkeepalive": "^4.2.1", "cacache": "^17.0.0", "http-cache-semantics": "^4.1.1", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", "is-lambda": "^1.0.1", - "minipass": "^7.0.2", + "lru-cache": "^7.7.1", + "minipass": "^5.0.0", "minipass-fetch": "^3.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", "ssri": "^10.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.18.0", + "@npmcli/template-oss": "4.14.1", "nock": "^13.2.4", "safe-buffer": "^5.2.1", "standard-version": "^9.3.2", "tap": "^16.0.0" }, "engines": { - "node": "^16.13.0 || >=18.0.0" + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" }, "tap": { "color": 1, @@ -68,13 +72,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "ciVersions": [ - "16.13.0", - "16.x", - "18.0.0", - "18.x" - ], - "version": "4.18.0", + "version": "4.14.1", "publish": "true" } } diff --git a/node_modules/agentkeepalive/History.md b/node_modules/agentkeepalive/History.md index a08e98e0bd504..6877834dd92a5 100644 --- a/node_modules/agentkeepalive/History.md +++ b/node_modules/agentkeepalive/History.md @@ -1,4 +1,16 @@ +4.5.0 / 2023-08-06 +================== + +**others** + * [[`1e5e312`](http://github.com/node-modules/agentkeepalive/commit/1e5e312f36491243372dbfee0dd47607e7b3d94a)] - deps: remove debug and depd (#114) (fengmk2 <>) + +4.4.0 / 2023-08-05 +================== + +**features** + * [[`c7c1e93`](http://github.com/node-modules/agentkeepalive/commit/c7c1e93beba7310d7c2cc9647dd211a686d21cac)] - feat: return socket from createConnection (#113) (Nabeel Bukhari <>) + 4.3.0 / 2023-03-06 ================== diff --git a/node_modules/agentkeepalive/lib/agent.js b/node_modules/agentkeepalive/lib/agent.js index a7065b5e5d1ad..8bd354effa05e 100644 --- a/node_modules/agentkeepalive/lib/agent.js +++ b/node_modules/agentkeepalive/lib/agent.js @@ -2,8 +2,7 @@ const OriginalAgent = require('http').Agent; const ms = require('humanize-ms'); -const debug = require('debug')('agentkeepalive'); -const deprecate = require('depd')('agentkeepalive'); +const debug = require('util').debuglog('agentkeepalive'); const { INIT_SOCKET, CURRENT_ID, @@ -27,6 +26,10 @@ if (majorVersion >= 11 && majorVersion <= 12) { defaultTimeoutListenerCount = 3; } +function deprecate(message) { + console.log('[agentkeepalive:deprecated] %s', message); +} + class Agent extends OriginalAgent { constructor(options) { options = options || {}; @@ -230,6 +233,7 @@ class Agent extends OriginalAgent { const newSocket = super.createConnection(options, onNewCreate); if (newSocket) onNewCreate(null, newSocket); + return newSocket; } get statusChanged() { diff --git a/node_modules/agentkeepalive/lib/https_agent.js b/node_modules/agentkeepalive/lib/https_agent.js index 73f529d65e7ff..344fb32cadd86 100644 --- a/node_modules/agentkeepalive/lib/https_agent.js +++ b/node_modules/agentkeepalive/lib/https_agent.js @@ -25,8 +25,8 @@ class HttpsAgent extends HttpAgent { }; } - createConnection(options) { - const socket = this[CREATE_HTTPS_CONNECTION](options); + createConnection(options, oncreate) { + const socket = this[CREATE_HTTPS_CONNECTION](options, oncreate); this[INIT_SOCKET](socket, options); return socket; } diff --git a/node_modules/agentkeepalive/package.json b/node_modules/agentkeepalive/package.json index 3115fee69a041..d8e9aa7160d0b 100644 --- a/node_modules/agentkeepalive/package.json +++ b/node_modules/agentkeepalive/package.json @@ -1,6 +1,6 @@ { "name": "agentkeepalive", - "version": "4.3.0", + "version": "4.5.0", "description": "Missing keepalive http.Agent", "main": "index.js", "browser": "browser.js", @@ -14,7 +14,7 @@ "contributor": "git-contributor", "test": "npm run lint && egg-bin test --full-trace", "test-local": "egg-bin test --full-trace", - "cov": "cross-env DEBUG=agentkeepalive egg-bin cov --full-trace", + "cov": "cross-env NODE_DEBUG=agentkeepalive egg-bin cov --full-trace", "ci": "npm run lint && npm run cov", "lint": "eslint lib test index.js" }, @@ -35,8 +35,6 @@ "HttpsAgent" ], "dependencies": { - "debug": "^4.1.0", - "depd": "^2.0.0", "humanize-ms": "^1.2.1" }, "devDependencies": { diff --git a/node_modules/depd/History.md b/node_modules/depd/History.md deleted file mode 100644 index cd9ebaaa9963f..0000000000000 --- 
a/node_modules/depd/History.md +++ /dev/null @@ -1,103 +0,0 @@ -2.0.0 / 2018-10-26 -================== - - * Drop support for Node.js 0.6 - * Replace internal `eval` usage with `Function` constructor - * Use instance methods on `process` to check for listeners - -1.1.2 / 2018-01-11 -================== - - * perf: remove argument reassignment - * Support Node.js 0.6 to 9.x - -1.1.1 / 2017-07-27 -================== - - * Remove unnecessary `Buffer` loading - * Support Node.js 0.6 to 8.x - -1.1.0 / 2015-09-14 -================== - - * Enable strict mode in more places - * Support io.js 3.x - * Support io.js 2.x - * Support web browser loading - - Requires bundler like Browserify or webpack - -1.0.1 / 2015-04-07 -================== - - * Fix `TypeError`s when under `'use strict'` code - * Fix useless type name on auto-generated messages - * Support io.js 1.x - * Support Node.js 0.12 - -1.0.0 / 2014-09-17 -================== - - * No changes - -0.4.5 / 2014-09-09 -================== - - * Improve call speed to functions using the function wrapper - * Support Node.js 0.6 - -0.4.4 / 2014-07-27 -================== - - * Work-around v8 generating empty stack traces - -0.4.3 / 2014-07-26 -================== - - * Fix exception when global `Error.stackTraceLimit` is too low - -0.4.2 / 2014-07-19 -================== - - * Correct call site for wrapped functions and properties - -0.4.1 / 2014-07-19 -================== - - * Improve automatic message generation for function properties - -0.4.0 / 2014-07-19 -================== - - * Add `TRACE_DEPRECATION` environment variable - * Remove non-standard grey color from color output - * Support `--no-deprecation` argument - * Support `--trace-deprecation` argument - * Support `deprecate.property(fn, prop, message)` - -0.3.0 / 2014-06-16 -================== - - * Add `NO_DEPRECATION` environment variable - -0.2.0 / 2014-06-15 -================== - - * Add `deprecate.property(obj, prop, message)` - * Remove `supports-color` dependency for node.js 0.8 - -0.1.0 / 2014-06-15 -================== - - * Add `deprecate.function(fn, message)` - * Add `process.on('deprecation', fn)` emitter - * Automatically generate message when omitted from `deprecate()` - -0.0.1 / 2014-06-15 -================== - - * Fix warning for dynamic calls at singe call site - -0.0.0 / 2014-06-15 -================== - - * Initial implementation diff --git a/node_modules/depd/LICENSE b/node_modules/depd/LICENSE deleted file mode 100644 index 248de7af2bd16..0000000000000 --- a/node_modules/depd/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -(The MIT License) - -Copyright (c) 2014-2018 Douglas Christopher Wilson - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/depd/index.js b/node_modules/depd/index.js deleted file mode 100644 index 1bf2fcfdeffc9..0000000000000 --- a/node_modules/depd/index.js +++ /dev/null @@ -1,538 +0,0 @@ -/*! - * depd - * Copyright(c) 2014-2018 Douglas Christopher Wilson - * MIT Licensed - */ - -/** - * Module dependencies. - */ - -var relative = require('path').relative - -/** - * Module exports. - */ - -module.exports = depd - -/** - * Get the path to base files on. - */ - -var basePath = process.cwd() - -/** - * Determine if namespace is contained in the string. - */ - -function containsNamespace (str, namespace) { - var vals = str.split(/[ ,]+/) - var ns = String(namespace).toLowerCase() - - for (var i = 0; i < vals.length; i++) { - var val = vals[i] - - // namespace contained - if (val && (val === '*' || val.toLowerCase() === ns)) { - return true - } - } - - return false -} - -/** - * Convert a data descriptor to accessor descriptor. - */ - -function convertDataDescriptorToAccessor (obj, prop, message) { - var descriptor = Object.getOwnPropertyDescriptor(obj, prop) - var value = descriptor.value - - descriptor.get = function getter () { return value } - - if (descriptor.writable) { - descriptor.set = function setter (val) { return (value = val) } - } - - delete descriptor.value - delete descriptor.writable - - Object.defineProperty(obj, prop, descriptor) - - return descriptor -} - -/** - * Create arguments string to keep arity. - */ - -function createArgumentsString (arity) { - var str = '' - - for (var i = 0; i < arity; i++) { - str += ', arg' + i - } - - return str.substr(2) -} - -/** - * Create stack string from stack. - */ - -function createStackString (stack) { - var str = this.name + ': ' + this.namespace - - if (this.message) { - str += ' deprecated ' + this.message - } - - for (var i = 0; i < stack.length; i++) { - str += '\n at ' + stack[i].toString() - } - - return str -} - -/** - * Create deprecate for namespace in caller. - */ - -function depd (namespace) { - if (!namespace) { - throw new TypeError('argument namespace is required') - } - - var stack = getStack() - var site = callSiteLocation(stack[1]) - var file = site[0] - - function deprecate (message) { - // call to self as log - log.call(deprecate, message) - } - - deprecate._file = file - deprecate._ignored = isignored(namespace) - deprecate._namespace = namespace - deprecate._traced = istraced(namespace) - deprecate._warned = Object.create(null) - - deprecate.function = wrapfunction - deprecate.property = wrapproperty - - return deprecate -} - -/** - * Determine if event emitter has listeners of a given type. - * - * The way to do this check is done three different ways in Node.js >= 0.8 - * so this consolidates them into a minimal set using instance methods. - * - * @param {EventEmitter} emitter - * @param {string} type - * @returns {boolean} - * @private - */ - -function eehaslisteners (emitter, type) { - var count = typeof emitter.listenerCount !== 'function' - ? emitter.listeners(type).length - : emitter.listenerCount(type) - - return count > 0 -} - -/** - * Determine if namespace is ignored. 
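 * (Editor's aside, not part of the deleted source: per the body below,
 * `process.noDeprecation` mirrored node's own `--no-deprecation` flag,
 * and `NO_DEPRECATION=*` silenced every namespace, since
 * containsNamespace() treats `*` as a wildcard.)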
- */ - -function isignored (namespace) { - if (process.noDeprecation) { - // --no-deprecation support - return true - } - - var str = process.env.NO_DEPRECATION || '' - - // namespace ignored - return containsNamespace(str, namespace) -} - -/** - * Determine if namespace is traced. - */ - -function istraced (namespace) { - if (process.traceDeprecation) { - // --trace-deprecation support - return true - } - - var str = process.env.TRACE_DEPRECATION || '' - - // namespace traced - return containsNamespace(str, namespace) -} - -/** - * Display deprecation message. - */ - -function log (message, site) { - var haslisteners = eehaslisteners(process, 'deprecation') - - // abort early if no destination - if (!haslisteners && this._ignored) { - return - } - - var caller - var callFile - var callSite - var depSite - var i = 0 - var seen = false - var stack = getStack() - var file = this._file - - if (site) { - // provided site - depSite = site - callSite = callSiteLocation(stack[1]) - callSite.name = depSite.name - file = callSite[0] - } else { - // get call site - i = 2 - depSite = callSiteLocation(stack[i]) - callSite = depSite - } - - // get caller of deprecated thing in relation to file - for (; i < stack.length; i++) { - caller = callSiteLocation(stack[i]) - callFile = caller[0] - - if (callFile === file) { - seen = true - } else if (callFile === this._file) { - file = this._file - } else if (seen) { - break - } - } - - var key = caller - ? depSite.join(':') + '__' + caller.join(':') - : undefined - - if (key !== undefined && key in this._warned) { - // already warned - return - } - - this._warned[key] = true - - // generate automatic message from call site - var msg = message - if (!msg) { - msg = callSite === depSite || !callSite.name - ? defaultMessage(depSite) - : defaultMessage(callSite) - } - - // emit deprecation if listeners exist - if (haslisteners) { - var err = DeprecationError(this._namespace, msg, stack.slice(i)) - process.emit('deprecation', err) - return - } - - // format and write message - var format = process.stderr.isTTY - ? formatColor - : formatPlain - var output = format.call(this, msg, caller, stack.slice(i)) - process.stderr.write(output + '\n', 'utf8') -} - -/** - * Get call site location as array. - */ - -function callSiteLocation (callSite) { - var file = callSite.getFileName() || '' - var line = callSite.getLineNumber() - var colm = callSite.getColumnNumber() - - if (callSite.isEval()) { - file = callSite.getEvalOrigin() + ', ' + file - } - - var site = [file, line, colm] - - site.callSite = callSite - site.name = callSite.getFunctionName() - - return site -} - -/** - * Generate a default message from the site. - */ - -function defaultMessage (site) { - var callSite = site.callSite - var funcName = site.name - - // make useful anonymous name - if (!funcName) { - funcName = '' - } - - var context = callSite.getThis() - var typeName = context && callSite.getTypeName() - - // ignore useless type name - if (typeName === 'Object') { - typeName = undefined - } - - // make useful type name - if (typeName === 'Function') { - typeName = context.name || typeName - } - - return typeName && callSite.getMethodName() - ? typeName + '.' + funcName - : funcName -} - -/** - * Format deprecation message without color. 
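 * (Editor's aside, not part of the deleted source: when a namespace was
 * traced via `TRACE_DEPRECATION` or `--trace-deprecation`, the formatters
 * below appended the full call stack instead of the single caller
 * location.)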
- */ - -function formatPlain (msg, caller, stack) { - var timestamp = new Date().toUTCString() - - var formatted = timestamp + - ' ' + this._namespace + - ' deprecated ' + msg - - // add stack trace - if (this._traced) { - for (var i = 0; i < stack.length; i++) { - formatted += '\n at ' + stack[i].toString() - } - - return formatted - } - - if (caller) { - formatted += ' at ' + formatLocation(caller) - } - - return formatted -} - -/** - * Format deprecation message with color. - */ - -function formatColor (msg, caller, stack) { - var formatted = '\x1b[36;1m' + this._namespace + '\x1b[22;39m' + // bold cyan - ' \x1b[33;1mdeprecated\x1b[22;39m' + // bold yellow - ' \x1b[0m' + msg + '\x1b[39m' // reset - - // add stack trace - if (this._traced) { - for (var i = 0; i < stack.length; i++) { - formatted += '\n \x1b[36mat ' + stack[i].toString() + '\x1b[39m' // cyan - } - - return formatted - } - - if (caller) { - formatted += ' \x1b[36m' + formatLocation(caller) + '\x1b[39m' // cyan - } - - return formatted -} - -/** - * Format call site location. - */ - -function formatLocation (callSite) { - return relative(basePath, callSite[0]) + - ':' + callSite[1] + - ':' + callSite[2] -} - -/** - * Get the stack as array of call sites. - */ - -function getStack () { - var limit = Error.stackTraceLimit - var obj = {} - var prep = Error.prepareStackTrace - - Error.prepareStackTrace = prepareObjectStackTrace - Error.stackTraceLimit = Math.max(10, limit) - - // capture the stack - Error.captureStackTrace(obj) - - // slice this function off the top - var stack = obj.stack.slice(1) - - Error.prepareStackTrace = prep - Error.stackTraceLimit = limit - - return stack -} - -/** - * Capture call site stack from v8. - */ - -function prepareObjectStackTrace (obj, stack) { - return stack -} - -/** - * Return a wrapped function in a deprecation message. - */ - -function wrapfunction (fn, message) { - if (typeof fn !== 'function') { - throw new TypeError('argument fn must be a function') - } - - var args = createArgumentsString(fn.length) - var stack = getStack() - var site = callSiteLocation(stack[1]) - - site.name = fn.name - - // eslint-disable-next-line no-new-func - var deprecatedfn = new Function('fn', 'log', 'deprecate', 'message', 'site', - '"use strict"\n' + - 'return function (' + args + ') {' + - 'log.call(deprecate, message, site)\n' + - 'return fn.apply(this, arguments)\n' + - '}')(fn, log, this, message, site) - - return deprecatedfn -} - -/** - * Wrap property in a deprecation message. 
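 * (Editor's aside, not part of the deleted source: this backed the
 * `deprecate.property(obj, 'prop', 'message')` API, logging on each get
 * or set of the wrapped property and deduplicating per call site via
 * `_warned`.)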
- */ - -function wrapproperty (obj, prop, message) { - if (!obj || (typeof obj !== 'object' && typeof obj !== 'function')) { - throw new TypeError('argument obj must be object') - } - - var descriptor = Object.getOwnPropertyDescriptor(obj, prop) - - if (!descriptor) { - throw new TypeError('must call property on owner object') - } - - if (!descriptor.configurable) { - throw new TypeError('property must be configurable') - } - - var deprecate = this - var stack = getStack() - var site = callSiteLocation(stack[1]) - - // set site name - site.name = prop - - // convert data descriptor - if ('value' in descriptor) { - descriptor = convertDataDescriptorToAccessor(obj, prop, message) - } - - var get = descriptor.get - var set = descriptor.set - - // wrap getter - if (typeof get === 'function') { - descriptor.get = function getter () { - log.call(deprecate, message, site) - return get.apply(this, arguments) - } - } - - // wrap setter - if (typeof set === 'function') { - descriptor.set = function setter () { - log.call(deprecate, message, site) - return set.apply(this, arguments) - } - } - - Object.defineProperty(obj, prop, descriptor) -} - -/** - * Create DeprecationError for deprecation - */ - -function DeprecationError (namespace, message, stack) { - var error = new Error() - var stackString - - Object.defineProperty(error, 'constructor', { - value: DeprecationError - }) - - Object.defineProperty(error, 'message', { - configurable: true, - enumerable: false, - value: message, - writable: true - }) - - Object.defineProperty(error, 'name', { - enumerable: false, - configurable: true, - value: 'DeprecationError', - writable: true - }) - - Object.defineProperty(error, 'namespace', { - configurable: true, - enumerable: false, - value: namespace, - writable: true - }) - - Object.defineProperty(error, 'stack', { - configurable: true, - enumerable: false, - get: function () { - if (stackString !== undefined) { - return stackString - } - - // prepare stack trace - return (stackString = createStackString.call(this, stack)) - }, - set: function setter (val) { - stackString = val - } - }) - - return error -} diff --git a/node_modules/depd/lib/browser/index.js b/node_modules/depd/lib/browser/index.js deleted file mode 100644 index 6be45cc20b33f..0000000000000 --- a/node_modules/depd/lib/browser/index.js +++ /dev/null @@ -1,77 +0,0 @@ -/*! - * depd - * Copyright(c) 2015 Douglas Christopher Wilson - * MIT Licensed - */ - -'use strict' - -/** - * Module exports. - * @public - */ - -module.exports = depd - -/** - * Create deprecate for namespace in caller. - */ - -function depd (namespace) { - if (!namespace) { - throw new TypeError('argument namespace is required') - } - - function deprecate (message) { - // no-op in browser - } - - deprecate._file = undefined - deprecate._ignored = true - deprecate._namespace = namespace - deprecate._traced = false - deprecate._warned = Object.create(null) - - deprecate.function = wrapfunction - deprecate.property = wrapproperty - - return deprecate -} - -/** - * Return a wrapped function in a deprecation message. - * - * This is a no-op version of the wrapper, which does nothing but call - * validation. - */ - -function wrapfunction (fn, message) { - if (typeof fn !== 'function') { - throw new TypeError('argument fn must be a function') - } - - return fn -} - -/** - * Wrap property in a deprecation message. - * - * This is a no-op version of the wrapper, which does nothing but call - * validation. 
- */ - -function wrapproperty (obj, prop, message) { - if (!obj || (typeof obj !== 'object' && typeof obj !== 'function')) { - throw new TypeError('argument obj must be object') - } - - var descriptor = Object.getOwnPropertyDescriptor(obj, prop) - - if (!descriptor) { - throw new TypeError('must call property on owner object') - } - - if (!descriptor.configurable) { - throw new TypeError('property must be configurable') - } -} diff --git a/node_modules/depd/package.json b/node_modules/depd/package.json deleted file mode 100644 index 3857e199184a0..0000000000000 --- a/node_modules/depd/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "depd", - "description": "Deprecate all the things", - "version": "2.0.0", - "author": "Douglas Christopher Wilson ", - "license": "MIT", - "keywords": [ - "deprecate", - "deprecated" - ], - "repository": "dougwilson/nodejs-depd", - "browser": "lib/browser/index.js", - "devDependencies": { - "benchmark": "2.1.4", - "beautify-benchmark": "0.2.4", - "eslint": "5.7.0", - "eslint-config-standard": "12.0.0", - "eslint-plugin-import": "2.14.0", - "eslint-plugin-markdown": "1.0.0-beta.7", - "eslint-plugin-node": "7.0.1", - "eslint-plugin-promise": "4.0.1", - "eslint-plugin-standard": "4.0.0", - "istanbul": "0.4.5", - "mocha": "5.2.0", - "safe-buffer": "5.1.2", - "uid-safe": "2.1.5" - }, - "files": [ - "lib/", - "History.md", - "LICENSE", - "index.js", - "Readme.md" - ], - "engines": { - "node": ">= 0.8" - }, - "scripts": { - "bench": "node benchmark/index.js", - "lint": "eslint --plugin markdown --ext js,md .", - "test": "mocha --reporter spec --bail test/", - "test-ci": "istanbul cover --print=none node_modules/mocha/bin/_mocha -- --reporter spec test/ && istanbul report lcovonly text-summary", - "test-cov": "istanbul cover --print=none node_modules/mocha/bin/_mocha -- --reporter dot test/ && istanbul report lcov text-summary" - } -} diff --git a/node_modules/make-fetch-happen/lib/remote.js b/node_modules/make-fetch-happen/lib/remote.js index bdbcc79cad908..2aef9f8f969b0 100644 --- a/node_modules/make-fetch-happen/lib/remote.js +++ b/node_modules/make-fetch-happen/lib/remote.js @@ -4,7 +4,7 @@ const promiseRetry = require('promise-retry') const ssri = require('ssri') const CachingMinipassPipeline = require('./pipeline.js') -const getAgent = require('./agent.js') +const { getAgent } = require('@npmcli/agent') const pkg = require('../package.json') const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` @@ -14,9 +14,15 @@ const RETRY_ERRORS = [ 'ECONNREFUSED', // remote host refused to open connection 'EADDRINUSE', // failed to bind to a local port (proxy?) 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW - 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive + // from @npmcli/agent + 'ECONNECTIONTIMEOUT', + 'EIDLETIMEOUT', + 'ERESPONSETIMEOUT', + 'ETRANSFERTIMEOUT', // Known codes we do NOT retry on: // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) + // EINVALIDPROXY // invalid protocol from @npmcli/agent + // EINVALIDRESPONSE // invalid status code from @npmcli/agent ] const RETRY_TYPES = [ diff --git a/node_modules/make-fetch-happen/node_modules/minipass/LICENSE b/node_modules/make-fetch-happen/node_modules/minipass/LICENSE new file mode 100644 index 0000000000000..97f8e32ed82e4 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2017-2023 npm, Inc., Isaac Z. 
Schlueter, and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/index.js b/node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/index.js new file mode 100644 index 0000000000000..b6cdae8eb514b --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/index.js @@ -0,0 +1,1028 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0; +const proc = typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + }; +const events_1 = require("events"); +const stream_1 = __importDefault(require("stream")); +const string_decoder_1 = require("string_decoder"); +/** + * Return true if the argument is a Minipass stream, Node stream, or something + * else that Minipass can interact with. + */ +const isStream = (s) => !!s && + typeof s === 'object' && + (s instanceof Minipass || + s instanceof stream_1.default || + (0, exports.isReadable)(s) || + (0, exports.isWritable)(s)); +exports.isStream = isStream; +/** + * Return true if the argument is a valid {@link Minipass.Readable} + */ +const isReadable = (s) => !!s && + typeof s === 'object' && + s instanceof events_1.EventEmitter && + typeof s.pipe === 'function' && + // node core Writable streams have a pipe() method, but it throws + s.pipe !== stream_1.default.Writable.prototype.pipe; +exports.isReadable = isReadable; +/** + * Return true if the argument is a valid {@link Minipass.Writable} + */ +const isWritable = (s) => !!s && + typeof s === 'object' && + s instanceof events_1.EventEmitter && + typeof s.write === 'function' && + typeof s.end === 'function'; +exports.isWritable = isWritable; +const EOF = Symbol('EOF'); +const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); +const EMITTED_END = Symbol('emittedEnd'); +const EMITTING_END = Symbol('emittingEnd'); +const EMITTED_ERROR = Symbol('emittedError'); +const CLOSED = Symbol('closed'); +const READ = Symbol('read'); +const FLUSH = Symbol('flush'); +const FLUSHCHUNK = Symbol('flushChunk'); +const ENCODING = Symbol('encoding'); +const DECODER = Symbol('decoder'); +const FLOWING = Symbol('flowing'); +const PAUSED = Symbol('paused'); +const RESUME = Symbol('resume'); +const BUFFER = Symbol('buffer'); +const PIPES = Symbol('pipes'); +const BUFFERLENGTH = Symbol('bufferLength'); +const BUFFERPUSH = Symbol('bufferPush'); +const BUFFERSHIFT = Symbol('bufferShift'); +const OBJECTMODE = Symbol('objectMode'); +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed'); +// internal event when stream has an error +const ERROR = Symbol('error'); +const 
EMITDATA = Symbol('emitData'); +const EMITEND = Symbol('emitEnd'); +const EMITEND2 = Symbol('emitEnd2'); +const ASYNC = Symbol('async'); +const ABORT = Symbol('abort'); +const ABORTED = Symbol('aborted'); +const SIGNAL = Symbol('signal'); +const DATALISTENERS = Symbol('dataListeners'); +const DISCARDED = Symbol('discarded'); +const defer = (fn) => Promise.resolve().then(fn); +const nodefer = (fn) => fn(); +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; +const isArrayBufferLike = (b) => b instanceof ArrayBuffer || + (!!b && + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0); +const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); +/** + * Internal class representing a pipe to a destination stream. + * + * @internal + */ +class Pipe { + src; + dest; + opts; + ondrain; + constructor(src, dest, opts) { + this.src = src; + this.dest = dest; + this.opts = opts; + this.ondrain = () => src[RESUME](); + this.dest.on('drain', this.ondrain); + } + unpipe() { + this.dest.removeListener('drain', this.ondrain); + } + // only here for the prototype + /* c8 ignore start */ + proxyErrors(_er) { } + /* c8 ignore stop */ + end() { + this.unpipe(); + if (this.opts.end) + this.dest.end(); + } +} +/** + * Internal class representing a pipe to a destination stream where + * errors are proxied. + * + * @internal + */ +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors); + super.unpipe(); + } + constructor(src, dest, opts) { + super(src, dest, opts); + this.proxyErrors = er => dest.emit('error', er); + src.on('error', this.proxyErrors); + } +} +const isObjectModeOptions = (o) => !!o.objectMode; +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; +/** + * Main export, the Minipass class + * + * `RType` is the type of data emitted, defaults to Buffer + * + * `WType` is the type of data to be written, if RType is buffer or string, + * then any {@link Minipass.ContiguousData} is allowed. + * + * `Events` is the set of event handler signatures that this object + * will emit, see {@link Minipass.Events} + */ +class Minipass extends events_1.EventEmitter { + [FLOWING] = false; + [PAUSED] = false; + [PIPES] = []; + [BUFFER] = []; + [OBJECTMODE]; + [ENCODING]; + [ASYNC]; + [DECODER]; + [EOF] = false; + [EMITTED_END] = false; + [EMITTING_END] = false; + [CLOSED] = false; + [EMITTED_ERROR] = null; + [BUFFERLENGTH] = 0; + [DESTROYED] = false; + [SIGNAL]; + [ABORTED] = false; + [DATALISTENERS] = 0; + [DISCARDED] = false; + /** + * true if the stream can be written + */ + writable = true; + /** + * true if the stream can be read + */ + readable = true; + /** + * If `RType` is Buffer, then options do not need to be provided. + * Otherwise, an options object must be provided to specify either + * {@link Minipass.SharedOptions.objectMode} or + * {@link Minipass.SharedOptions.encoding}, as appropriate. 
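 * (Editor's aside, not upstream JSDoc: `new Minipass()` emits Buffers,
 * `new Minipass({ encoding: 'utf8' })` emits strings, and
 * `new Minipass({ objectMode: true })` emits arbitrary values; combining
 * encoding with objectMode throws, as enforced below.)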
+ */ + constructor(...args) { + const options = (args[0] || + {}); + super(); + if (options.objectMode && typeof options.encoding === 'string') { + throw new TypeError('Encoding and objectMode may not be used together'); + } + if (isObjectModeOptions(options)) { + this[OBJECTMODE] = true; + this[ENCODING] = null; + } + else if (isEncodingOptions(options)) { + this[ENCODING] = options.encoding; + this[OBJECTMODE] = false; + } + else { + this[OBJECTMODE] = false; + this[ENCODING] = null; + } + this[ASYNC] = !!options.async; + this[DECODER] = this[ENCODING] + ? new string_decoder_1.StringDecoder(this[ENCODING]) + : null; + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); + } + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); + } + const { signal } = options; + if (signal) { + this[SIGNAL] = signal; + if (signal.aborted) { + this[ABORT](); + } + else { + signal.addEventListener('abort', () => this[ABORT]()); + } + } + } + /** + * The amount of data stored in the buffer waiting to be read. + * + * For Buffer strings, this will be the total byte length. + * For string encoding streams, this will be the string character length, + * according to JavaScript's `string.length` logic. + * For objectMode streams, this is a count of the items waiting to be + * emitted. + */ + get bufferLength() { + return this[BUFFERLENGTH]; + } + /** + * The `BufferEncoding` currently in use, or `null` + */ + get encoding() { + return this[ENCODING]; + } + /** + * @deprecated - This is a read only property + */ + set encoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * @deprecated - Encoding may only be set at instantiation time + */ + setEncoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * True if this is an objectMode stream + */ + get objectMode() { + return this[OBJECTMODE]; + } + /** + * @deprecated - This is a read-only property + */ + set objectMode(_om) { + throw new Error('objectMode must be set at instantiation time'); + } + /** + * true if this is an async stream + */ + get ['async']() { + return this[ASYNC]; + } + /** + * Set to true to make this stream async. + * + * Once set, it cannot be unset, as this would potentially cause incorrect + * behavior. Ie, a sync stream can be made async, but an async stream + * cannot be safely made sync. + */ + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a; + } + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true; + this.emit('abort', this[SIGNAL]?.reason); + this.destroy(this[SIGNAL]?.reason); + } + /** + * True if the stream has been aborted. + */ + get aborted() { + return this[ABORTED]; + } + /** + * No-op setter. Stream aborted status is set via the AbortSignal provided + * in the constructor options. + */ + set aborted(_) { } + write(chunk, encoding, cb) { + if (this[ABORTED]) + return false; + if (this[EOF]) + throw new Error('write after end'); + if (this[DESTROYED]) { + this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); + return true; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (!encoding) + encoding = 'utf8'; + const fn = this[ASYNC] ? 
defer : nodefer; + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything is only allowed if in object mode, so throw + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); + } + else if (isArrayBufferLike(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk); + } + else if (typeof chunk !== 'string') { + throw new Error('Non-contiguous data written to non-objectMode stream'); + } + } + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + // maybe impossible? + /* c8 ignore start */ + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + /* c8 ignore stop */ + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { + //@ts-ignore - sinful unsafe type change + chunk = Buffer.from(chunk, encoding); + } + if (Buffer.isBuffer(chunk) && this[ENCODING]) { + //@ts-ignore - sinful unsafe type change + chunk = this[DECODER].write(chunk); + } + // Note: flushing CAN potentially switch us into not-flowing mode + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + /** + * Low-level explicit read method. + * + * In objectMode, the argument is ignored, and one item is returned if + * available. + * + * `n` is the number of bytes (or in the case of encoding streams, + * characters) to consume. If `n` is not provided, then the entire buffer + * is returned, or `null` is returned if no data is available. + * + * If `n` is greater that the amount of data in the internal buffer, + * then `null` is returned. + */ + read(n) { + if (this[DESTROYED]) + return null; + this[DISCARDED] = false; + if (this[BUFFERLENGTH] === 0 || + n === 0 || + (n && n > this[BUFFERLENGTH])) { + this[MAYBE_EMIT_END](); + return null; + } + if (this[OBJECTMODE]) + n = null; + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + // not object mode, so if we have an encoding, then RType is string + // otherwise, must be Buffer + this[BUFFER] = [ + (this[ENCODING] + ? 
this[BUFFER].join('') + : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), + ]; + } + const ret = this[READ](n || null, this[BUFFER][0]); + this[MAYBE_EMIT_END](); + return ret; + } + [READ](n, chunk) { + if (this[OBJECTMODE]) + this[BUFFERSHIFT](); + else { + const c = chunk; + if (n === c.length || n === null) + this[BUFFERSHIFT](); + else if (typeof c === 'string') { + this[BUFFER][0] = c.slice(n); + chunk = c.slice(0, n); + this[BUFFERLENGTH] -= n; + } + else { + this[BUFFER][0] = c.subarray(n); + chunk = c.subarray(0, n); + this[BUFFERLENGTH] -= n; + } + } + this.emit('data', chunk); + if (!this[BUFFER].length && !this[EOF]) + this.emit('drain'); + return chunk; + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (chunk !== undefined) + this.write(chunk, encoding); + if (cb) + this.once('end', cb); + this[EOF] = true; + this.writable = false; + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. + if (this[FLOWING] || !this[PAUSED]) + this[MAYBE_EMIT_END](); + return this; + } + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) + return; + if (!this[DATALISTENERS] && !this[PIPES].length) { + this[DISCARDED] = true; + } + this[PAUSED] = false; + this[FLOWING] = true; + this.emit('resume'); + if (this[BUFFER].length) + this[FLUSH](); + else if (this[EOF]) + this[MAYBE_EMIT_END](); + else + this.emit('drain'); + } + /** + * Resume the stream if it is currently in a paused state + * + * If called when there are no pipe destinations or `data` event listeners, + * this will place the stream in a "discarded" state, where all data will + * be thrown away. The discarded state is removed if a pipe destination or + * data handler is added, if pause() is called, or if any synchronous or + * asynchronous iteration is started. + */ + resume() { + return this[RESUME](); + } + /** + * Pause the stream + */ + pause() { + this[FLOWING] = false; + this[PAUSED] = true; + this[DISCARDED] = false; + } + /** + * true if the stream has been forcibly destroyed + */ + get destroyed() { + return this[DESTROYED]; + } + /** + * true if the stream is currently in a flowing state, meaning that + * any writes will be immediately emitted. + */ + get flowing() { + return this[FLOWING]; + } + /** + * true if the stream is currently in a paused state + */ + get paused() { + return this[PAUSED]; + } + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1; + else + this[BUFFERLENGTH] += chunk.length; + this[BUFFER].push(chunk); + } + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1; + else + this[BUFFERLENGTH] -= this[BUFFER][0].length; + return this[BUFFER].shift(); + } + [FLUSH](noDrain = false) { + do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && + this[BUFFER].length); + if (!noDrain && !this[BUFFER].length && !this[EOF]) + this.emit('drain'); + } + [FLUSHCHUNK](chunk) { + this.emit('data', chunk); + return this[FLOWING]; + } + /** + * Pipe all data emitted by this stream into the destination provided. + * + * Triggers the flow of data. 
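 * (Editor's aside, not upstream JSDoc: `src.pipe(dest, { end: false })`
 * leaves `dest` open after `src` ends, and pipes to stdout/stderr never
 * end them, as the option handling below shows.)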
+ */ + pipe(dest, opts) { + if (this[DESTROYED]) + return dest; + this[DISCARDED] = false; + const ended = this[EMITTED_END]; + opts = opts || {}; + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false; + else + opts.end = opts.end !== false; + opts.proxyErrors = !!opts.proxyErrors; + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end(); + } + else { + // "as" here just ignores the WType, which pipes don't care about, + // since they're only consuming from us, and writing to the dest + this[PIPES].push(!opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)); + if (this[ASYNC]) + defer(() => this[RESUME]()); + else + this[RESUME](); + } + return dest; + } + /** + * Fully unhook a piped destination stream. + * + * If the destination stream was the only consumer of this stream (ie, + * there are no other piped destinations or `'data'` event listeners) + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest); + if (p) { + if (this[PIPES].length === 1) { + if (this[FLOWING] && this[DATALISTENERS] === 0) { + this[FLOWING] = false; + } + this[PIPES] = []; + } + else + this[PIPES].splice(this[PIPES].indexOf(p), 1); + p.unpipe(); + } + } + /** + * Alias for {@link Minipass#on} + */ + addListener(ev, handler) { + return this.on(ev, handler); + } + /** + * Mostly identical to `EventEmitter.on`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * - Adding a 'data' event handler will trigger the flow of data + * + * - Adding a 'readable' event handler when there is data waiting to be read + * will cause 'readable' to be emitted immediately. + * + * - Adding an 'endish' event handler ('end', 'finish', etc.) which has + * already passed will cause the event to be emitted immediately and all + * handlers removed. + * + * - Adding an 'error' event handler after an error has been emitted will + * cause the event to be re-emitted immediately with the error previously + * raised. + */ + on(ev, handler) { + const ret = super.on(ev, handler); + if (ev === 'data') { + this[DISCARDED] = false; + this[DATALISTENERS]++; + if (!this[PIPES].length && !this[FLOWING]) { + this[RESUME](); + } + } + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { + super.emit('readable'); + } + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev); + this.removeAllListeners(ev); + } + else if (ev === 'error' && this[EMITTED_ERROR]) { + const h = handler; + if (this[ASYNC]) + defer(() => h.call(this, this[EMITTED_ERROR])); + else + h.call(this, this[EMITTED_ERROR]); + } + return ret; + } + /** + * Alias for {@link Minipass#off} + */ + removeListener(ev, handler) { + return this.off(ev, handler); + } + /** + * Mostly identical to `EventEmitter.off` + * + * If a 'data' event handler is removed, and it was the last consumer + * (ie, there are no pipe destinations or other 'data' event listeners), + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + off(ev, handler) { + const ret = super.off(ev, handler); + // if we previously had listeners, and now we don't, and we don't + // have any pipes, then stop the flow, unless it's been explicitly + // put in a discarded flowing state via stream.resume(). 
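    // (Editor's note, not upstream: this is why removing the last 'data'
    // listener leaves the stream non-flowing; writes buffer until another
    // consumer attaches, or resume() is called, which then discards the
    // data per the resume() docs above.)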
+ if (ev === 'data') { + this[DATALISTENERS] = this.listeners('data').length; + if (this[DATALISTENERS] === 0 && + !this[DISCARDED] && + !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * Mostly identical to `EventEmitter.removeAllListeners` + * + * If all 'data' event handlers are removed, and they were the last consumer + * (ie, there are no pipe destinations), then the flow of data will stop + * until there is another consumer or {@link Minipass#resume} is explicitly + * called. + */ + removeAllListeners(ev) { + const ret = super.removeAllListeners(ev); + if (ev === 'data' || ev === undefined) { + this[DATALISTENERS] = 0; + if (!this[DISCARDED] && !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * true if the 'end' event has been emitted + */ + get emittedEnd() { + return this[EMITTED_END]; + } + [MAYBE_EMIT_END]() { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF]) { + this[EMITTING_END] = true; + this.emit('end'); + this.emit('prefinish'); + this.emit('finish'); + if (this[CLOSED]) + this.emit('close'); + this[EMITTING_END] = false; + } + } + /** + * Mostly identical to `EventEmitter.emit`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * If the stream has been destroyed, and the event is something other + * than 'close' or 'error', then `false` is returned and no handlers + * are called. + * + * If the event is 'end', and has already been emitted, then the event + * is ignored. If the stream is in a paused or non-flowing state, then + * the event will be deferred until data flow resumes. If the stream is + * async, then handlers will be called on the next tick rather than + * immediately. + * + * If the event is 'close', and 'end' has not yet been emitted, then + * the event will be deferred until after 'end' is emitted. + * + * If the event is 'error', and an AbortSignal was provided for the stream, + * and there are no listeners, then the event is ignored, matching the + * behavior of node core streams in the presense of an AbortSignal. + * + * If the event is 'finish' or 'prefinish', then all listeners will be + * removed after emitting the event, to prevent double-firing. + */ + emit(ev, ...args) { + const data = args[0]; + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && + ev !== 'close' && + ev !== DESTROYED && + this[DESTROYED]) { + return false; + } + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? (defer(() => this[EMITDATA](data)), true) + : this[EMITDATA](data); + } + else if (ev === 'end') { + return this[EMITEND](); + } + else if (ev === 'close') { + this[CLOSED] = true; + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return false; + const ret = super.emit('close'); + this.removeAllListeners('close'); + return ret; + } + else if (ev === 'error') { + this[EMITTED_ERROR] = data; + super.emit(ERROR, data); + const ret = !this[SIGNAL] || this.listeners('error').length + ? 
super.emit('error', data) + : false; + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'resume') { + const ret = super.emit('resume'); + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev); + this.removeAllListeners(ev); + return ret; + } + // Some other unknown event + const ret = super.emit(ev, ...args); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) + this.pause(); + } + const ret = this[DISCARDED] ? false : super.emit('data', data); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITEND]() { + if (this[EMITTED_END]) + return false; + this[EMITTED_END] = true; + this.readable = false; + return this[ASYNC] + ? (defer(() => this[EMITEND2]()), true) + : this[EMITEND2](); + } + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end(); + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data); + } + if (!this[DISCARDED]) + super.emit('data', data); + } + } + for (const p of this[PIPES]) { + p.end(); + } + const ret = super.emit('end'); + this.removeAllListeners('end'); + return ret; + } + /** + * Return a Promise that resolves to an array of all emitted data once + * the stream ends. + */ + async collect() { + const buf = Object.assign([], { + dataLength: 0, + }); + if (!this[OBJECTMODE]) + buf.dataLength = 0; + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise(); + this.on('data', c => { + buf.push(c); + if (!this[OBJECTMODE]) + buf.dataLength += c.length; + }); + await p; + return buf; + } + /** + * Return a Promise that resolves to the concatenation of all emitted data + * once the stream ends. + * + * Not allowed on objectMode streams. + */ + async concat() { + if (this[OBJECTMODE]) { + throw new Error('cannot concat in objectMode'); + } + const buf = await this.collect(); + return (this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength)); + } + /** + * Return a void Promise that resolves once the stream ends. + */ + async promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))); + this.on('error', er => reject(er)); + this.on('end', () => resolve()); + }); + } + /** + * Asynchronous `for await of` iteration. + * + * This will continue emitting all chunks until the stream terminates. + */ + [Symbol.asyncIterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. 
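        // (Editor's note, not upstream: this method is what enables
        // `for await (const chunk of stream)` consumption; the loop ends
        // cleanly on 'end' and rejects if the stream errors or is
        // destroyed.)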
+ this[DISCARDED] = false; + let stopped = false; + const stop = async () => { + this.pause(); + stopped = true; + return { value: undefined, done: true }; + }; + const next = () => { + if (stopped) + return stop(); + const res = this.read(); + if (res !== null) + return Promise.resolve({ done: false, value: res }); + if (this[EOF]) + return stop(); + let resolve; + let reject; + const onerr = (er) => { + this.off('data', ondata); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + stop(); + reject(er); + }; + const ondata = (value) => { + this.off('error', onerr); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + this.pause(); + resolve({ value, done: !!this[EOF] }); + }; + const onend = () => { + this.off('error', onerr); + this.off('data', ondata); + this.off(DESTROYED, ondestroy); + stop(); + resolve({ done: true, value: undefined }); + }; + const ondestroy = () => onerr(new Error('stream destroyed')); + return new Promise((res, rej) => { + reject = rej; + resolve = res; + this.once(DESTROYED, ondestroy); + this.once('error', onerr); + this.once('end', onend); + this.once('data', ondata); + }); + }; + return { + next, + throw: stop, + return: stop, + [Symbol.asyncIterator]() { + return this; + }, + }; + } + /** + * Synchronous `for of` iteration. + * + * The iteration will terminate when the internal buffer runs out, even + * if the stream has not yet terminated. + */ + [Symbol.iterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. + this[DISCARDED] = false; + let stopped = false; + const stop = () => { + this.pause(); + this.off(ERROR, stop); + this.off(DESTROYED, stop); + this.off('end', stop); + stopped = true; + return { done: true, value: undefined }; + }; + const next = () => { + if (stopped) + return stop(); + const value = this.read(); + return value === null ? stop() : { done: false, value }; + }; + this.once('end', stop); + this.once(ERROR, stop); + this.once(DESTROYED, stop); + return { + next, + throw: stop, + return: stop, + [Symbol.iterator]() { + return this; + }, + }; + } + /** + * Destroy a stream, preventing it from being used for any further purpose. + * + * If the stream has a `close()` method, then it will be called on + * destruction. + * + * After destruction, any attempt to write data, read data, or emit most + * events will be ignored. + * + * If an error argument is provided, then it will be emitted in an + * 'error' event. + */ + destroy(er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er); + else + this.emit(DESTROYED); + return this; + } + this[DESTROYED] = true; + this[DISCARDED] = true; + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0; + this[BUFFERLENGTH] = 0; + const wc = this; + if (typeof wc.close === 'function' && !this[CLOSED]) + wc.close(); + if (er) + this.emit('error', er); + // if no error to emit, still reject pending promises + else + this.emit(DESTROYED); + return this; + } + /** + * Alias for {@link isStream} + * + * Former export location, maintained for backwards compatibility. 
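 * (Editor's aside, not upstream JSDoc: `Minipass.isStream(s)` still
 * works, but new code should use the top-level `isStream` export
 * directly.)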
+ * + * @deprecated + */ + static get isStream() { + return exports.isStream; + } +} +exports.Minipass = Minipass; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/package.json b/node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/index.js b/node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/index.js new file mode 100644 index 0000000000000..b65fafbae43a4 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/index.js @@ -0,0 +1,1018 @@ +const proc = typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + }; +import { EventEmitter } from 'events'; +import Stream from 'stream'; +import { StringDecoder } from 'string_decoder'; +/** + * Return true if the argument is a Minipass stream, Node stream, or something + * else that Minipass can interact with. + */ +export const isStream = (s) => !!s && + typeof s === 'object' && + (s instanceof Minipass || + s instanceof Stream || + isReadable(s) || + isWritable(s)); +/** + * Return true if the argument is a valid {@link Minipass.Readable} + */ +export const isReadable = (s) => !!s && + typeof s === 'object' && + s instanceof EventEmitter && + typeof s.pipe === 'function' && + // node core Writable streams have a pipe() method, but it throws + s.pipe !== Stream.Writable.prototype.pipe; +/** + * Return true if the argument is a valid {@link Minipass.Writable} + */ +export const isWritable = (s) => !!s && + typeof s === 'object' && + s instanceof EventEmitter && + typeof s.write === 'function' && + typeof s.end === 'function'; +const EOF = Symbol('EOF'); +const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); +const EMITTED_END = Symbol('emittedEnd'); +const EMITTING_END = Symbol('emittingEnd'); +const EMITTED_ERROR = Symbol('emittedError'); +const CLOSED = Symbol('closed'); +const READ = Symbol('read'); +const FLUSH = Symbol('flush'); +const FLUSHCHUNK = Symbol('flushChunk'); +const ENCODING = Symbol('encoding'); +const DECODER = Symbol('decoder'); +const FLOWING = Symbol('flowing'); +const PAUSED = Symbol('paused'); +const RESUME = Symbol('resume'); +const BUFFER = Symbol('buffer'); +const PIPES = Symbol('pipes'); +const BUFFERLENGTH = Symbol('bufferLength'); +const BUFFERPUSH = Symbol('bufferPush'); +const BUFFERSHIFT = Symbol('bufferShift'); +const OBJECTMODE = Symbol('objectMode'); +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed'); +// internal event when stream has an error +const ERROR = Symbol('error'); +const EMITDATA = Symbol('emitData'); +const EMITEND = Symbol('emitEnd'); +const EMITEND2 = Symbol('emitEnd2'); +const ASYNC = Symbol('async'); +const ABORT = Symbol('abort'); +const ABORTED = Symbol('aborted'); +const SIGNAL = Symbol('signal'); +const DATALISTENERS = Symbol('dataListeners'); +const DISCARDED = Symbol('discarded'); +const defer = (fn) => Promise.resolve().then(fn); +const nodefer = (fn) => fn(); +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; +const isArrayBufferLike = (b) => b instanceof ArrayBuffer || + (!!b && + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0); 
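Editor's note for reviewers: the CJS build above and the ESM build below expose the same Minipass class. As a quick orientation, here is a minimal usage sketch based solely on the JSDoc in the vendored source; it is illustrative and not part of the patch:

    const { Minipass } = require('minipass')

    // Encoding is fixed at construction time; this is a string stream.
    const mp = new Minipass({ encoding: 'utf8' })
    mp.write('foo')
    mp.end('bar')

    // Attaching a 'data' listener triggers the flow of buffered chunks,
    // so 'foo' then 'bar' are emitted here; the 'end' handler fires even
    // though it is attached after 'end' has passed, per the on() docs.
    mp.on('data', chunk => console.log('chunk:', chunk))
    mp.on('end', () => console.log('done'))

    // concat() resolves with all emitted data once the stream ends
    // (not allowed in objectMode).
    const mp2 = new Minipass({ encoding: 'utf8' })
    mp2.end('foo')
    mp2.concat().then(all => console.log(all)) // 'foo'
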
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); +/** + * Internal class representing a pipe to a destination stream. + * + * @internal + */ +class Pipe { + src; + dest; + opts; + ondrain; + constructor(src, dest, opts) { + this.src = src; + this.dest = dest; + this.opts = opts; + this.ondrain = () => src[RESUME](); + this.dest.on('drain', this.ondrain); + } + unpipe() { + this.dest.removeListener('drain', this.ondrain); + } + // only here for the prototype + /* c8 ignore start */ + proxyErrors(_er) { } + /* c8 ignore stop */ + end() { + this.unpipe(); + if (this.opts.end) + this.dest.end(); + } +} +/** + * Internal class representing a pipe to a destination stream where + * errors are proxied. + * + * @internal + */ +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors); + super.unpipe(); + } + constructor(src, dest, opts) { + super(src, dest, opts); + this.proxyErrors = er => dest.emit('error', er); + src.on('error', this.proxyErrors); + } +} +const isObjectModeOptions = (o) => !!o.objectMode; +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; +/** + * Main export, the Minipass class + * + * `RType` is the type of data emitted, defaults to Buffer + * + * `WType` is the type of data to be written, if RType is buffer or string, + * then any {@link Minipass.ContiguousData} is allowed. + * + * `Events` is the set of event handler signatures that this object + * will emit, see {@link Minipass.Events} + */ +export class Minipass extends EventEmitter { + [FLOWING] = false; + [PAUSED] = false; + [PIPES] = []; + [BUFFER] = []; + [OBJECTMODE]; + [ENCODING]; + [ASYNC]; + [DECODER]; + [EOF] = false; + [EMITTED_END] = false; + [EMITTING_END] = false; + [CLOSED] = false; + [EMITTED_ERROR] = null; + [BUFFERLENGTH] = 0; + [DESTROYED] = false; + [SIGNAL]; + [ABORTED] = false; + [DATALISTENERS] = 0; + [DISCARDED] = false; + /** + * true if the stream can be written + */ + writable = true; + /** + * true if the stream can be read + */ + readable = true; + /** + * If `RType` is Buffer, then options do not need to be provided. + * Otherwise, an options object must be provided to specify either + * {@link Minipass.SharedOptions.objectMode} or + * {@link Minipass.SharedOptions.encoding}, as appropriate. + */ + constructor(...args) { + const options = (args[0] || + {}); + super(); + if (options.objectMode && typeof options.encoding === 'string') { + throw new TypeError('Encoding and objectMode may not be used together'); + } + if (isObjectModeOptions(options)) { + this[OBJECTMODE] = true; + this[ENCODING] = null; + } + else if (isEncodingOptions(options)) { + this[ENCODING] = options.encoding; + this[OBJECTMODE] = false; + } + else { + this[OBJECTMODE] = false; + this[ENCODING] = null; + } + this[ASYNC] = !!options.async; + this[DECODER] = this[ENCODING] + ? 
new StringDecoder(this[ENCODING])
+ : null;
+ //@ts-ignore - private option for debugging and testing
+ if (options && options.debugExposeBuffer === true) {
+ Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+ }
+ //@ts-ignore - private option for debugging and testing
+ if (options && options.debugExposePipes === true) {
+ Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+ }
+ const { signal } = options;
+ if (signal) {
+ this[SIGNAL] = signal;
+ if (signal.aborted) {
+ this[ABORT]();
+ }
+ else {
+ signal.addEventListener('abort', () => this[ABORT]());
+ }
+ }
+ }
+ /**
+ * The amount of data stored in the buffer waiting to be read.
+ *
+ * For Buffer streams, this will be the total byte length.
+ * For string encoding streams, this will be the string character length,
+ * according to JavaScript's `string.length` logic.
+ * For objectMode streams, this is a count of the items waiting to be
+ * emitted.
+ */
+ get bufferLength() {
+ return this[BUFFERLENGTH];
+ }
+ /**
+ * The `BufferEncoding` currently in use, or `null`
+ */
+ get encoding() {
+ return this[ENCODING];
+ }
+ /**
+ * @deprecated - This is a read-only property
+ */
+ set encoding(_enc) {
+ throw new Error('Encoding must be set at instantiation time');
+ }
+ /**
+ * @deprecated - Encoding may only be set at instantiation time
+ */
+ setEncoding(_enc) {
+ throw new Error('Encoding must be set at instantiation time');
+ }
+ /**
+ * True if this is an objectMode stream
+ */
+ get objectMode() {
+ return this[OBJECTMODE];
+ }
+ /**
+ * @deprecated - This is a read-only property
+ */
+ set objectMode(_om) {
+ throw new Error('objectMode must be set at instantiation time');
+ }
+ /**
+ * true if this is an async stream
+ */
+ get ['async']() {
+ return this[ASYNC];
+ }
+ /**
+ * Set to true to make this stream async.
+ *
+ * Once set, it cannot be unset, as this would potentially cause incorrect
+ * behavior. Ie, a sync stream can be made async, but an async stream
+ * cannot be safely made sync.
+ */
+ set ['async'](a) {
+ this[ASYNC] = this[ASYNC] || !!a;
+ }
+ // drop everything and get out of the flow completely
+ [ABORT]() {
+ this[ABORTED] = true;
+ this.emit('abort', this[SIGNAL]?.reason);
+ this.destroy(this[SIGNAL]?.reason);
+ }
+ /**
+ * True if the stream has been aborted.
+ */
+ get aborted() {
+ return this[ABORTED];
+ }
+ /**
+ * No-op setter. Stream aborted status is set via the AbortSignal provided
+ * in the constructor options.
+ */
+ set aborted(_) { }
+ write(chunk, encoding, cb) {
+ if (this[ABORTED])
+ return false;
+ if (this[EOF])
+ throw new Error('write after end');
+ if (this[DESTROYED]) {
+ this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+ return true;
+ }
+ if (typeof encoding === 'function') {
+ cb = encoding;
+ encoding = 'utf8';
+ }
+ if (!encoding)
+ encoding = 'utf8';
+ const fn = this[ASYNC] ? defer : nodefer;
+ // convert array buffers and typed array views into buffers
+ // at some point in the future, we may want to do the opposite!
+ // leave strings and buffers as-is
+ // anything is only allowed if in object mode, so throw
+ if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+ if (isArrayBufferView(chunk)) {
+ //@ts-ignore - sinful unsafe type changing
+ chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+ }
+ else if (isArrayBufferLike(chunk)) {
+ //@ts-ignore - sinful unsafe type changing
+ chunk = Buffer.from(chunk);
+ }
+ else if (typeof chunk !== 'string') {
+ throw new Error('Non-contiguous data written to non-objectMode stream');
+ }
+ }
+ // handle object mode up front, since it's simpler
+ // this yields better performance, fewer checks later.
+ if (this[OBJECTMODE]) {
+ // maybe impossible?
+ /* c8 ignore start */
+ if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+ this[FLUSH](true);
+ /* c8 ignore stop */
+ if (this[FLOWING])
+ this.emit('data', chunk);
+ else
+ this[BUFFERPUSH](chunk);
+ if (this[BUFFERLENGTH] !== 0)
+ this.emit('readable');
+ if (cb)
+ fn(cb);
+ return this[FLOWING];
+ }
+ // at this point the chunk is a buffer or string
+ // don't buffer it up or send it to the decoder
+ if (!chunk.length) {
+ if (this[BUFFERLENGTH] !== 0)
+ this.emit('readable');
+ if (cb)
+ fn(cb);
+ return this[FLOWING];
+ }
+ // fast-path writing strings of same encoding to a stream with
+ // an empty buffer, skipping the buffer/decoder dance
+ if (typeof chunk === 'string' &&
+ // unless it is a string already ready for us to use
+ !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+ //@ts-ignore - sinful unsafe type change
+ chunk = Buffer.from(chunk, encoding);
+ }
+ if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+ //@ts-ignore - sinful unsafe type change
+ chunk = this[DECODER].write(chunk);
+ }
+ // Note: flushing CAN potentially switch us into not-flowing mode
+ if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+ this[FLUSH](true);
+ if (this[FLOWING])
+ this.emit('data', chunk);
+ else
+ this[BUFFERPUSH](chunk);
+ if (this[BUFFERLENGTH] !== 0)
+ this.emit('readable');
+ if (cb)
+ fn(cb);
+ return this[FLOWING];
+ }
+ /**
+ * Low-level explicit read method.
+ *
+ * In objectMode, the argument is ignored, and one item is returned if
+ * available.
+ *
+ * `n` is the number of bytes (or in the case of encoding streams,
+ * characters) to consume. If `n` is not provided, then the entire buffer
+ * is returned, or `null` is returned if no data is available.
+ *
+ * If `n` is greater than the amount of data in the internal buffer,
+ * then `null` is returned.
+ */
+ read(n) {
+ if (this[DESTROYED])
+ return null;
+ this[DISCARDED] = false;
+ if (this[BUFFERLENGTH] === 0 ||
+ n === 0 ||
+ (n && n > this[BUFFERLENGTH])) {
+ this[MAYBE_EMIT_END]();
+ return null;
+ }
+ if (this[OBJECTMODE])
+ n = null;
+ if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+ // not object mode, so if we have an encoding, then RType is string
+ // otherwise, must be Buffer
+ this[BUFFER] = [
+ (this[ENCODING]
+ ?
this[BUFFER].join('') + : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), + ]; + } + const ret = this[READ](n || null, this[BUFFER][0]); + this[MAYBE_EMIT_END](); + return ret; + } + [READ](n, chunk) { + if (this[OBJECTMODE]) + this[BUFFERSHIFT](); + else { + const c = chunk; + if (n === c.length || n === null) + this[BUFFERSHIFT](); + else if (typeof c === 'string') { + this[BUFFER][0] = c.slice(n); + chunk = c.slice(0, n); + this[BUFFERLENGTH] -= n; + } + else { + this[BUFFER][0] = c.subarray(n); + chunk = c.subarray(0, n); + this[BUFFERLENGTH] -= n; + } + } + this.emit('data', chunk); + if (!this[BUFFER].length && !this[EOF]) + this.emit('drain'); + return chunk; + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (chunk !== undefined) + this.write(chunk, encoding); + if (cb) + this.once('end', cb); + this[EOF] = true; + this.writable = false; + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. + if (this[FLOWING] || !this[PAUSED]) + this[MAYBE_EMIT_END](); + return this; + } + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) + return; + if (!this[DATALISTENERS] && !this[PIPES].length) { + this[DISCARDED] = true; + } + this[PAUSED] = false; + this[FLOWING] = true; + this.emit('resume'); + if (this[BUFFER].length) + this[FLUSH](); + else if (this[EOF]) + this[MAYBE_EMIT_END](); + else + this.emit('drain'); + } + /** + * Resume the stream if it is currently in a paused state + * + * If called when there are no pipe destinations or `data` event listeners, + * this will place the stream in a "discarded" state, where all data will + * be thrown away. The discarded state is removed if a pipe destination or + * data handler is added, if pause() is called, or if any synchronous or + * asynchronous iteration is started. + */ + resume() { + return this[RESUME](); + } + /** + * Pause the stream + */ + pause() { + this[FLOWING] = false; + this[PAUSED] = true; + this[DISCARDED] = false; + } + /** + * true if the stream has been forcibly destroyed + */ + get destroyed() { + return this[DESTROYED]; + } + /** + * true if the stream is currently in a flowing state, meaning that + * any writes will be immediately emitted. + */ + get flowing() { + return this[FLOWING]; + } + /** + * true if the stream is currently in a paused state + */ + get paused() { + return this[PAUSED]; + } + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1; + else + this[BUFFERLENGTH] += chunk.length; + this[BUFFER].push(chunk); + } + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1; + else + this[BUFFERLENGTH] -= this[BUFFER][0].length; + return this[BUFFER].shift(); + } + [FLUSH](noDrain = false) { + do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && + this[BUFFER].length); + if (!noDrain && !this[BUFFER].length && !this[EOF]) + this.emit('drain'); + } + [FLUSHCHUNK](chunk) { + this.emit('data', chunk); + return this[FLOWING]; + } + /** + * Pipe all data emitted by this stream into the destination provided. + * + * Triggers the flow of data. 
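+ *
+ * Editorial illustration, not upstream: `src.pipe(dst)` forwards chunks
+ * and calls `dst.end()` when `src` ends, unless `{ end: false }` is
+ * passed; piping to `process.stdout` or `process.stderr` never ends
+ * the destination.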
+ */ + pipe(dest, opts) { + if (this[DESTROYED]) + return dest; + this[DISCARDED] = false; + const ended = this[EMITTED_END]; + opts = opts || {}; + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false; + else + opts.end = opts.end !== false; + opts.proxyErrors = !!opts.proxyErrors; + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end(); + } + else { + // "as" here just ignores the WType, which pipes don't care about, + // since they're only consuming from us, and writing to the dest + this[PIPES].push(!opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)); + if (this[ASYNC]) + defer(() => this[RESUME]()); + else + this[RESUME](); + } + return dest; + } + /** + * Fully unhook a piped destination stream. + * + * If the destination stream was the only consumer of this stream (ie, + * there are no other piped destinations or `'data'` event listeners) + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest); + if (p) { + if (this[PIPES].length === 1) { + if (this[FLOWING] && this[DATALISTENERS] === 0) { + this[FLOWING] = false; + } + this[PIPES] = []; + } + else + this[PIPES].splice(this[PIPES].indexOf(p), 1); + p.unpipe(); + } + } + /** + * Alias for {@link Minipass#on} + */ + addListener(ev, handler) { + return this.on(ev, handler); + } + /** + * Mostly identical to `EventEmitter.on`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * - Adding a 'data' event handler will trigger the flow of data + * + * - Adding a 'readable' event handler when there is data waiting to be read + * will cause 'readable' to be emitted immediately. + * + * - Adding an 'endish' event handler ('end', 'finish', etc.) which has + * already passed will cause the event to be emitted immediately and all + * handlers removed. + * + * - Adding an 'error' event handler after an error has been emitted will + * cause the event to be re-emitted immediately with the error previously + * raised. + */ + on(ev, handler) { + const ret = super.on(ev, handler); + if (ev === 'data') { + this[DISCARDED] = false; + this[DATALISTENERS]++; + if (!this[PIPES].length && !this[FLOWING]) { + this[RESUME](); + } + } + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { + super.emit('readable'); + } + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev); + this.removeAllListeners(ev); + } + else if (ev === 'error' && this[EMITTED_ERROR]) { + const h = handler; + if (this[ASYNC]) + defer(() => h.call(this, this[EMITTED_ERROR])); + else + h.call(this, this[EMITTED_ERROR]); + } + return ret; + } + /** + * Alias for {@link Minipass#off} + */ + removeListener(ev, handler) { + return this.off(ev, handler); + } + /** + * Mostly identical to `EventEmitter.off` + * + * If a 'data' event handler is removed, and it was the last consumer + * (ie, there are no pipe destinations or other 'data' event listeners), + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + off(ev, handler) { + const ret = super.off(ev, handler); + // if we previously had listeners, and now we don't, and we don't + // have any pipes, then stop the flow, unless it's been explicitly + // put in a discarded flowing state via stream.resume(). 
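+ // editorial illustration, not upstream: after s.on('data', fn) then
+ // s.off('data', fn), a subsequent s.write('x') buffers the chunk
+ // rather than emitting it, until a new 'data' listener, pipe
+ // destination, or explicit resume() restarts the flow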
+ if (ev === 'data') {
+ this[DATALISTENERS] = this.listeners('data').length;
+ if (this[DATALISTENERS] === 0 &&
+ !this[DISCARDED] &&
+ !this[PIPES].length) {
+ this[FLOWING] = false;
+ }
+ }
+ return ret;
+ }
+ /**
+ * Mostly identical to `EventEmitter.removeAllListeners`
+ *
+ * If all 'data' event handlers are removed, and they were the last consumer
+ * (ie, there are no pipe destinations), then the flow of data will stop
+ * until there is another consumer or {@link Minipass#resume} is explicitly
+ * called.
+ */
+ removeAllListeners(ev) {
+ const ret = super.removeAllListeners(ev);
+ if (ev === 'data' || ev === undefined) {
+ this[DATALISTENERS] = 0;
+ if (!this[DISCARDED] && !this[PIPES].length) {
+ this[FLOWING] = false;
+ }
+ }
+ return ret;
+ }
+ /**
+ * true if the 'end' event has been emitted
+ */
+ get emittedEnd() {
+ return this[EMITTED_END];
+ }
+ [MAYBE_EMIT_END]() {
+ if (!this[EMITTING_END] &&
+ !this[EMITTED_END] &&
+ !this[DESTROYED] &&
+ this[BUFFER].length === 0 &&
+ this[EOF]) {
+ this[EMITTING_END] = true;
+ this.emit('end');
+ this.emit('prefinish');
+ this.emit('finish');
+ if (this[CLOSED])
+ this.emit('close');
+ this[EMITTING_END] = false;
+ }
+ }
+ /**
+ * Mostly identical to `EventEmitter.emit`, with the following
+ * behavior differences to prevent data loss and unnecessary hangs:
+ *
+ * If the stream has been destroyed, and the event is something other
+ * than 'close' or 'error', then `false` is returned and no handlers
+ * are called.
+ *
+ * If the event is 'end', and has already been emitted, then the event
+ * is ignored. If the stream is in a paused or non-flowing state, then
+ * the event will be deferred until data flow resumes. If the stream is
+ * async, then handlers will be called on the next tick rather than
+ * immediately.
+ *
+ * If the event is 'close', and 'end' has not yet been emitted, then
+ * the event will be deferred until after 'end' is emitted.
+ *
+ * If the event is 'error', and an AbortSignal was provided for the stream,
+ * and there are no listeners, then the event is ignored, matching the
+ * behavior of node core streams in the presence of an AbortSignal.
+ *
+ * If the event is 'finish' or 'prefinish', then all listeners will be
+ * removed after emitting the event, to prevent double-firing.
+ */
+ emit(ev, ...args) {
+ const data = args[0];
+ // error and close are only events allowed after calling destroy()
+ if (ev !== 'error' &&
+ ev !== 'close' &&
+ ev !== DESTROYED &&
+ this[DESTROYED]) {
+ return false;
+ }
+ else if (ev === 'data') {
+ return !this[OBJECTMODE] && !data
+ ? false
+ : this[ASYNC]
+ ? (defer(() => this[EMITDATA](data)), true)
+ : this[EMITDATA](data);
+ }
+ else if (ev === 'end') {
+ return this[EMITEND]();
+ }
+ else if (ev === 'close') {
+ this[CLOSED] = true;
+ // don't emit close before 'end' and 'finish'
+ if (!this[EMITTED_END] && !this[DESTROYED])
+ return false;
+ const ret = super.emit('close');
+ this.removeAllListeners('close');
+ return ret;
+ }
+ else if (ev === 'error') {
+ this[EMITTED_ERROR] = data;
+ super.emit(ERROR, data);
+ const ret = !this[SIGNAL] || this.listeners('error').length
+ ?
super.emit('error', data) + : false; + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'resume') { + const ret = super.emit('resume'); + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev); + this.removeAllListeners(ev); + return ret; + } + // Some other unknown event + const ret = super.emit(ev, ...args); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) + this.pause(); + } + const ret = this[DISCARDED] ? false : super.emit('data', data); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITEND]() { + if (this[EMITTED_END]) + return false; + this[EMITTED_END] = true; + this.readable = false; + return this[ASYNC] + ? (defer(() => this[EMITEND2]()), true) + : this[EMITEND2](); + } + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end(); + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data); + } + if (!this[DISCARDED]) + super.emit('data', data); + } + } + for (const p of this[PIPES]) { + p.end(); + } + const ret = super.emit('end'); + this.removeAllListeners('end'); + return ret; + } + /** + * Return a Promise that resolves to an array of all emitted data once + * the stream ends. + */ + async collect() { + const buf = Object.assign([], { + dataLength: 0, + }); + if (!this[OBJECTMODE]) + buf.dataLength = 0; + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise(); + this.on('data', c => { + buf.push(c); + if (!this[OBJECTMODE]) + buf.dataLength += c.length; + }); + await p; + return buf; + } + /** + * Return a Promise that resolves to the concatenation of all emitted data + * once the stream ends. + * + * Not allowed on objectMode streams. + */ + async concat() { + if (this[OBJECTMODE]) { + throw new Error('cannot concat in objectMode'); + } + const buf = await this.collect(); + return (this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength)); + } + /** + * Return a void Promise that resolves once the stream ends. + */ + async promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))); + this.on('error', er => reject(er)); + this.on('end', () => resolve()); + }); + } + /** + * Asynchronous `for await of` iteration. + * + * This will continue emitting all chunks until the stream terminates. + */ + [Symbol.asyncIterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. 
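+ // editorial illustration, not upstream: typical consumption is
+ //   for await (const chunk of stream) { ... }
+ // which yields chunks until 'end', and rejects the pending promise
+ // if the stream errors or is destroyed mid-iteration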
+ this[DISCARDED] = false; + let stopped = false; + const stop = async () => { + this.pause(); + stopped = true; + return { value: undefined, done: true }; + }; + const next = () => { + if (stopped) + return stop(); + const res = this.read(); + if (res !== null) + return Promise.resolve({ done: false, value: res }); + if (this[EOF]) + return stop(); + let resolve; + let reject; + const onerr = (er) => { + this.off('data', ondata); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + stop(); + reject(er); + }; + const ondata = (value) => { + this.off('error', onerr); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + this.pause(); + resolve({ value, done: !!this[EOF] }); + }; + const onend = () => { + this.off('error', onerr); + this.off('data', ondata); + this.off(DESTROYED, ondestroy); + stop(); + resolve({ done: true, value: undefined }); + }; + const ondestroy = () => onerr(new Error('stream destroyed')); + return new Promise((res, rej) => { + reject = rej; + resolve = res; + this.once(DESTROYED, ondestroy); + this.once('error', onerr); + this.once('end', onend); + this.once('data', ondata); + }); + }; + return { + next, + throw: stop, + return: stop, + [Symbol.asyncIterator]() { + return this; + }, + }; + } + /** + * Synchronous `for of` iteration. + * + * The iteration will terminate when the internal buffer runs out, even + * if the stream has not yet terminated. + */ + [Symbol.iterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. + this[DISCARDED] = false; + let stopped = false; + const stop = () => { + this.pause(); + this.off(ERROR, stop); + this.off(DESTROYED, stop); + this.off('end', stop); + stopped = true; + return { done: true, value: undefined }; + }; + const next = () => { + if (stopped) + return stop(); + const value = this.read(); + return value === null ? stop() : { done: false, value }; + }; + this.once('end', stop); + this.once(ERROR, stop); + this.once(DESTROYED, stop); + return { + next, + throw: stop, + return: stop, + [Symbol.iterator]() { + return this; + }, + }; + } + /** + * Destroy a stream, preventing it from being used for any further purpose. + * + * If the stream has a `close()` method, then it will be called on + * destruction. + * + * After destruction, any attempt to write data, read data, or emit most + * events will be ignored. + * + * If an error argument is provided, then it will be emitted in an + * 'error' event. + */ + destroy(er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er); + else + this.emit(DESTROYED); + return this; + } + this[DESTROYED] = true; + this[DISCARDED] = true; + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0; + this[BUFFERLENGTH] = 0; + const wc = this; + if (typeof wc.close === 'function' && !this[CLOSED]) + wc.close(); + if (er) + this.emit('error', er); + // if no error to emit, still reject pending promises + else + this.emit(DESTROYED); + return this; + } + /** + * Alias for {@link isStream} + * + * Former export location, maintained for backwards compatibility. 
+ * + * @deprecated + */ + static get isStream() { + return isStream; + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/package.json b/node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/make-fetch-happen/node_modules/minipass/package.json b/node_modules/make-fetch-happen/node_modules/minipass/package.json new file mode 100644 index 0000000000000..6faaa247a5bc6 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/minipass/package.json @@ -0,0 +1,82 @@ +{ + "name": "minipass", + "version": "7.0.3", + "description": "minimal implementation of a PassThrough stream", + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/cjs/index.js", + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./package.json": "./package.json" + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--enable-source-maps", + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/node": "^20.1.2", + "@types/tap": "^15.0.8", + "c8": "^7.13.0", + "prettier": "^2.6.2", + "tap": "^16.3.0", + "ts-node": "^10.9.1", + "typedoc": "^0.24.8", + "typescript": "^5.1.3", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "sync-content": "^1.0.2", + "through2": "^2.0.3" + }, + "repository": "https://github.com/isaacs/minipass", + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } +} diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json index fd415dc9966fa..419db8fbb1289 100644 --- a/node_modules/make-fetch-happen/package.json +++ b/node_modules/make-fetch-happen/package.json @@ -1,6 +1,6 @@ { "name": "make-fetch-happen", - "version": "11.1.1", + "version": "12.0.0", "description": "Opinionated, caching, retrying fetch client", "main": "lib/index.js", "files": [ @@ -33,32 +33,28 @@ "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "agentkeepalive": "^4.2.1", + "@npmcli/agent": "^1.1.0", "cacache": "^17.0.0", "http-cache-semantics": "^4.1.1", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.0", "is-lambda": "^1.0.1", - "lru-cache": "^7.7.1", - "minipass": "^5.0.0", + "minipass": "^7.0.2", "minipass-fetch": "^3.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", "promise-retry": "^2.0.1", - "socks-proxy-agent": "^7.0.0", "ssri": "^10.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.18.0", "nock": "^13.2.4", "safe-buffer": "^5.2.1", "standard-version": "^9.3.2", "tap": "^16.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" }, "tap": { "color": 1, @@ -72,7 +68,13 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.14.1", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ], + "version": "4.18.0", "publish": "true" } } diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE b/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE new file mode 100644 index 0000000000000..1808eb2844231 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE @@ -0,0 +1,16 @@ +ISC License + +Copyright 2017-2022 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js new file mode 100644 index 0000000000000..dd68492ed7ea7 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js @@ -0,0 +1,214 @@ +'use strict' +const LRU = require('lru-cache') +const url = require('url') +const isLambda = require('is-lambda') +const dns = require('./dns.js') + +const AGENT_CACHE = new LRU({ max: 50 }) +const HttpAgent = require('agentkeepalive') +const HttpsAgent = HttpAgent.HttpsAgent + +module.exports = getAgent + +const getAgentTimeout = timeout => + typeof timeout !== 'number' || !timeout ? 
0 : timeout + 1 + +const getMaxSockets = maxSockets => maxSockets || 15 + +function getAgent (uri, opts) { + const parsedUri = new url.URL(typeof uri === 'string' ? uri : uri.url) + const isHttps = parsedUri.protocol === 'https:' + const pxuri = getProxyUri(parsedUri.href, opts) + + // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout + // of zero disables the timeout behavior (OS limits still apply). Else, if + // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that + // the node-fetch-npm timeout will always fire first, giving us more + // consistent errors. + const agentTimeout = getAgentTimeout(opts.timeout) + const agentMaxSockets = getMaxSockets(opts.maxSockets) + + const key = [ + `https:${isHttps}`, + pxuri + ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}` + : '>no-proxy<', + `local-address:${opts.localAddress || '>no-local-address<'}`, + `strict-ssl:${isHttps ? opts.rejectUnauthorized : '>no-strict-ssl<'}`, + `ca:${(isHttps && opts.ca) || '>no-ca<'}`, + `cert:${(isHttps && opts.cert) || '>no-cert<'}`, + `key:${(isHttps && opts.key) || '>no-key<'}`, + `timeout:${agentTimeout}`, + `maxSockets:${agentMaxSockets}`, + ].join(':') + + if (opts.agent != null) { // `agent: false` has special behavior! + return opts.agent + } + + // keep alive in AWS lambda makes no sense + const lambdaAgent = !isLambda ? null + : isHttps ? require('https').globalAgent + : require('http').globalAgent + + if (isLambda && !pxuri) { + return lambdaAgent + } + + if (AGENT_CACHE.peek(key)) { + return AGENT_CACHE.get(key) + } + + if (pxuri) { + const pxopts = isLambda ? { + ...opts, + agent: lambdaAgent, + } : opts + const proxy = getProxy(pxuri, pxopts, isHttps) + AGENT_CACHE.set(key, proxy) + return proxy + } + + const agent = isHttps ? 
new HttpsAgent({ + maxSockets: agentMaxSockets, + ca: opts.ca, + cert: opts.cert, + key: opts.key, + localAddress: opts.localAddress, + rejectUnauthorized: opts.rejectUnauthorized, + timeout: agentTimeout, + freeSocketTimeout: 15000, + lookup: dns.getLookup(opts.dns), + }) : new HttpAgent({ + maxSockets: agentMaxSockets, + localAddress: opts.localAddress, + timeout: agentTimeout, + freeSocketTimeout: 15000, + lookup: dns.getLookup(opts.dns), + }) + AGENT_CACHE.set(key, agent) + return agent +} + +function checkNoProxy (uri, opts) { + const host = new url.URL(uri).hostname.split('.').reverse() + let noproxy = (opts.noProxy || getProcessEnv('no_proxy')) + if (typeof noproxy === 'string') { + noproxy = noproxy.split(',').map(n => n.trim()) + } + + return noproxy && noproxy.some(no => { + const noParts = no.split('.').filter(x => x).reverse() + if (!noParts.length) { + return false + } + for (let i = 0; i < noParts.length; i++) { + if (host[i] !== noParts[i]) { + return false + } + } + return true + }) +} + +module.exports.getProcessEnv = getProcessEnv + +function getProcessEnv (env) { + if (!env) { + return + } + + let value + + if (Array.isArray(env)) { + for (const e of env) { + value = process.env[e] || + process.env[e.toUpperCase()] || + process.env[e.toLowerCase()] + if (typeof value !== 'undefined') { + break + } + } + } + + if (typeof env === 'string') { + value = process.env[env] || + process.env[env.toUpperCase()] || + process.env[env.toLowerCase()] + } + + return value +} + +module.exports.getProxyUri = getProxyUri +function getProxyUri (uri, opts) { + const protocol = new url.URL(uri).protocol + + const proxy = opts.proxy || + ( + protocol === 'https:' && + getProcessEnv('https_proxy') + ) || + ( + protocol === 'http:' && + getProcessEnv(['https_proxy', 'http_proxy', 'proxy']) + ) + if (!proxy) { + return null + } + + const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy + + return !checkNoProxy(uri, opts) && parsedProxy +} + +const getAuth = u => + u.username && u.password ? decodeURIComponent(`${u.username}:${u.password}`) + : u.username ? 
decodeURIComponent(u.username) + : null + +const getPath = u => u.pathname + u.search + u.hash + +const HttpProxyAgent = require('http-proxy-agent') +const HttpsProxyAgent = require('https-proxy-agent') +const { SocksProxyAgent } = require('socks-proxy-agent') +module.exports.getProxy = getProxy +function getProxy (proxyUrl, opts, isHttps) { + // our current proxy agents do not support an overridden dns lookup method, so will not + // benefit from the dns cache + const popts = { + host: proxyUrl.hostname, + port: proxyUrl.port, + protocol: proxyUrl.protocol, + path: getPath(proxyUrl), + auth: getAuth(proxyUrl), + ca: opts.ca, + cert: opts.cert, + key: opts.key, + timeout: getAgentTimeout(opts.timeout), + localAddress: opts.localAddress, + maxSockets: getMaxSockets(opts.maxSockets), + rejectUnauthorized: opts.rejectUnauthorized, + } + + if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') { + if (!isHttps) { + return new HttpProxyAgent(popts) + } else { + return new HttpsProxyAgent(popts) + } + } else if (proxyUrl.protocol.startsWith('socks')) { + // socks-proxy-agent uses hostname not host + popts.hostname = popts.host + delete popts.host + return new SocksProxyAgent(popts) + } else { + throw Object.assign( + new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`), + { + code: 'EUNSUPPORTEDPROXY', + url: proxyUrl.href, + } + ) + } +} diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js new file mode 100644 index 0000000000000..45141095074ec --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js @@ -0,0 +1,469 @@ +const { Request, Response } = require('minipass-fetch') +const { Minipass } = require('minipass') +const MinipassFlush = require('minipass-flush') +const cacache = require('cacache') +const url = require('url') + +const CachingMinipassPipeline = require('../pipeline.js') +const CachePolicy = require('./policy.js') +const cacheKey = require('./key.js') +const remote = require('../remote.js') + +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) + +// allow list for request headers that will be written to the cache index +// note: we will also store any request headers +// that are named in a response's vary header +const KEEP_REQUEST_HEADERS = [ + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept', + 'cache-control', +] + +// allow list for response headers that will be written to the cache index +// note: we must not store the real response's age header, or when we load +// a cache policy based on the metadata it will think the cached response +// is always stale +const KEEP_RESPONSE_HEADERS = [ + 'cache-control', + 'content-encoding', + 'content-language', + 'content-type', + 'date', + 'etag', + 'expires', + 'last-modified', + 'link', + 'location', + 'pragma', + 'vary', +] + +// return an object containing all metadata to be written to the index +const getMetadata = (request, response, options) => { + const metadata = { + time: Date.now(), + url: request.url, + reqHeaders: {}, + resHeaders: {}, + + // options on which we must match the request and vary the response + options: { + compress: options.compress != null ? 
options.compress : request.compress, + }, + } + + // only save the status if it's not a 200 or 304 + if (response.status !== 200 && response.status !== 304) { + metadata.status = response.status + } + + for (const name of KEEP_REQUEST_HEADERS) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + + // if the request's host header differs from the host in the url + // we need to keep it, otherwise it's just noise and we ignore it + const host = request.headers.get('host') + const parsedUrl = new url.URL(request.url) + if (host && parsedUrl.host !== host) { + metadata.reqHeaders.host = host + } + + // if the response has a vary header, make sure + // we store the relevant request headers too + if (response.headers.has('vary')) { + const vary = response.headers.get('vary') + // a vary of "*" means every header causes a different response. + // in that scenario, we do not include any additional headers + // as the freshness check will always fail anyway and we don't + // want to bloat the cache indexes + if (vary !== '*') { + // copy any other request headers that will vary the response + const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) + for (const name of varyHeaders) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + } + } + + for (const name of KEEP_RESPONSE_HEADERS) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + for (const name of options.cacheAdditionalHeaders) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + return metadata +} + +// symbols used to hide objects that may be lazily evaluated in a getter +const _request = Symbol('request') +const _response = Symbol('response') +const _policy = Symbol('policy') + +class CacheEntry { + constructor ({ entry, request, response, options }) { + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. 
we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else { + this.key = cacheKey(request) + } + + this.options = options + + // these properties are behind getters that lazily evaluate + this[_request] = request + this[_response] = response + this[_policy] = null + } + + // returns a CacheEntry instance that satisfies the given request + // or undefined if no existing entry satisfies + static async find (request, options) { + try { + // compacts the index and returns an array of unique entries + var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { + const entryA = new CacheEntry({ entry: A, options }) + const entryB = new CacheEntry({ entry: B, options }) + return entryA.policy.satisfies(entryB.request) + }, { + validateEntry: (entry) => { + // clean out entries with a buggy content-encoding value + if (entry.metadata && + entry.metadata.resHeaders && + entry.metadata.resHeaders['content-encoding'] === null) { + return false + } + + // if an integrity is null, it needs to have a status specified + if (entry.integrity === null) { + return !!(entry.metadata && entry.metadata.status) + } + + return true + }, + }) + } catch (err) { + // if the compact request fails, ignore the error and return + return + } + + // a cache mode of 'reload' means to behave as though we have no cache + // on the way to the network. return undefined to allow cacheFetch to + // create a brand new request no matter what. + if (options.cache === 'reload') { + return + } + + // find the specific entry that satisfies the request + let match + for (const entry of matches) { + const _entry = new CacheEntry({ + entry, + options, + }) + + if (_entry.policy.satisfies(request)) { + match = _entry + break + } + } + + return match + } + + // if the user made a PUT/POST/PATCH then we invalidate our + // cache for the same url by deleting the index entirely + static async invalidate (request, options) { + const key = cacheKey(request) + try { + await cacache.rm.entry(options.cachePath, key, { removeFully: true }) + } catch (err) { + // ignore errors + } + } + + get request () { + if (!this[_request]) { + this[_request] = new Request(this.entry.metadata.url, { + method: 'GET', + headers: this.entry.metadata.reqHeaders, + ...this.entry.metadata.options, + }) + } + + return this[_request] + } + + get response () { + if (!this[_response]) { + this[_response] = new Response(null, { + url: this.entry.metadata.url, + counter: this.options.counter, + status: this.entry.metadata.status || 200, + headers: { + ...this.entry.metadata.resHeaders, + 'content-length': this.entry.size, + }, + }) + } + + return this[_response] + } + + get policy () { + if (!this[_policy]) { + this[_policy] = new CachePolicy({ + entry: this.entry, + request: this.request, + response: this.response, + options: this.options, + }) + } + + return this[_policy] + } + + // wraps the response in a pipeline that stores the data + // in the cache while the user consumes it + async store (status) { + // if we got a status other than 200, 301, or 308, + // or the CachePolicy forbid storage, append the + // cache status header and return it untouched + if ( + this.request.method !== 'GET' || + ![200, 301, 308].includes(this.response.status) || + !this.policy.storable() + ) { + this.response.headers.set('x-local-cache-status', 'skip') + return this.response + } + + const size = 
this.response.headers.get('content-length') + const cacheOpts = { + algorithms: this.options.algorithms, + metadata: getMetadata(this.request, this.response, this.options), + size, + integrity: this.options.integrity, + integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, + } + + let body = null + // we only set a body if the status is a 200, redirects are + // stored as metadata only + if (this.response.status === 200) { + let cacheWriteResolve, cacheWriteReject + const cacheWritePromise = new Promise((resolve, reject) => { + cacheWriteResolve = resolve + cacheWriteReject = reject + }) + + body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ + flush () { + return cacheWritePromise + }, + })) + // this is always true since if we aren't reusing the one from the remote fetch, we + // are using the one from cacache + body.hasIntegrityEmitter = true + + const onResume = () => { + const tee = new Minipass() + const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) + // re-emit the integrity and size events on our new response body so they can be reused + cacheStream.on('integrity', i => body.emit('integrity', i)) + cacheStream.on('size', s => body.emit('size', s)) + // stick a flag on here so downstream users will know if they can expect integrity events + tee.pipe(cacheStream) + // TODO if the cache write fails, log a warning but return the response anyway + // eslint-disable-next-line promise/catch-or-return + cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) + body.unshift(tee) + body.unshift(this.response.body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + } else { + await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + } + + // note: we do not set the x-local-cache-hash header because we do not know + // the hash value until after the write to the cache completes, which doesn't + // happen until after the response has been sent and it's too late to write + // the header anyway + this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + this.response.headers.set('x-local-cache-mode', 'stream') + this.response.headers.set('x-local-cache-status', status) + this.response.headers.set('x-local-cache-time', new Date().toISOString()) + const newResponse = new Response(body, { + url: this.response.url, + status: this.response.status, + headers: this.response.headers, + counter: this.options.counter, + }) + return newResponse + } + + // use the cached data to create a response and return it + async respond (method, options, status) { + let response + if (method === 'HEAD' || [301, 308].includes(this.response.status)) { + // if the request is a HEAD, or the response is a redirect, + // then the metadata in the entry already includes everything + // we need to build a response + response = this.response + } else { + // we're responding with a full cached response, so create a body + // that reads from cacache and attach it to a new Response + const body = new Minipass() + const headers = { ...this.policy.responseHeaders() } + + const onResume = () => { + const cacheStream = cacache.get.stream.byDigest( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if (err.code === 'EINTEGRITY') { + await 
cacache.rm.content( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + } + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { + await CacheEntry.invalidate(this.request, this.options) + } + body.emit('error', err) + cacheStream.resume() + }) + // emit the integrity and size events based on our metadata so we're consistent + body.emit('integrity', this.entry.integrity) + body.emit('size', Number(headers['content-length'])) + cacheStream.pipe(body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + response = new Response(body, { + url: this.entry.metadata.url, + counter: options.counter, + status: 200, + headers, + }) + } + + response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) + response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + response.headers.set('x-local-cache-mode', 'stream') + response.headers.set('x-local-cache-status', status) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) + return response + } + + // use the provided request along with this cache entry to + // revalidate the stored response. returns a response, either + // from the cache or from the update + async revalidate (request, options) { + const revalidateRequest = new Request(request, { + headers: this.policy.revalidationHeaders(request), + }) + + try { + // NOTE: be sure to remove the headers property from the + // user supplied options, since we have already defined + // them on the new request object. if they're still in the + // options then those will overwrite the ones from the policy + var response = await remote(revalidateRequest, { + ...options, + headers: undefined, + }) + } catch (err) { + // if the network fetch fails, return the stale + // cached response unless it has a cache-control + // of 'must-revalidate' + if (!this.policy.mustRevalidate) { + return this.respond(request.method, options, 'stale') + } + + throw err + } + + if (this.policy.revalidated(revalidateRequest, response)) { + // we got a 304, write a new index to the cache and respond from cache + const metadata = getMetadata(request, response, options) + // 304 responses do not include headers that are specific to the response data + // since they do not include a body, so we copy values for headers that were + // in the old cache entry to the new one, if the new metadata does not already + // include that header + for (const name of KEEP_RESPONSE_HEADERS) { + if ( + !hasOwnProperty(metadata.resHeaders, name) && + hasOwnProperty(this.entry.metadata.resHeaders, name) + ) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + } + + for (const name of options.cacheAdditionalHeaders) { + const inMeta = hasOwnProperty(metadata.resHeaders, name) + const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) + const inPolicy = hasOwnProperty(this.policy.response.headers, name) + + // if the header is in the existing entry, but it is not in the metadata + // then we need to write it to the metadata as this will refresh the on-disk cache + if (!inMeta && inEntry) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + // if the header is in the metadata, but not in the policy, then we need to set + // it in the policy so that it's included in the immediate response. 
future + // responses will load a new cache entry, so we don't need to change that + if (!inPolicy && inMeta) { + this.policy.response.headers[name] = metadata.resHeaders[name] + } + } + + try { + await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { + size: this.entry.size, + metadata, + }) + } catch (err) { + // if updating the cache index fails, we ignore it and + // respond anyway + } + return this.respond(request.method, options, 'revalidated') + } + + // if we got a modified response, create a new entry based on it + const newEntry = new CacheEntry({ + request, + response, + options, + }) + + // respond with the new entry while writing it to the cache + return newEntry.store('updated') + } +} + +module.exports = CacheEntry diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js new file mode 100644 index 0000000000000..67a66573bebe6 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js @@ -0,0 +1,11 @@ +class NotCachedError extends Error { + constructor (url) { + /* eslint-disable-next-line max-len */ + super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) + this.code = 'ENOTCACHED' + } +} + +module.exports = { + NotCachedError, +} diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js new file mode 100644 index 0000000000000..0de49d23fb933 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js @@ -0,0 +1,49 @@ +const { NotCachedError } = require('./errors.js') +const CacheEntry = require('./entry.js') +const remote = require('../remote.js') + +// do whatever is necessary to get a Response and return it +const cacheFetch = async (request, options) => { + // try to find a cached entry that satisfies this request + const entry = await CacheEntry.find(request, options) + if (!entry) { + // no cached result, if the cache mode is 'only-if-cached' that's a failure + if (options.cache === 'only-if-cached') { + throw new NotCachedError(request.url) + } + + // otherwise, we make a request, store it and return it + const response = await remote(request, options) + const newEntry = new CacheEntry({ request, response, options }) + return newEntry.store('miss') + } + + // we have a cached response that satisfies this request, however if the cache + // mode is 'no-cache' then we send the revalidation request no matter what + if (options.cache === 'no-cache') { + return entry.revalidate(request, options) + } + + // if the cached entry is not stale, or if the cache mode is 'force-cache' or + // 'only-if-cached' we can respond with the cached entry. set the status + // based on the result of needsRevalidation and respond + const _needsRevalidation = entry.policy.needsRevalidation(request) + if (options.cache === 'force-cache' || + options.cache === 'only-if-cached' || + !_needsRevalidation) { + return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') + } + + // if we got here, the cache entry is stale so revalidate it + return entry.revalidate(request, options) +} + +cacheFetch.invalidate = async (request, options) => { + if (!options.cachePath) { + return + } + + return CacheEntry.invalidate(request, options) +} + +module.exports = cacheFetch diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js new file mode 100644 index 0000000000000..f7684d562b7fa --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js @@ -0,0 +1,17 @@ +const { URL, format } = require('url') + +// options passed to url.format() when generating a key +const formatOptions = { + auth: false, + fragment: false, + search: true, + unicode: false, +} + +// returns a string to be used as the cache key for the Request +const cacheKey = (request) => { + const parsed = new URL(request.url) + return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` +} + +module.exports = cacheKey diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js new file mode 100644 index 0000000000000..ada3c8600dae9 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js @@ -0,0 +1,161 @@ +const CacheSemantics = require('http-cache-semantics') +const Negotiator = require('negotiator') +const ssri = require('ssri') + +// options passed to http-cache-semantics constructor +const policyOptions = { + shared: false, + ignoreCargoCult: true, +} + +// a fake empty response, used when only testing the +// request for storability +const emptyResponse = { status: 200, headers: {} } + +// returns a plain object representation of the Request +const requestObject = (request) => { + const _obj = { + method: request.method, + url: request.url, + headers: {}, + compress: request.compress, + } + + request.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +// returns a plain object representation of the Response +const responseObject = (response) => { + const _obj = { + status: response.status, + headers: {}, + } + + response.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +class CachePolicy { + constructor ({ entry, request, response, options }) { + this.entry = entry + this.request = requestObject(request) + this.response = responseObject(response) + this.options = options + this.policy = new CacheSemantics(this.request, this.response, policyOptions) + + if (this.entry) { + // if we have an entry, copy the timestamp to the _responseTime + // this is necessary because the CacheSemantics constructor forces + // the value to Date.now() which means a policy created from a + // cache entry is likely to always identify itself as stale + this.policy._responseTime = this.entry.metadata.time + } + } + + // static method to quickly determine if a request alone is storable + static storable (request, options) { + // no cachePath means no caching + if (!options.cachePath) { + return false + } + + // user explicitly asked not to cache + if (options.cache === 'no-store') { + return false + } + + // we only cache GET and HEAD requests + if (!['GET', 'HEAD'].includes(request.method)) { + return false + } + + // otherwise, let http-cache-semantics make the decision + // based on the request's headers + const policy = new 
CacheSemantics(requestObject(request), emptyResponse, policyOptions) + return policy.storable() + } + + // returns true if the policy satisfies the request + satisfies (request) { + const _req = requestObject(request) + if (this.request.headers.host !== _req.headers.host) { + return false + } + + if (this.request.compress !== _req.compress) { + return false + } + + const negotiatorA = new Negotiator(this.request) + const negotiatorB = new Negotiator(_req) + + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { + return false + } + + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { + return false + } + + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { + return false + } + + if (this.options.integrity) { + return ssri.parse(this.options.integrity).match(this.entry.integrity) + } + + return true + } + + // returns true if the request and response allow caching + storable () { + return this.policy.storable() + } + + // NOTE: this is a hack to avoid parsing the cache-control + // header ourselves, it returns true if the response's + // cache-control contains must-revalidate + get mustRevalidate () { + return !!this.policy._rescc['must-revalidate'] + } + + // returns true if the cached response requires revalidation + // for the given request + needsRevalidation (request) { + const _req = requestObject(request) + // force method to GET because we only cache GETs + // but can serve a HEAD from a cached GET + _req.method = 'GET' + return !this.policy.satisfiesWithoutRevalidation(_req) + } + + responseHeaders () { + return this.policy.responseHeaders() + } + + // returns a new object containing the appropriate headers + // to send a revalidation request + revalidationHeaders (request) { + const _req = requestObject(request) + return this.policy.revalidationHeaders(_req) + } + + // returns true if the request/response was revalidated + // successfully. returns false if a new response was received + revalidated (request, response) { + const _req = requestObject(request) + const _res = responseObject(response) + const policy = this.policy.revalidatedPolicy(_req, _res) + return !policy.modified + } +} + +module.exports = CachePolicy diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js new file mode 100644 index 0000000000000..13102b57c4aa0 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js @@ -0,0 +1,49 @@ +const LRUCache = require('lru-cache') +const dns = require('dns') + +const defaultOptions = exports.defaultOptions = { + family: undefined, + hints: dns.ADDRCONFIG, + all: false, + verbatim: undefined, +} + +const lookupCache = exports.lookupCache = new LRUCache({ max: 50 }) + +// this is a factory so that each request can have its own opts (i.e. 
ttl) +// while still sharing the cache across all requests +exports.getLookup = (dnsOptions) => { + return (hostname, options, callback) => { + if (typeof options === 'function') { + callback = options + options = null + } else if (typeof options === 'number') { + options = { family: options } + } + + options = { ...defaultOptions, ...options } + + const key = JSON.stringify({ + hostname, + family: options.family, + hints: options.hints, + all: options.all, + verbatim: options.verbatim, + }) + + if (lookupCache.has(key)) { + const [address, family] = lookupCache.get(key) + process.nextTick(callback, null, address, family) + return + } + + dnsOptions.lookup(hostname, options, (err, address, family) => { + if (err) { + return callback(err) + } + + lookupCache.set(key, [address, family], { ttl: dnsOptions.ttl }) + return callback(null, address, family) + }) + } +} diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js new file mode 100644 index 0000000000000..233ba67e16550 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js @@ -0,0 +1,118 @@ +'use strict' + +const { FetchError, Request, isRedirect } = require('minipass-fetch') +const url = require('url') + +const CachePolicy = require('./cache/policy.js') +const cache = require('./cache/index.js') +const remote = require('./remote.js') + +// given a Request, a Response and user options +// return true if the response is a redirect that +// can be followed. we throw errors that will result +// in the fetch being rejected if the redirect is +// possible but invalid for some reason +const canFollowRedirect = (request, response, options) => { + if (!isRedirect(response.status)) { + return false + } + + if (options.redirect === 'manual') { + return false + } + + if (options.redirect === 'error') { + throw new FetchError(`redirect mode is set to error: ${request.url}`, + 'no-redirect', { code: 'ENOREDIRECT' }) + } + + if (!response.headers.has('location')) { + throw new FetchError(`redirect location header missing for: ${request.url}`, + 'no-location', { code: 'EINVALIDREDIRECT' }) + } + + if (request.counter >= request.follow) { + throw new FetchError(`maximum redirect reached at: ${request.url}`, + 'max-redirect', { code: 'EMAXREDIRECT' }) + } + + return true +} + +// given a Request, a Response, and the user's options return an object +// with a new Request and a new options object that will be used for +// following the redirect +const getRedirect = (request, response, options) => { + const _opts = { ...options } + const location = response.headers.get('location') + const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) + // Comment below is used under the following license: + /** + * @license + * Copyright (c) 2010-2012 Mikeal Rogers + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an "AS + * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language + * governing permissions and limitations under the License. 
+ */ + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (new url.URL(request.url).hostname !== redirectUrl.hostname) { + request.headers.delete('authorization') + request.headers.delete('cookie') + } + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if ( + response.status === 303 || + (request.method === 'POST' && [301, 302].includes(response.status)) + ) { + _opts.method = 'GET' + _opts.body = null + request.headers.delete('content-length') + } + + _opts.headers = {} + request.headers.forEach((value, key) => { + _opts.headers[key] = value + }) + + _opts.counter = ++request.counter + const redirectReq = new Request(url.format(redirectUrl), _opts) + return { + request: redirectReq, + options: _opts, + } +} + +const fetch = async (request, options) => { + const response = CachePolicy.storable(request, options) + ? await cache(request, options) + : await remote(request, options) + + // if the request wasn't a GET or HEAD, and the response + // status is between 200 and 399 inclusive, invalidate the + // request url + if (!['GET', 'HEAD'].includes(request.method) && + response.status >= 200 && + response.status <= 399) { + await cache.invalidate(request, options) + } + + if (!canFollowRedirect(request, response, options)) { + return response + } + + const redirect = getRedirect(request, response, options) + return fetch(redirect.request, redirect.options) +} + +module.exports = fetch diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js new file mode 100644 index 0000000000000..2f12e8e1b6113 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js @@ -0,0 +1,41 @@ +const { FetchError, Headers, Request, Response } = require('minipass-fetch') + +const configureOptions = require('./options.js') +const fetch = require('./fetch.js') + +const makeFetchHappen = (url, opts) => { + const options = configureOptions(opts) + + const request = new Request(url, options) + return fetch(request, options) +} + +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { + if (typeof defaultUrl === 'object') { + defaultOptions = defaultUrl + defaultUrl = null + } + + const defaultedFetch = (url, options = {}) => { + const finalUrl = url || defaultUrl + const finalOptions = { + ...defaultOptions, + ...options, + headers: { + ...defaultOptions.headers, + ...options.headers, + }, + } + return wrappedFetch(finalUrl, finalOptions) + } + + defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => + makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) + return defaultedFetch +} + +module.exports = makeFetchHappen +module.exports.FetchError = FetchError +module.exports.Headers = Headers +module.exports.Request = Request +module.exports.Response = Response diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js new file mode 100644 index 0000000000000..f77511279f831 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js @@ -0,0 +1,54 @@ +const dns = require('dns') + +const conditionalHeaders = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 
'if-match', + 'if-range', +] + +const configureOptions = (opts) => { + const { strictSSL, ...options } = { ...opts } + options.method = options.method ? options.method.toUpperCase() : 'GET' + options.rejectUnauthorized = strictSSL !== false + + if (!options.retry) { + options.retry = { retries: 0 } + } else if (typeof options.retry === 'string') { + const retries = parseInt(options.retry, 10) + if (isFinite(retries)) { + options.retry = { retries } + } else { + options.retry = { retries: 0 } + } + } else if (typeof options.retry === 'number') { + options.retry = { retries: options.retry } + } else { + options.retry = { retries: 0, ...options.retry } + } + + options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } + + options.cache = options.cache || 'default' + if (options.cache === 'default') { + const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { + return conditionalHeaders.includes(name.toLowerCase()) + }) + if (hasConditionalHeader) { + options.cache = 'no-store' + } + } + + options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] + + // cacheManager is deprecated, but if it's set and + // cachePath is not we should copy it to the new field + if (options.cacheManager && !options.cachePath) { + options.cachePath = options.cacheManager + } + + return options +} + +module.exports = configureOptions diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js new file mode 100644 index 0000000000000..b1d221b2d0ce3 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js @@ -0,0 +1,41 @@ +'use strict' + +const MinipassPipeline = require('minipass-pipeline') + +class CachingMinipassPipeline extends MinipassPipeline { + #events = [] + #data = new Map() + + constructor (opts, ...streams) { + // CRITICAL: do NOT pass the streams to the call to super(), this will start + // the flow of data and potentially cause the events we need to catch to emit + // before we've finished our own setup. 
instead we call super() with no args, + // finish our setup, and then push the streams into ourselves to start the + // data flow + super() + this.#events = opts.events + + /* istanbul ignore next - coverage disabled because this is pointless to test here */ + if (streams.length) { + this.push(...streams) + } + } + + on (event, handler) { + if (this.#events.includes(event) && this.#data.has(event)) { + return handler(...this.#data.get(event)) + } + + return super.on(event, handler) + } + + emit (event, ...data) { + if (this.#events.includes(event)) { + this.#data.set(event, data) + } + + return super.emit(event, ...data) + } +} + +module.exports = CachingMinipassPipeline diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js new file mode 100644 index 0000000000000..bdbcc79cad908 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js @@ -0,0 +1,121 @@ +const { Minipass } = require('minipass') +const fetch = require('minipass-fetch') +const promiseRetry = require('promise-retry') +const ssri = require('ssri') + +const CachingMinipassPipeline = require('./pipeline.js') +const getAgent = require('./agent.js') +const pkg = require('../package.json') + +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) + 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) +] + +const RETRY_TYPES = [ + 'request-timeout', +] + +// make a request directly to the remote source, +// retrying certain classes of errors as well as +// following redirects (through the cache if necessary) +// and verifying response integrity +const remoteFetch = (request, options) => { + const agent = getAgent(request.url, options) + if (!request.headers.has('connection')) { + request.headers.set('connection', agent ? 
'keep-alive' : 'close') + } + + if (!request.headers.has('user-agent')) { + request.headers.set('user-agent', USER_AGENT) + } + + // keep our own options since we're overriding the agent + // and the redirect mode + const _opts = { + ...options, + agent, + redirect: 'manual', + } + + return promiseRetry(async (retryHandler, attemptNum) => { + const req = new fetch.Request(request, _opts) + try { + let res = await fetch(req, _opts) + if (_opts.integrity && res.status === 200) { + // we got a 200 response and the user has specified an expected + // integrity value, so wrap the response in an ssri stream to verify it + const integrityStream = ssri.integrityStream({ + algorithms: _opts.algorithms, + integrity: _opts.integrity, + size: _opts.size, + }) + const pipeline = new CachingMinipassPipeline({ + events: ['integrity', 'size'], + }, res.body, integrityStream) + // we also propagate the integrity and size events out to the pipeline so we can use + // this new response body as an integrityEmitter for cacache + integrityStream.on('integrity', i => pipeline.emit('integrity', i)) + integrityStream.on('size', s => pipeline.emit('size', s)) + res = new fetch.Response(pipeline, res) + // set an explicit flag so we know if our response body will emit integrity and size + res.body.hasIntegrityEmitter = true + } + + res.headers.set('x-fetch-attempts', attemptNum) + + // do not retry POST requests, or requests with a streaming body + // do retry requests with a 408, 420, 429 or 500+ status in the response + const isStream = Minipass.isStream(req.body) + const isRetriable = req.method !== 'POST' && + !isStream && + ([408, 420, 429].includes(res.status) || res.status >= 500) + + if (isRetriable) { + if (typeof options.onRetry === 'function') { + options.onRetry(res) + } + + return retryHandler(res) + } + + return res + } catch (err) { + const code = (err.code === 'EPROMISERETRY') + ? 
err.retried.code + : err.code + + // err.retried will be the thing that was thrown from above + // if it's a response, we just got a bad status code and we + // can re-throw to allow the retry + const isRetryError = err.retried instanceof fetch.Response || + (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) + + if (req.method === 'POST' || isRetryError) { + throw err + } + + if (typeof options.onRetry === 'function') { + options.onRetry(err) + } + + return retryHandler(err) + } + }, options.retry).catch((err) => { + // don't reject for http errors, just return them + if (err.status >= 400 && err.type !== 'system') { + return err + } + + throw err + }) +} + +module.exports = remoteFetch diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/package.json b/node_modules/node-gyp/node_modules/make-fetch-happen/package.json new file mode 100644 index 0000000000000..fd415dc9966fa --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/package.json @@ -0,0 +1,78 @@ +{ + "name": "make-fetch-happen", + "version": "11.1.1", + "description": "Opinionated, caching, retrying fetch client", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "posttest": "npm run lint", + "eslint": "eslint", + "lint": "eslint \"**/*.js\"", + "lintfix": "npm run lint -- --fix", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/make-fetch-happen.git" + }, + "keywords": [ + "http", + "request", + "fetch", + "mean girls", + "caching", + "cache", + "subresource integrity" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "agentkeepalive": "^4.2.1", + "cacache": "^17.0.0", + "http-cache-semantics": "^4.1.1", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^5.0.0", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + "ssri": "^10.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.14.1", + "nock": "^13.2.4", + "safe-buffer": "^5.2.1", + "standard-version": "^9.3.2", + "tap": "^16.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "tap": { + "color": 1, + "files": "test/*.js", + "check-coverage": true, + "timeout": 60, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.14.1", + "publish": "true" + } +} diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/LICENSE b/node_modules/sigstore/node_modules/make-fetch-happen/LICENSE new file mode 100644 index 0000000000000..1808eb2844231 --- /dev/null +++ b/node_modules/sigstore/node_modules/make-fetch-happen/LICENSE @@ -0,0 +1,16 @@ +ISC License + +Copyright 2017-2022 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/agent.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/agent.js new file mode 100644 index 0000000000000..dd68492ed7ea7 --- /dev/null +++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/agent.js @@ -0,0 +1,214 @@ +'use strict' +const LRU = require('lru-cache') +const url = require('url') +const isLambda = require('is-lambda') +const dns = require('./dns.js') + +const AGENT_CACHE = new LRU({ max: 50 }) +const HttpAgent = require('agentkeepalive') +const HttpsAgent = HttpAgent.HttpsAgent + +module.exports = getAgent + +const getAgentTimeout = timeout => + typeof timeout !== 'number' || !timeout ? 0 : timeout + 1 + +const getMaxSockets = maxSockets => maxSockets || 15 + +function getAgent (uri, opts) { + const parsedUri = new url.URL(typeof uri === 'string' ? uri : uri.url) + const isHttps = parsedUri.protocol === 'https:' + const pxuri = getProxyUri(parsedUri.href, opts) + + // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout + // of zero disables the timeout behavior (OS limits still apply). Else, if + // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that + // the node-fetch-npm timeout will always fire first, giving us more + // consistent errors. + const agentTimeout = getAgentTimeout(opts.timeout) + const agentMaxSockets = getMaxSockets(opts.maxSockets) + + const key = [ + `https:${isHttps}`, + pxuri + ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}` + : '>no-proxy<', + `local-address:${opts.localAddress || '>no-local-address<'}`, + `strict-ssl:${isHttps ? opts.rejectUnauthorized : '>no-strict-ssl<'}`, + `ca:${(isHttps && opts.ca) || '>no-ca<'}`, + `cert:${(isHttps && opts.cert) || '>no-cert<'}`, + `key:${(isHttps && opts.key) || '>no-key<'}`, + `timeout:${agentTimeout}`, + `maxSockets:${agentMaxSockets}`, + ].join(':') + + if (opts.agent != null) { // `agent: false` has special behavior! + return opts.agent + } + + // keep alive in AWS lambda makes no sense + const lambdaAgent = !isLambda ? null + : isHttps ? require('https').globalAgent + : require('http').globalAgent + + if (isLambda && !pxuri) { + return lambdaAgent + } + + if (AGENT_CACHE.peek(key)) { + return AGENT_CACHE.get(key) + } + + if (pxuri) { + const pxopts = isLambda ? { + ...opts, + agent: lambdaAgent, + } : opts + const proxy = getProxy(pxuri, pxopts, isHttps) + AGENT_CACHE.set(key, proxy) + return proxy + } + + const agent = isHttps ? 
new HttpsAgent({ + maxSockets: agentMaxSockets, + ca: opts.ca, + cert: opts.cert, + key: opts.key, + localAddress: opts.localAddress, + rejectUnauthorized: opts.rejectUnauthorized, + timeout: agentTimeout, + freeSocketTimeout: 15000, + lookup: dns.getLookup(opts.dns), + }) : new HttpAgent({ + maxSockets: agentMaxSockets, + localAddress: opts.localAddress, + timeout: agentTimeout, + freeSocketTimeout: 15000, + lookup: dns.getLookup(opts.dns), + }) + AGENT_CACHE.set(key, agent) + return agent +} + +function checkNoProxy (uri, opts) { + const host = new url.URL(uri).hostname.split('.').reverse() + let noproxy = (opts.noProxy || getProcessEnv('no_proxy')) + if (typeof noproxy === 'string') { + noproxy = noproxy.split(',').map(n => n.trim()) + } + + return noproxy && noproxy.some(no => { + const noParts = no.split('.').filter(x => x).reverse() + if (!noParts.length) { + return false + } + for (let i = 0; i < noParts.length; i++) { + if (host[i] !== noParts[i]) { + return false + } + } + return true + }) +} + +module.exports.getProcessEnv = getProcessEnv + +function getProcessEnv (env) { + if (!env) { + return + } + + let value + + if (Array.isArray(env)) { + for (const e of env) { + value = process.env[e] || + process.env[e.toUpperCase()] || + process.env[e.toLowerCase()] + if (typeof value !== 'undefined') { + break + } + } + } + + if (typeof env === 'string') { + value = process.env[env] || + process.env[env.toUpperCase()] || + process.env[env.toLowerCase()] + } + + return value +} + +module.exports.getProxyUri = getProxyUri +function getProxyUri (uri, opts) { + const protocol = new url.URL(uri).protocol + + const proxy = opts.proxy || + ( + protocol === 'https:' && + getProcessEnv('https_proxy') + ) || + ( + protocol === 'http:' && + getProcessEnv(['https_proxy', 'http_proxy', 'proxy']) + ) + if (!proxy) { + return null + } + + const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy + + return !checkNoProxy(uri, opts) && parsedProxy +} + +const getAuth = u => + u.username && u.password ? decodeURIComponent(`${u.username}:${u.password}`) + : u.username ? 
decodeURIComponent(u.username) + : null + +const getPath = u => u.pathname + u.search + u.hash + +const HttpProxyAgent = require('http-proxy-agent') +const HttpsProxyAgent = require('https-proxy-agent') +const { SocksProxyAgent } = require('socks-proxy-agent') +module.exports.getProxy = getProxy +function getProxy (proxyUrl, opts, isHttps) { + // our current proxy agents do not support an overridden dns lookup method, so will not + // benefit from the dns cache + const popts = { + host: proxyUrl.hostname, + port: proxyUrl.port, + protocol: proxyUrl.protocol, + path: getPath(proxyUrl), + auth: getAuth(proxyUrl), + ca: opts.ca, + cert: opts.cert, + key: opts.key, + timeout: getAgentTimeout(opts.timeout), + localAddress: opts.localAddress, + maxSockets: getMaxSockets(opts.maxSockets), + rejectUnauthorized: opts.rejectUnauthorized, + } + + if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') { + if (!isHttps) { + return new HttpProxyAgent(popts) + } else { + return new HttpsProxyAgent(popts) + } + } else if (proxyUrl.protocol.startsWith('socks')) { + // socks-proxy-agent uses hostname not host + popts.hostname = popts.host + delete popts.host + return new SocksProxyAgent(popts) + } else { + throw Object.assign( + new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`), + { + code: 'EUNSUPPORTEDPROXY', + url: proxyUrl.href, + } + ) + } +} diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/entry.js new file mode 100644 index 0000000000000..45141095074ec --- /dev/null +++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/entry.js @@ -0,0 +1,469 @@ +const { Request, Response } = require('minipass-fetch') +const { Minipass } = require('minipass') +const MinipassFlush = require('minipass-flush') +const cacache = require('cacache') +const url = require('url') + +const CachingMinipassPipeline = require('../pipeline.js') +const CachePolicy = require('./policy.js') +const cacheKey = require('./key.js') +const remote = require('../remote.js') + +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) + +// allow list for request headers that will be written to the cache index +// note: we will also store any request headers +// that are named in a response's vary header +const KEEP_REQUEST_HEADERS = [ + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept', + 'cache-control', +] + +// allow list for response headers that will be written to the cache index +// note: we must not store the real response's age header, or when we load +// a cache policy based on the metadata it will think the cached response +// is always stale +const KEEP_RESPONSE_HEADERS = [ + 'cache-control', + 'content-encoding', + 'content-language', + 'content-type', + 'date', + 'etag', + 'expires', + 'last-modified', + 'link', + 'location', + 'pragma', + 'vary', +] + +// return an object containing all metadata to be written to the index +const getMetadata = (request, response, options) => { + const metadata = { + time: Date.now(), + url: request.url, + reqHeaders: {}, + resHeaders: {}, + + // options on which we must match the request and vary the response + options: { + compress: options.compress != null ? 
options.compress : request.compress, + }, + } + + // only save the status if it's not a 200 or 304 + if (response.status !== 200 && response.status !== 304) { + metadata.status = response.status + } + + for (const name of KEEP_REQUEST_HEADERS) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + + // if the request's host header differs from the host in the url + // we need to keep it, otherwise it's just noise and we ignore it + const host = request.headers.get('host') + const parsedUrl = new url.URL(request.url) + if (host && parsedUrl.host !== host) { + metadata.reqHeaders.host = host + } + + // if the response has a vary header, make sure + // we store the relevant request headers too + if (response.headers.has('vary')) { + const vary = response.headers.get('vary') + // a vary of "*" means every header causes a different response. + // in that scenario, we do not include any additional headers + // as the freshness check will always fail anyway and we don't + // want to bloat the cache indexes + if (vary !== '*') { + // copy any other request headers that will vary the response + const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) + for (const name of varyHeaders) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + } + } + + for (const name of KEEP_RESPONSE_HEADERS) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + for (const name of options.cacheAdditionalHeaders) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + return metadata +} + +// symbols used to hide objects that may be lazily evaluated in a getter +const _request = Symbol('request') +const _response = Symbol('response') +const _policy = Symbol('policy') + +class CacheEntry { + constructor ({ entry, request, response, options }) { + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. 
we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else { + this.key = cacheKey(request) + } + + this.options = options + + // these properties are behind getters that lazily evaluate + this[_request] = request + this[_response] = response + this[_policy] = null + } + + // returns a CacheEntry instance that satisfies the given request + // or undefined if no existing entry satisfies + static async find (request, options) { + try { + // compacts the index and returns an array of unique entries + var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { + const entryA = new CacheEntry({ entry: A, options }) + const entryB = new CacheEntry({ entry: B, options }) + return entryA.policy.satisfies(entryB.request) + }, { + validateEntry: (entry) => { + // clean out entries with a buggy content-encoding value + if (entry.metadata && + entry.metadata.resHeaders && + entry.metadata.resHeaders['content-encoding'] === null) { + return false + } + + // if an integrity is null, it needs to have a status specified + if (entry.integrity === null) { + return !!(entry.metadata && entry.metadata.status) + } + + return true + }, + }) + } catch (err) { + // if the compact request fails, ignore the error and return + return + } + + // a cache mode of 'reload' means to behave as though we have no cache + // on the way to the network. return undefined to allow cacheFetch to + // create a brand new request no matter what. + if (options.cache === 'reload') { + return + } + + // find the specific entry that satisfies the request + let match + for (const entry of matches) { + const _entry = new CacheEntry({ + entry, + options, + }) + + if (_entry.policy.satisfies(request)) { + match = _entry + break + } + } + + return match + } + + // if the user made a PUT/POST/PATCH then we invalidate our + // cache for the same url by deleting the index entirely + static async invalidate (request, options) { + const key = cacheKey(request) + try { + await cacache.rm.entry(options.cachePath, key, { removeFully: true }) + } catch (err) { + // ignore errors + } + } + + get request () { + if (!this[_request]) { + this[_request] = new Request(this.entry.metadata.url, { + method: 'GET', + headers: this.entry.metadata.reqHeaders, + ...this.entry.metadata.options, + }) + } + + return this[_request] + } + + get response () { + if (!this[_response]) { + this[_response] = new Response(null, { + url: this.entry.metadata.url, + counter: this.options.counter, + status: this.entry.metadata.status || 200, + headers: { + ...this.entry.metadata.resHeaders, + 'content-length': this.entry.size, + }, + }) + } + + return this[_response] + } + + get policy () { + if (!this[_policy]) { + this[_policy] = new CachePolicy({ + entry: this.entry, + request: this.request, + response: this.response, + options: this.options, + }) + } + + return this[_policy] + } + + // wraps the response in a pipeline that stores the data + // in the cache while the user consumes it + async store (status) { + // if we got a status other than 200, 301, or 308, + // or the CachePolicy forbid storage, append the + // cache status header and return it untouched + if ( + this.request.method !== 'GET' || + ![200, 301, 308].includes(this.response.status) || + !this.policy.storable() + ) { + this.response.headers.set('x-local-cache-status', 'skip') + return this.response + } + + const size = 
this.response.headers.get('content-length') + const cacheOpts = { + algorithms: this.options.algorithms, + metadata: getMetadata(this.request, this.response, this.options), + size, + integrity: this.options.integrity, + integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, + } + + let body = null + // we only set a body if the status is a 200, redirects are + // stored as metadata only + if (this.response.status === 200) { + let cacheWriteResolve, cacheWriteReject + const cacheWritePromise = new Promise((resolve, reject) => { + cacheWriteResolve = resolve + cacheWriteReject = reject + }) + + body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ + flush () { + return cacheWritePromise + }, + })) + // this is always true since if we aren't reusing the one from the remote fetch, we + // are using the one from cacache + body.hasIntegrityEmitter = true + + const onResume = () => { + const tee = new Minipass() + const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) + // re-emit the integrity and size events on our new response body so they can be reused + cacheStream.on('integrity', i => body.emit('integrity', i)) + cacheStream.on('size', s => body.emit('size', s)) + // stick a flag on here so downstream users will know if they can expect integrity events + tee.pipe(cacheStream) + // TODO if the cache write fails, log a warning but return the response anyway + // eslint-disable-next-line promise/catch-or-return + cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) + body.unshift(tee) + body.unshift(this.response.body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + } else { + await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + } + + // note: we do not set the x-local-cache-hash header because we do not know + // the hash value until after the write to the cache completes, which doesn't + // happen until after the response has been sent and it's too late to write + // the header anyway + this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + this.response.headers.set('x-local-cache-mode', 'stream') + this.response.headers.set('x-local-cache-status', status) + this.response.headers.set('x-local-cache-time', new Date().toISOString()) + const newResponse = new Response(body, { + url: this.response.url, + status: this.response.status, + headers: this.response.headers, + counter: this.options.counter, + }) + return newResponse + } + + // use the cached data to create a response and return it + async respond (method, options, status) { + let response + if (method === 'HEAD' || [301, 308].includes(this.response.status)) { + // if the request is a HEAD, or the response is a redirect, + // then the metadata in the entry already includes everything + // we need to build a response + response = this.response + } else { + // we're responding with a full cached response, so create a body + // that reads from cacache and attach it to a new Response + const body = new Minipass() + const headers = { ...this.policy.responseHeaders() } + + const onResume = () => { + const cacheStream = cacache.get.stream.byDigest( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if (err.code === 'EINTEGRITY') { + await 
cacache.rm.content( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + } + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { + await CacheEntry.invalidate(this.request, this.options) + } + body.emit('error', err) + cacheStream.resume() + }) + // emit the integrity and size events based on our metadata so we're consistent + body.emit('integrity', this.entry.integrity) + body.emit('size', Number(headers['content-length'])) + cacheStream.pipe(body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + response = new Response(body, { + url: this.entry.metadata.url, + counter: options.counter, + status: 200, + headers, + }) + } + + response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) + response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + response.headers.set('x-local-cache-mode', 'stream') + response.headers.set('x-local-cache-status', status) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) + return response + } + + // use the provided request along with this cache entry to + // revalidate the stored response. returns a response, either + // from the cache or from the update + async revalidate (request, options) { + const revalidateRequest = new Request(request, { + headers: this.policy.revalidationHeaders(request), + }) + + try { + // NOTE: be sure to remove the headers property from the + // user supplied options, since we have already defined + // them on the new request object. if they're still in the + // options then those will overwrite the ones from the policy + var response = await remote(revalidateRequest, { + ...options, + headers: undefined, + }) + } catch (err) { + // if the network fetch fails, return the stale + // cached response unless it has a cache-control + // of 'must-revalidate' + if (!this.policy.mustRevalidate) { + return this.respond(request.method, options, 'stale') + } + + throw err + } + + if (this.policy.revalidated(revalidateRequest, response)) { + // we got a 304, write a new index to the cache and respond from cache + const metadata = getMetadata(request, response, options) + // 304 responses do not include headers that are specific to the response data + // since they do not include a body, so we copy values for headers that were + // in the old cache entry to the new one, if the new metadata does not already + // include that header + for (const name of KEEP_RESPONSE_HEADERS) { + if ( + !hasOwnProperty(metadata.resHeaders, name) && + hasOwnProperty(this.entry.metadata.resHeaders, name) + ) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + } + + for (const name of options.cacheAdditionalHeaders) { + const inMeta = hasOwnProperty(metadata.resHeaders, name) + const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) + const inPolicy = hasOwnProperty(this.policy.response.headers, name) + + // if the header is in the existing entry, but it is not in the metadata + // then we need to write it to the metadata as this will refresh the on-disk cache + if (!inMeta && inEntry) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + // if the header is in the metadata, but not in the policy, then we need to set + // it in the policy so that it's included in the immediate response. 
future + // responses will load a new cache entry, so we don't need to change that + if (!inPolicy && inMeta) { + this.policy.response.headers[name] = metadata.resHeaders[name] + } + } + + try { + await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { + size: this.entry.size, + metadata, + }) + } catch (err) { + // if updating the cache index fails, we ignore it and + // respond anyway + } + return this.respond(request.method, options, 'revalidated') + } + + // if we got a modified response, create a new entry based on it + const newEntry = new CacheEntry({ + request, + response, + options, + }) + + // respond with the new entry while writing it to the cache + return newEntry.store('updated') + } +} + +module.exports = CacheEntry diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/errors.js new file mode 100644 index 0000000000000..67a66573bebe6 --- /dev/null +++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/errors.js @@ -0,0 +1,11 @@ +class NotCachedError extends Error { + constructor (url) { + /* eslint-disable-next-line max-len */ + super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) + this.code = 'ENOTCACHED' + } +} + +module.exports = { + NotCachedError, +} diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/index.js new file mode 100644 index 0000000000000..0de49d23fb933 --- /dev/null +++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/index.js @@ -0,0 +1,49 @@ +const { NotCachedError } = require('./errors.js') +const CacheEntry = require('./entry.js') +const remote = require('../remote.js') + +// do whatever is necessary to get a Response and return it +const cacheFetch = async (request, options) => { + // try to find a cached entry that satisfies this request + const entry = await CacheEntry.find(request, options) + if (!entry) { + // no cached result, if the cache mode is 'only-if-cached' that's a failure + if (options.cache === 'only-if-cached') { + throw new NotCachedError(request.url) + } + + // otherwise, we make a request, store it and return it + const response = await remote(request, options) + const newEntry = new CacheEntry({ request, response, options }) + return newEntry.store('miss') + } + + // we have a cached response that satisfies this request, however if the cache + // mode is 'no-cache' then we send the revalidation request no matter what + if (options.cache === 'no-cache') { + return entry.revalidate(request, options) + } + + // if the cached entry is not stale, or if the cache mode is 'force-cache' or + // 'only-if-cached' we can respond with the cached entry. set the status + // based on the result of needsRevalidation and respond + const _needsRevalidation = entry.policy.needsRevalidation(request) + if (options.cache === 'force-cache' || + options.cache === 'only-if-cached' || + !_needsRevalidation) { + return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') + } + + // if we got here, the cache entry is stale so revalidate it + return entry.revalidate(request, options) +} + +cacheFetch.invalidate = async (request, options) => { + if (!options.cachePath) { + return + } + + return CacheEntry.invalidate(request, options) +} + +module.exports = cacheFetch diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/key.js new file mode 100644 index 0000000000000..f7684d562b7fa --- /dev/null +++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/key.js @@ -0,0 +1,17 @@ +const { URL, format } = require('url') + +// options passed to url.format() when generating a key +const formatOptions = { + auth: false, + fragment: false, + search: true, + unicode: false, +} + +// returns a string to be used as the cache key for the Request +const cacheKey = (request) => { + const parsed = new URL(request.url) + return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` +} + +module.exports = cacheKey diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/policy.js new file mode 100644 index 0000000000000..ada3c8600dae9 --- /dev/null +++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/policy.js @@ -0,0 +1,161 @@ +const CacheSemantics = require('http-cache-semantics') +const Negotiator = require('negotiator') +const ssri = require('ssri') + +// options passed to http-cache-semantics constructor +const policyOptions = { + shared: false, + ignoreCargoCult: true, +} + +// a fake empty response, used when only testing the +// request for storability +const emptyResponse = { status: 200, headers: {} } + +// returns a plain object representation of the Request +const requestObject = (request) => { + const _obj = { + method: request.method, + url: request.url, + headers: {}, + compress: request.compress, + } + + request.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +// returns a plain object representation of the Response +const responseObject = (response) => { + const _obj = { + status: response.status, + headers: {}, + } + + response.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +class CachePolicy { + constructor ({ entry, request, response, options }) { + this.entry = entry + this.request = requestObject(request) + this.response = responseObject(response) + this.options = options + this.policy = new CacheSemantics(this.request, this.response, policyOptions) + + if (this.entry) { + // if we have an entry, copy the timestamp to the _responseTime + // this is necessary because the CacheSemantics constructor forces + // the value to Date.now() which means a policy created from a + // cache entry is likely to always identify itself as stale + this.policy._responseTime = this.entry.metadata.time + } + } + + // static method to quickly determine if a request alone is storable + static storable (request, options) { + // no cachePath means no caching + if (!options.cachePath) { + return false + } + + // user explicitly asked not to cache + if (options.cache === 'no-store') { + return false + } + + // we only cache GET and HEAD requests + if (!['GET', 'HEAD'].includes(request.method)) { + return false + } + + // otherwise, let http-cache-semantics make the decision + // based on the request's headers + const policy = new 
CacheSemantics(requestObject(request), emptyResponse, policyOptions) + return policy.storable() + } + + // returns true if the policy satisfies the request + satisfies (request) { + const _req = requestObject(request) + if (this.request.headers.host !== _req.headers.host) { + return false + } + + if (this.request.compress !== _req.compress) { + return false + } + + const negotiatorA = new Negotiator(this.request) + const negotiatorB = new Negotiator(_req) + + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { + return false + } + + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { + return false + } + + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { + return false + } + + if (this.options.integrity) { + return ssri.parse(this.options.integrity).match(this.entry.integrity) + } + + return true + } + + // returns true if the request and response allow caching + storable () { + return this.policy.storable() + } + + // NOTE: this is a hack to avoid parsing the cache-control + // header ourselves, it returns true if the response's + // cache-control contains must-revalidate + get mustRevalidate () { + return !!this.policy._rescc['must-revalidate'] + } + + // returns true if the cached response requires revalidation + // for the given request + needsRevalidation (request) { + const _req = requestObject(request) + // force method to GET because we only cache GETs + // but can serve a HEAD from a cached GET + _req.method = 'GET' + return !this.policy.satisfiesWithoutRevalidation(_req) + } + + responseHeaders () { + return this.policy.responseHeaders() + } + + // returns a new object containing the appropriate headers + // to send a revalidation request + revalidationHeaders (request) { + const _req = requestObject(request) + return this.policy.revalidationHeaders(_req) + } + + // returns true if the request/response was revalidated + // successfully. returns false if a new response was received + revalidated (request, response) { + const _req = requestObject(request) + const _res = responseObject(response) + const policy = this.policy.revalidatedPolicy(_req, _res) + return !policy.modified + } +} + +module.exports = CachePolicy diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/dns.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/dns.js new file mode 100644 index 0000000000000..13102b57c4aa0 --- /dev/null +++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/dns.js @@ -0,0 +1,49 @@ +const LRUCache = require('lru-cache') +const dns = require('dns') + +const defaultOptions = exports.defaultOptions = { + family: undefined, + hints: dns.ADDRCONFIG, + all: false, + verbatim: undefined, +} + +const lookupCache = exports.lookupCache = new LRUCache({ max: 50 }) + +// this is a factory so that each request can have its own opts (i.e. 
ttl) +// while still sharing the cache across all requests +exports.getLookup = (dnsOptions) => { + return (hostname, options, callback) => { + if (typeof options === 'function') { + callback = options + options = null + } else if (typeof options === 'number') { + options = { family: options } + } + + options = { ...defaultOptions, ...options } + + const key = JSON.stringify({ + hostname, + family: options.family, + hints: options.hints, + all: options.all, + verbatim: options.verbatim, + }) + + if (lookupCache.has(key)) { + const [address, family] = lookupCache.get(key) + process.nextTick(callback, null, address, family) + return + } + + dnsOptions.lookup(hostname, options, (err, address, family) => { + if (err) { + return callback(err) + } + + lookupCache.set(key, [address, family], { ttl: dnsOptions.ttl }) + return callback(null, address, family) + }) + } +} diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/fetch.js new file mode 100644 index 0000000000000..233ba67e16550 --- /dev/null +++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/fetch.js @@ -0,0 +1,118 @@ +'use strict' + +const { FetchError, Request, isRedirect } = require('minipass-fetch') +const url = require('url') + +const CachePolicy = require('./cache/policy.js') +const cache = require('./cache/index.js') +const remote = require('./remote.js') + +// given a Request, a Response and user options +// return true if the response is a redirect that +// can be followed. we throw errors that will result +// in the fetch being rejected if the redirect is +// possible but invalid for some reason +const canFollowRedirect = (request, response, options) => { + if (!isRedirect(response.status)) { + return false + } + + if (options.redirect === 'manual') { + return false + } + + if (options.redirect === 'error') { + throw new FetchError(`redirect mode is set to error: ${request.url}`, + 'no-redirect', { code: 'ENOREDIRECT' }) + } + + if (!response.headers.has('location')) { + throw new FetchError(`redirect location header missing for: ${request.url}`, + 'no-location', { code: 'EINVALIDREDIRECT' }) + } + + if (request.counter >= request.follow) { + throw new FetchError(`maximum redirect reached at: ${request.url}`, + 'max-redirect', { code: 'EMAXREDIRECT' }) + } + + return true +} + +// given a Request, a Response, and the user's options return an object +// with a new Request and a new options object that will be used for +// following the redirect +const getRedirect = (request, response, options) => { + const _opts = { ...options } + const location = response.headers.get('location') + const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) + // Comment below is used under the following license: + /** + * @license + * Copyright (c) 2010-2012 Mikeal Rogers + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an "AS + * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language + * governing permissions and limitations under the License. 
+ */ + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (new url.URL(request.url).hostname !== redirectUrl.hostname) { + request.headers.delete('authorization') + request.headers.delete('cookie') + } + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if ( + response.status === 303 || + (request.method === 'POST' && [301, 302].includes(response.status)) + ) { + _opts.method = 'GET' + _opts.body = null + request.headers.delete('content-length') + } + + _opts.headers = {} + request.headers.forEach((value, key) => { + _opts.headers[key] = value + }) + + _opts.counter = ++request.counter + const redirectReq = new Request(url.format(redirectUrl), _opts) + return { + request: redirectReq, + options: _opts, + } +} + +const fetch = async (request, options) => { + const response = CachePolicy.storable(request, options) + ? await cache(request, options) + : await remote(request, options) + + // if the request wasn't a GET or HEAD, and the response + // status is between 200 and 399 inclusive, invalidate the + // request url + if (!['GET', 'HEAD'].includes(request.method) && + response.status >= 200 && + response.status <= 399) { + await cache.invalidate(request, options) + } + + if (!canFollowRedirect(request, response, options)) { + return response + } + + const redirect = getRedirect(request, response, options) + return fetch(redirect.request, redirect.options) +} + +module.exports = fetch diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/index.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/index.js new file mode 100644 index 0000000000000..2f12e8e1b6113 --- /dev/null +++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/index.js @@ -0,0 +1,41 @@ +const { FetchError, Headers, Request, Response } = require('minipass-fetch') + +const configureOptions = require('./options.js') +const fetch = require('./fetch.js') + +const makeFetchHappen = (url, opts) => { + const options = configureOptions(opts) + + const request = new Request(url, options) + return fetch(request, options) +} + +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { + if (typeof defaultUrl === 'object') { + defaultOptions = defaultUrl + defaultUrl = null + } + + const defaultedFetch = (url, options = {}) => { + const finalUrl = url || defaultUrl + const finalOptions = { + ...defaultOptions, + ...options, + headers: { + ...defaultOptions.headers, + ...options.headers, + }, + } + return wrappedFetch(finalUrl, finalOptions) + } + + defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => + makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) + return defaultedFetch +} + +module.exports = makeFetchHappen +module.exports.FetchError = FetchError +module.exports.Headers = Headers +module.exports.Request = Request +module.exports.Response = Response diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/options.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/options.js new file mode 100644 index 0000000000000..f77511279f831 --- /dev/null +++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/options.js @@ -0,0 +1,54 @@ +const dns = require('dns') + +const conditionalHeaders = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 
'if-match', + 'if-range', +] + +const configureOptions = (opts) => { + const { strictSSL, ...options } = { ...opts } + options.method = options.method ? options.method.toUpperCase() : 'GET' + options.rejectUnauthorized = strictSSL !== false + + if (!options.retry) { + options.retry = { retries: 0 } + } else if (typeof options.retry === 'string') { + const retries = parseInt(options.retry, 10) + if (isFinite(retries)) { + options.retry = { retries } + } else { + options.retry = { retries: 0 } + } + } else if (typeof options.retry === 'number') { + options.retry = { retries: options.retry } + } else { + options.retry = { retries: 0, ...options.retry } + } + + options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } + + options.cache = options.cache || 'default' + if (options.cache === 'default') { + const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { + return conditionalHeaders.includes(name.toLowerCase()) + }) + if (hasConditionalHeader) { + options.cache = 'no-store' + } + } + + options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] + + // cacheManager is deprecated, but if it's set and + // cachePath is not we should copy it to the new field + if (options.cacheManager && !options.cachePath) { + options.cachePath = options.cacheManager + } + + return options +} + +module.exports = configureOptions diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/pipeline.js new file mode 100644 index 0000000000000..b1d221b2d0ce3 --- /dev/null +++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/pipeline.js @@ -0,0 +1,41 @@ +'use strict' + +const MinipassPipeline = require('minipass-pipeline') + +class CachingMinipassPipeline extends MinipassPipeline { + #events = [] + #data = new Map() + + constructor (opts, ...streams) { + // CRITICAL: do NOT pass the streams to the call to super(), this will start + // the flow of data and potentially cause the events we need to catch to emit + // before we've finished our own setup. 
instead we call super() with no args, + // finish our setup, and then push the streams into ourselves to start the + // data flow + super() + this.#events = opts.events + + /* istanbul ignore next - coverage disabled because this is pointless to test here */ + if (streams.length) { + this.push(...streams) + } + } + + on (event, handler) { + if (this.#events.includes(event) && this.#data.has(event)) { + return handler(...this.#data.get(event)) + } + + return super.on(event, handler) + } + + emit (event, ...data) { + if (this.#events.includes(event)) { + this.#data.set(event, data) + } + + return super.emit(event, ...data) + } +} + +module.exports = CachingMinipassPipeline diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/remote.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/remote.js new file mode 100644 index 0000000000000..bdbcc79cad908 --- /dev/null +++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/remote.js @@ -0,0 +1,121 @@ +const { Minipass } = require('minipass') +const fetch = require('minipass-fetch') +const promiseRetry = require('promise-retry') +const ssri = require('ssri') + +const CachingMinipassPipeline = require('./pipeline.js') +const getAgent = require('./agent.js') +const pkg = require('../package.json') + +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) + 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) +] + +const RETRY_TYPES = [ + 'request-timeout', +] + +// make a request directly to the remote source, +// retrying certain classes of errors as well as +// following redirects (through the cache if necessary) +// and verifying response integrity +const remoteFetch = (request, options) => { + const agent = getAgent(request.url, options) + if (!request.headers.has('connection')) { + request.headers.set('connection', agent ? 
'keep-alive' : 'close') + } + + if (!request.headers.has('user-agent')) { + request.headers.set('user-agent', USER_AGENT) + } + + // keep our own options since we're overriding the agent + // and the redirect mode + const _opts = { + ...options, + agent, + redirect: 'manual', + } + + return promiseRetry(async (retryHandler, attemptNum) => { + const req = new fetch.Request(request, _opts) + try { + let res = await fetch(req, _opts) + if (_opts.integrity && res.status === 200) { + // we got a 200 response and the user has specified an expected + // integrity value, so wrap the response in an ssri stream to verify it + const integrityStream = ssri.integrityStream({ + algorithms: _opts.algorithms, + integrity: _opts.integrity, + size: _opts.size, + }) + const pipeline = new CachingMinipassPipeline({ + events: ['integrity', 'size'], + }, res.body, integrityStream) + // we also propagate the integrity and size events out to the pipeline so we can use + // this new response body as an integrityEmitter for cacache + integrityStream.on('integrity', i => pipeline.emit('integrity', i)) + integrityStream.on('size', s => pipeline.emit('size', s)) + res = new fetch.Response(pipeline, res) + // set an explicit flag so we know if our response body will emit integrity and size + res.body.hasIntegrityEmitter = true + } + + res.headers.set('x-fetch-attempts', attemptNum) + + // do not retry POST requests, or requests with a streaming body + // do retry requests with a 408, 420, 429 or 500+ status in the response + const isStream = Minipass.isStream(req.body) + const isRetriable = req.method !== 'POST' && + !isStream && + ([408, 420, 429].includes(res.status) || res.status >= 500) + + if (isRetriable) { + if (typeof options.onRetry === 'function') { + options.onRetry(res) + } + + return retryHandler(res) + } + + return res + } catch (err) { + const code = (err.code === 'EPROMISERETRY') + ? 
err.retried.code + : err.code + + // err.retried will be the thing that was thrown from above + // if it's a response, we just got a bad status code and we + // can re-throw to allow the retry + const isRetryError = err.retried instanceof fetch.Response || + (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) + + if (req.method === 'POST' || isRetryError) { + throw err + } + + if (typeof options.onRetry === 'function') { + options.onRetry(err) + } + + return retryHandler(err) + } + }, options.retry).catch((err) => { + // don't reject for http errors, just return them + if (err.status >= 400 && err.type !== 'system') { + return err + } + + throw err + }) +} + +module.exports = remoteFetch diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/package.json b/node_modules/sigstore/node_modules/make-fetch-happen/package.json new file mode 100644 index 0000000000000..fd415dc9966fa --- /dev/null +++ b/node_modules/sigstore/node_modules/make-fetch-happen/package.json @@ -0,0 +1,78 @@ +{ + "name": "make-fetch-happen", + "version": "11.1.1", + "description": "Opinionated, caching, retrying fetch client", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "posttest": "npm run lint", + "eslint": "eslint", + "lint": "eslint \"**/*.js\"", + "lintfix": "npm run lint -- --fix", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/make-fetch-happen.git" + }, + "keywords": [ + "http", + "request", + "fetch", + "mean girls", + "caching", + "cache", + "subresource integrity" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "agentkeepalive": "^4.2.1", + "cacache": "^17.0.0", + "http-cache-semantics": "^4.1.1", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^5.0.0", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + "ssri": "^10.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.14.1", + "nock": "^13.2.4", + "safe-buffer": "^5.2.1", + "standard-version": "^9.3.2", + "tap": "^16.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "tap": { + "color": 1, + "files": "test/*.js", + "check-coverage": true, + "timeout": 60, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.14.1", + "publish": "true" + } +} diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE b/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE new file mode 100644 index 0000000000000..1808eb2844231 --- /dev/null +++ b/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE @@ -0,0 +1,16 @@ +ISC License + +Copyright 2017-2022 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/agent.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/agent.js new file mode 100644 index 0000000000000..dd68492ed7ea7 --- /dev/null +++ b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/agent.js @@ -0,0 +1,214 @@ +'use strict' +const LRU = require('lru-cache') +const url = require('url') +const isLambda = require('is-lambda') +const dns = require('./dns.js') + +const AGENT_CACHE = new LRU({ max: 50 }) +const HttpAgent = require('agentkeepalive') +const HttpsAgent = HttpAgent.HttpsAgent + +module.exports = getAgent + +const getAgentTimeout = timeout => + typeof timeout !== 'number' || !timeout ? 0 : timeout + 1 + +const getMaxSockets = maxSockets => maxSockets || 15 + +function getAgent (uri, opts) { + const parsedUri = new url.URL(typeof uri === 'string' ? uri : uri.url) + const isHttps = parsedUri.protocol === 'https:' + const pxuri = getProxyUri(parsedUri.href, opts) + + // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout + // of zero disables the timeout behavior (OS limits still apply). Else, if + // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that + // the node-fetch-npm timeout will always fire first, giving us more + // consistent errors. + const agentTimeout = getAgentTimeout(opts.timeout) + const agentMaxSockets = getMaxSockets(opts.maxSockets) + + const key = [ + `https:${isHttps}`, + pxuri + ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}` + : '>no-proxy<', + `local-address:${opts.localAddress || '>no-local-address<'}`, + `strict-ssl:${isHttps ? opts.rejectUnauthorized : '>no-strict-ssl<'}`, + `ca:${(isHttps && opts.ca) || '>no-ca<'}`, + `cert:${(isHttps && opts.cert) || '>no-cert<'}`, + `key:${(isHttps && opts.key) || '>no-key<'}`, + `timeout:${agentTimeout}`, + `maxSockets:${agentMaxSockets}`, + ].join(':') + + if (opts.agent != null) { // `agent: false` has special behavior! + return opts.agent + } + + // keep alive in AWS lambda makes no sense + const lambdaAgent = !isLambda ? null + : isHttps ? require('https').globalAgent + : require('http').globalAgent + + if (isLambda && !pxuri) { + return lambdaAgent + } + + if (AGENT_CACHE.peek(key)) { + return AGENT_CACHE.get(key) + } + + if (pxuri) { + const pxopts = isLambda ? { + ...opts, + agent: lambdaAgent, + } : opts + const proxy = getProxy(pxuri, pxopts, isHttps) + AGENT_CACHE.set(key, proxy) + return proxy + } + + const agent = isHttps ? 
new HttpsAgent({ + maxSockets: agentMaxSockets, + ca: opts.ca, + cert: opts.cert, + key: opts.key, + localAddress: opts.localAddress, + rejectUnauthorized: opts.rejectUnauthorized, + timeout: agentTimeout, + freeSocketTimeout: 15000, + lookup: dns.getLookup(opts.dns), + }) : new HttpAgent({ + maxSockets: agentMaxSockets, + localAddress: opts.localAddress, + timeout: agentTimeout, + freeSocketTimeout: 15000, + lookup: dns.getLookup(opts.dns), + }) + AGENT_CACHE.set(key, agent) + return agent +} + +function checkNoProxy (uri, opts) { + const host = new url.URL(uri).hostname.split('.').reverse() + let noproxy = (opts.noProxy || getProcessEnv('no_proxy')) + if (typeof noproxy === 'string') { + noproxy = noproxy.split(',').map(n => n.trim()) + } + + return noproxy && noproxy.some(no => { + const noParts = no.split('.').filter(x => x).reverse() + if (!noParts.length) { + return false + } + for (let i = 0; i < noParts.length; i++) { + if (host[i] !== noParts[i]) { + return false + } + } + return true + }) +} + +module.exports.getProcessEnv = getProcessEnv + +function getProcessEnv (env) { + if (!env) { + return + } + + let value + + if (Array.isArray(env)) { + for (const e of env) { + value = process.env[e] || + process.env[e.toUpperCase()] || + process.env[e.toLowerCase()] + if (typeof value !== 'undefined') { + break + } + } + } + + if (typeof env === 'string') { + value = process.env[env] || + process.env[env.toUpperCase()] || + process.env[env.toLowerCase()] + } + + return value +} + +module.exports.getProxyUri = getProxyUri +function getProxyUri (uri, opts) { + const protocol = new url.URL(uri).protocol + + const proxy = opts.proxy || + ( + protocol === 'https:' && + getProcessEnv('https_proxy') + ) || + ( + protocol === 'http:' && + getProcessEnv(['https_proxy', 'http_proxy', 'proxy']) + ) + if (!proxy) { + return null + } + + const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy + + return !checkNoProxy(uri, opts) && parsedProxy +} + +const getAuth = u => + u.username && u.password ? decodeURIComponent(`${u.username}:${u.password}`) + : u.username ? 
decodeURIComponent(u.username) + : null + +const getPath = u => u.pathname + u.search + u.hash + +const HttpProxyAgent = require('http-proxy-agent') +const HttpsProxyAgent = require('https-proxy-agent') +const { SocksProxyAgent } = require('socks-proxy-agent') +module.exports.getProxy = getProxy +function getProxy (proxyUrl, opts, isHttps) { + // our current proxy agents do not support an overridden dns lookup method, so will not + // benefit from the dns cache + const popts = { + host: proxyUrl.hostname, + port: proxyUrl.port, + protocol: proxyUrl.protocol, + path: getPath(proxyUrl), + auth: getAuth(proxyUrl), + ca: opts.ca, + cert: opts.cert, + key: opts.key, + timeout: getAgentTimeout(opts.timeout), + localAddress: opts.localAddress, + maxSockets: getMaxSockets(opts.maxSockets), + rejectUnauthorized: opts.rejectUnauthorized, + } + + if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') { + if (!isHttps) { + return new HttpProxyAgent(popts) + } else { + return new HttpsProxyAgent(popts) + } + } else if (proxyUrl.protocol.startsWith('socks')) { + // socks-proxy-agent uses hostname not host + popts.hostname = popts.host + delete popts.host + return new SocksProxyAgent(popts) + } else { + throw Object.assign( + new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`), + { + code: 'EUNSUPPORTEDPROXY', + url: proxyUrl.href, + } + ) + } +} diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js new file mode 100644 index 0000000000000..45141095074ec --- /dev/null +++ b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js @@ -0,0 +1,469 @@ +const { Request, Response } = require('minipass-fetch') +const { Minipass } = require('minipass') +const MinipassFlush = require('minipass-flush') +const cacache = require('cacache') +const url = require('url') + +const CachingMinipassPipeline = require('../pipeline.js') +const CachePolicy = require('./policy.js') +const cacheKey = require('./key.js') +const remote = require('../remote.js') + +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) + +// allow list for request headers that will be written to the cache index +// note: we will also store any request headers +// that are named in a response's vary header +const KEEP_REQUEST_HEADERS = [ + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept', + 'cache-control', +] + +// allow list for response headers that will be written to the cache index +// note: we must not store the real response's age header, or when we load +// a cache policy based on the metadata it will think the cached response +// is always stale +const KEEP_RESPONSE_HEADERS = [ + 'cache-control', + 'content-encoding', + 'content-language', + 'content-type', + 'date', + 'etag', + 'expires', + 'last-modified', + 'link', + 'location', + 'pragma', + 'vary', +] + +// return an object containing all metadata to be written to the index +const getMetadata = (request, response, options) => { + const metadata = { + time: Date.now(), + url: request.url, + reqHeaders: {}, + resHeaders: {}, + + // options on which we must match the request and vary the response + options: { + compress: options.compress != null ? 
options.compress : request.compress, + }, + } + + // only save the status if it's not a 200 or 304 + if (response.status !== 200 && response.status !== 304) { + metadata.status = response.status + } + + for (const name of KEEP_REQUEST_HEADERS) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + + // if the request's host header differs from the host in the url + // we need to keep it, otherwise it's just noise and we ignore it + const host = request.headers.get('host') + const parsedUrl = new url.URL(request.url) + if (host && parsedUrl.host !== host) { + metadata.reqHeaders.host = host + } + + // if the response has a vary header, make sure + // we store the relevant request headers too + if (response.headers.has('vary')) { + const vary = response.headers.get('vary') + // a vary of "*" means every header causes a different response. + // in that scenario, we do not include any additional headers + // as the freshness check will always fail anyway and we don't + // want to bloat the cache indexes + if (vary !== '*') { + // copy any other request headers that will vary the response + const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) + for (const name of varyHeaders) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + } + } + + for (const name of KEEP_RESPONSE_HEADERS) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + for (const name of options.cacheAdditionalHeaders) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + return metadata +} + +// symbols used to hide objects that may be lazily evaluated in a getter +const _request = Symbol('request') +const _response = Symbol('response') +const _policy = Symbol('policy') + +class CacheEntry { + constructor ({ entry, request, response, options }) { + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. 
we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else { + this.key = cacheKey(request) + } + + this.options = options + + // these properties are behind getters that lazily evaluate + this[_request] = request + this[_response] = response + this[_policy] = null + } + + // returns a CacheEntry instance that satisfies the given request + // or undefined if no existing entry satisfies + static async find (request, options) { + try { + // compacts the index and returns an array of unique entries + var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { + const entryA = new CacheEntry({ entry: A, options }) + const entryB = new CacheEntry({ entry: B, options }) + return entryA.policy.satisfies(entryB.request) + }, { + validateEntry: (entry) => { + // clean out entries with a buggy content-encoding value + if (entry.metadata && + entry.metadata.resHeaders && + entry.metadata.resHeaders['content-encoding'] === null) { + return false + } + + // if an integrity is null, it needs to have a status specified + if (entry.integrity === null) { + return !!(entry.metadata && entry.metadata.status) + } + + return true + }, + }) + } catch (err) { + // if the compact request fails, ignore the error and return + return + } + + // a cache mode of 'reload' means to behave as though we have no cache + // on the way to the network. return undefined to allow cacheFetch to + // create a brand new request no matter what. + if (options.cache === 'reload') { + return + } + + // find the specific entry that satisfies the request + let match + for (const entry of matches) { + const _entry = new CacheEntry({ + entry, + options, + }) + + if (_entry.policy.satisfies(request)) { + match = _entry + break + } + } + + return match + } + + // if the user made a PUT/POST/PATCH then we invalidate our + // cache for the same url by deleting the index entirely + static async invalidate (request, options) { + const key = cacheKey(request) + try { + await cacache.rm.entry(options.cachePath, key, { removeFully: true }) + } catch (err) { + // ignore errors + } + } + + get request () { + if (!this[_request]) { + this[_request] = new Request(this.entry.metadata.url, { + method: 'GET', + headers: this.entry.metadata.reqHeaders, + ...this.entry.metadata.options, + }) + } + + return this[_request] + } + + get response () { + if (!this[_response]) { + this[_response] = new Response(null, { + url: this.entry.metadata.url, + counter: this.options.counter, + status: this.entry.metadata.status || 200, + headers: { + ...this.entry.metadata.resHeaders, + 'content-length': this.entry.size, + }, + }) + } + + return this[_response] + } + + get policy () { + if (!this[_policy]) { + this[_policy] = new CachePolicy({ + entry: this.entry, + request: this.request, + response: this.response, + options: this.options, + }) + } + + return this[_policy] + } + + // wraps the response in a pipeline that stores the data + // in the cache while the user consumes it + async store (status) { + // if we got a status other than 200, 301, or 308, + // or the CachePolicy forbid storage, append the + // cache status header and return it untouched + if ( + this.request.method !== 'GET' || + ![200, 301, 308].includes(this.response.status) || + !this.policy.storable() + ) { + this.response.headers.set('x-local-cache-status', 'skip') + return this.response + } + + const size = 
this.response.headers.get('content-length') + const cacheOpts = { + algorithms: this.options.algorithms, + metadata: getMetadata(this.request, this.response, this.options), + size, + integrity: this.options.integrity, + integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, + } + + let body = null + // we only set a body if the status is a 200, redirects are + // stored as metadata only + if (this.response.status === 200) { + let cacheWriteResolve, cacheWriteReject + const cacheWritePromise = new Promise((resolve, reject) => { + cacheWriteResolve = resolve + cacheWriteReject = reject + }) + + body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ + flush () { + return cacheWritePromise + }, + })) + // this is always true since if we aren't reusing the one from the remote fetch, we + // are using the one from cacache + body.hasIntegrityEmitter = true + + const onResume = () => { + const tee = new Minipass() + const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) + // re-emit the integrity and size events on our new response body so they can be reused + cacheStream.on('integrity', i => body.emit('integrity', i)) + cacheStream.on('size', s => body.emit('size', s)) + // stick a flag on here so downstream users will know if they can expect integrity events + tee.pipe(cacheStream) + // TODO if the cache write fails, log a warning but return the response anyway + // eslint-disable-next-line promise/catch-or-return + cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) + body.unshift(tee) + body.unshift(this.response.body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + } else { + await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + } + + // note: we do not set the x-local-cache-hash header because we do not know + // the hash value until after the write to the cache completes, which doesn't + // happen until after the response has been sent and it's too late to write + // the header anyway + this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + this.response.headers.set('x-local-cache-mode', 'stream') + this.response.headers.set('x-local-cache-status', status) + this.response.headers.set('x-local-cache-time', new Date().toISOString()) + const newResponse = new Response(body, { + url: this.response.url, + status: this.response.status, + headers: this.response.headers, + counter: this.options.counter, + }) + return newResponse + } + + // use the cached data to create a response and return it + async respond (method, options, status) { + let response + if (method === 'HEAD' || [301, 308].includes(this.response.status)) { + // if the request is a HEAD, or the response is a redirect, + // then the metadata in the entry already includes everything + // we need to build a response + response = this.response + } else { + // we're responding with a full cached response, so create a body + // that reads from cacache and attach it to a new Response + const body = new Minipass() + const headers = { ...this.policy.responseHeaders() } + + const onResume = () => { + const cacheStream = cacache.get.stream.byDigest( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if (err.code === 'EINTEGRITY') { + await 
cacache.rm.content( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + } + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { + await CacheEntry.invalidate(this.request, this.options) + } + body.emit('error', err) + cacheStream.resume() + }) + // emit the integrity and size events based on our metadata so we're consistent + body.emit('integrity', this.entry.integrity) + body.emit('size', Number(headers['content-length'])) + cacheStream.pipe(body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + response = new Response(body, { + url: this.entry.metadata.url, + counter: options.counter, + status: 200, + headers, + }) + } + + response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) + response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + response.headers.set('x-local-cache-mode', 'stream') + response.headers.set('x-local-cache-status', status) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) + return response + } + + // use the provided request along with this cache entry to + // revalidate the stored response. returns a response, either + // from the cache or from the update + async revalidate (request, options) { + const revalidateRequest = new Request(request, { + headers: this.policy.revalidationHeaders(request), + }) + + try { + // NOTE: be sure to remove the headers property from the + // user supplied options, since we have already defined + // them on the new request object. if they're still in the + // options then those will overwrite the ones from the policy + var response = await remote(revalidateRequest, { + ...options, + headers: undefined, + }) + } catch (err) { + // if the network fetch fails, return the stale + // cached response unless it has a cache-control + // of 'must-revalidate' + if (!this.policy.mustRevalidate) { + return this.respond(request.method, options, 'stale') + } + + throw err + } + + if (this.policy.revalidated(revalidateRequest, response)) { + // we got a 304, write a new index to the cache and respond from cache + const metadata = getMetadata(request, response, options) + // 304 responses do not include headers that are specific to the response data + // since they do not include a body, so we copy values for headers that were + // in the old cache entry to the new one, if the new metadata does not already + // include that header + for (const name of KEEP_RESPONSE_HEADERS) { + if ( + !hasOwnProperty(metadata.resHeaders, name) && + hasOwnProperty(this.entry.metadata.resHeaders, name) + ) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + } + + for (const name of options.cacheAdditionalHeaders) { + const inMeta = hasOwnProperty(metadata.resHeaders, name) + const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) + const inPolicy = hasOwnProperty(this.policy.response.headers, name) + + // if the header is in the existing entry, but it is not in the metadata + // then we need to write it to the metadata as this will refresh the on-disk cache + if (!inMeta && inEntry) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + // if the header is in the metadata, but not in the policy, then we need to set + // it in the policy so that it's included in the immediate response. 
future + // responses will load a new cache entry, so we don't need to change that + if (!inPolicy && inMeta) { + this.policy.response.headers[name] = metadata.resHeaders[name] + } + } + + try { + await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { + size: this.entry.size, + metadata, + }) + } catch (err) { + // if updating the cache index fails, we ignore it and + // respond anyway + } + return this.respond(request.method, options, 'revalidated') + } + + // if we got a modified response, create a new entry based on it + const newEntry = new CacheEntry({ + request, + response, + options, + }) + + // respond with the new entry while writing it to the cache + return newEntry.store('updated') + } +} + +module.exports = CacheEntry diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js new file mode 100644 index 0000000000000..67a66573bebe6 --- /dev/null +++ b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js @@ -0,0 +1,11 @@ +class NotCachedError extends Error { + constructor (url) { + /* eslint-disable-next-line max-len */ + super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) + this.code = 'ENOTCACHED' + } +} + +module.exports = { + NotCachedError, +} diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js new file mode 100644 index 0000000000000..0de49d23fb933 --- /dev/null +++ b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js @@ -0,0 +1,49 @@ +const { NotCachedError } = require('./errors.js') +const CacheEntry = require('./entry.js') +const remote = require('../remote.js') + +// do whatever is necessary to get a Response and return it +const cacheFetch = async (request, options) => { + // try to find a cached entry that satisfies this request + const entry = await CacheEntry.find(request, options) + if (!entry) { + // no cached result, if the cache mode is 'only-if-cached' that's a failure + if (options.cache === 'only-if-cached') { + throw new NotCachedError(request.url) + } + + // otherwise, we make a request, store it and return it + const response = await remote(request, options) + const newEntry = new CacheEntry({ request, response, options }) + return newEntry.store('miss') + } + + // we have a cached response that satisfies this request, however if the cache + // mode is 'no-cache' then we send the revalidation request no matter what + if (options.cache === 'no-cache') { + return entry.revalidate(request, options) + } + + // if the cached entry is not stale, or if the cache mode is 'force-cache' or + // 'only-if-cached' we can respond with the cached entry. set the status + // based on the result of needsRevalidation and respond + const _needsRevalidation = entry.policy.needsRevalidation(request) + if (options.cache === 'force-cache' || + options.cache === 'only-if-cached' || + !_needsRevalidation) { + return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') + } + + // if we got here, the cache entry is stale so revalidate it + return entry.revalidate(request, options) +} + +cacheFetch.invalidate = async (request, options) => { + if (!options.cachePath) { + return + } + + return CacheEntry.invalidate(request, options) +} + +module.exports = cacheFetch diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js new file mode 100644 index 0000000000000..f7684d562b7fa --- /dev/null +++ b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js @@ -0,0 +1,17 @@ +const { URL, format } = require('url') + +// options passed to url.format() when generating a key +const formatOptions = { + auth: false, + fragment: false, + search: true, + unicode: false, +} + +// returns a string to be used as the cache key for the Request +const cacheKey = (request) => { + const parsed = new URL(request.url) + return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` +} + +module.exports = cacheKey diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js new file mode 100644 index 0000000000000..ada3c8600dae9 --- /dev/null +++ b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js @@ -0,0 +1,161 @@ +const CacheSemantics = require('http-cache-semantics') +const Negotiator = require('negotiator') +const ssri = require('ssri') + +// options passed to http-cache-semantics constructor +const policyOptions = { + shared: false, + ignoreCargoCult: true, +} + +// a fake empty response, used when only testing the +// request for storability +const emptyResponse = { status: 200, headers: {} } + +// returns a plain object representation of the Request +const requestObject = (request) => { + const _obj = { + method: request.method, + url: request.url, + headers: {}, + compress: request.compress, + } + + request.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +// returns a plain object representation of the Response +const responseObject = (response) => { + const _obj = { + status: response.status, + headers: {}, + } + + response.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +class CachePolicy { + constructor ({ entry, request, response, options }) { + this.entry = entry + this.request = requestObject(request) + this.response = responseObject(response) + this.options = options + this.policy = new CacheSemantics(this.request, this.response, policyOptions) + + if (this.entry) { + // if we have an entry, copy the timestamp to the _responseTime + // this is necessary because the CacheSemantics constructor forces + // the value to Date.now() which means a policy created from a + // cache entry is likely to always identify itself as stale + this.policy._responseTime = this.entry.metadata.time + } + } + + // static method to quickly determine if a request alone is storable + static storable (request, options) { + // no cachePath means no caching + if (!options.cachePath) { + return false + } + + // user explicitly asked not to cache + if (options.cache === 'no-store') { + return false + } + + // we only cache GET and HEAD requests + if (!['GET', 'HEAD'].includes(request.method)) { + return false + } + + // otherwise, let http-cache-semantics make the decision + // based on the request's headers + const policy = new CacheSemantics(requestObject(request), 
emptyResponse, policyOptions) + return policy.storable() + } + + // returns true if the policy satisfies the request + satisfies (request) { + const _req = requestObject(request) + if (this.request.headers.host !== _req.headers.host) { + return false + } + + if (this.request.compress !== _req.compress) { + return false + } + + const negotiatorA = new Negotiator(this.request) + const negotiatorB = new Negotiator(_req) + + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { + return false + } + + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { + return false + } + + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { + return false + } + + if (this.options.integrity) { + return ssri.parse(this.options.integrity).match(this.entry.integrity) + } + + return true + } + + // returns true if the request and response allow caching + storable () { + return this.policy.storable() + } + + // NOTE: this is a hack to avoid parsing the cache-control + // header ourselves, it returns true if the response's + // cache-control contains must-revalidate + get mustRevalidate () { + return !!this.policy._rescc['must-revalidate'] + } + + // returns true if the cached response requires revalidation + // for the given request + needsRevalidation (request) { + const _req = requestObject(request) + // force method to GET because we only cache GETs + // but can serve a HEAD from a cached GET + _req.method = 'GET' + return !this.policy.satisfiesWithoutRevalidation(_req) + } + + responseHeaders () { + return this.policy.responseHeaders() + } + + // returns a new object containing the appropriate headers + // to send a revalidation request + revalidationHeaders (request) { + const _req = requestObject(request) + return this.policy.revalidationHeaders(_req) + } + + // returns true if the request/response was revalidated + // successfully. returns false if a new response was received + revalidated (request, response) { + const _req = requestObject(request) + const _res = responseObject(response) + const policy = this.policy.revalidatedPolicy(_req, _res) + return !policy.modified + } +} + +module.exports = CachePolicy diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/dns.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/dns.js new file mode 100644 index 0000000000000..13102b57c4aa0 --- /dev/null +++ b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/dns.js @@ -0,0 +1,49 @@ +const LRUCache = require('lru-cache') +const dns = require('dns') + +const defaultOptions = exports.defaultOptions = { + family: undefined, + hints: dns.ADDRCONFIG, + all: false, + verbatim: undefined, +} + +const lookupCache = exports.lookupCache = new LRUCache({ max: 50 }) + +// this is a factory so that each request can have its own opts (i.e. 
ttl) +// while still sharing the cache across all requests +exports.getLookup = (dnsOptions) => { + return (hostname, options, callback) => { + if (typeof options === 'function') { + callback = options + options = null + } else if (typeof options === 'number') { + options = { family: options } + } + + options = { ...defaultOptions, ...options } + + const key = JSON.stringify({ + hostname, + family: options.family, + hints: options.hints, + all: options.all, + verbatim: options.verbatim, + }) + + if (lookupCache.has(key)) { + const [address, family] = lookupCache.get(key) + process.nextTick(callback, null, address, family) + return + } + + dnsOptions.lookup(hostname, options, (err, address, family) => { + if (err) { + return callback(err) + } + + lookupCache.set(key, [address, family], { ttl: dnsOptions.ttl }) + return callback(null, address, family) + }) + } +} diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js new file mode 100644 index 0000000000000..233ba67e16550 --- /dev/null +++ b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js @@ -0,0 +1,118 @@ +'use strict' + +const { FetchError, Request, isRedirect } = require('minipass-fetch') +const url = require('url') + +const CachePolicy = require('./cache/policy.js') +const cache = require('./cache/index.js') +const remote = require('./remote.js') + +// given a Request, a Response and user options +// return true if the response is a redirect that +// can be followed. we throw errors that will result +// in the fetch being rejected if the redirect is +// possible but invalid for some reason +const canFollowRedirect = (request, response, options) => { + if (!isRedirect(response.status)) { + return false + } + + if (options.redirect === 'manual') { + return false + } + + if (options.redirect === 'error') { + throw new FetchError(`redirect mode is set to error: ${request.url}`, + 'no-redirect', { code: 'ENOREDIRECT' }) + } + + if (!response.headers.has('location')) { + throw new FetchError(`redirect location header missing for: ${request.url}`, + 'no-location', { code: 'EINVALIDREDIRECT' }) + } + + if (request.counter >= request.follow) { + throw new FetchError(`maximum redirect reached at: ${request.url}`, + 'max-redirect', { code: 'EMAXREDIRECT' }) + } + + return true +} + +// given a Request, a Response, and the user's options return an object +// with a new Request and a new options object that will be used for +// following the redirect +const getRedirect = (request, response, options) => { + const _opts = { ...options } + const location = response.headers.get('location') + const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) + // Comment below is used under the following license: + /** + * @license + * Copyright (c) 2010-2012 Mikeal Rogers + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an "AS + * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). 
This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (new url.URL(request.url).hostname !== redirectUrl.hostname) { + request.headers.delete('authorization') + request.headers.delete('cookie') + } + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if ( + response.status === 303 || + (request.method === 'POST' && [301, 302].includes(response.status)) + ) { + _opts.method = 'GET' + _opts.body = null + request.headers.delete('content-length') + } + + _opts.headers = {} + request.headers.forEach((value, key) => { + _opts.headers[key] = value + }) + + _opts.counter = ++request.counter + const redirectReq = new Request(url.format(redirectUrl), _opts) + return { + request: redirectReq, + options: _opts, + } +} + +const fetch = async (request, options) => { + const response = CachePolicy.storable(request, options) + ? await cache(request, options) + : await remote(request, options) + + // if the request wasn't a GET or HEAD, and the response + // status is between 200 and 399 inclusive, invalidate the + // request url + if (!['GET', 'HEAD'].includes(request.method) && + response.status >= 200 && + response.status <= 399) { + await cache.invalidate(request, options) + } + + if (!canFollowRedirect(request, response, options)) { + return response + } + + const redirect = getRedirect(request, response, options) + return fetch(redirect.request, redirect.options) +} + +module.exports = fetch diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js new file mode 100644 index 0000000000000..2f12e8e1b6113 --- /dev/null +++ b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js @@ -0,0 +1,41 @@ +const { FetchError, Headers, Request, Response } = require('minipass-fetch') + +const configureOptions = require('./options.js') +const fetch = require('./fetch.js') + +const makeFetchHappen = (url, opts) => { + const options = configureOptions(opts) + + const request = new Request(url, options) + return fetch(request, options) +} + +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { + if (typeof defaultUrl === 'object') { + defaultOptions = defaultUrl + defaultUrl = null + } + + const defaultedFetch = (url, options = {}) => { + const finalUrl = url || defaultUrl + const finalOptions = { + ...defaultOptions, + ...options, + headers: { + ...defaultOptions.headers, + ...options.headers, + }, + } + return wrappedFetch(finalUrl, finalOptions) + } + + defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => + makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) + return defaultedFetch +} + +module.exports = makeFetchHappen +module.exports.FetchError = FetchError +module.exports.Headers = Headers +module.exports.Request = Request +module.exports.Response = Response diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js new file mode 100644 index 0000000000000..f77511279f831 --- /dev/null +++ b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js @@ -0,0 +1,54 @@ +const dns = require('dns') + +const conditionalHeaders = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 'if-match', + 'if-range', +] + +const configureOptions = (opts) => { + const { strictSSL, ...options } = { ...opts } + 
options.method = options.method ? options.method.toUpperCase() : 'GET' + options.rejectUnauthorized = strictSSL !== false + + if (!options.retry) { + options.retry = { retries: 0 } + } else if (typeof options.retry === 'string') { + const retries = parseInt(options.retry, 10) + if (isFinite(retries)) { + options.retry = { retries } + } else { + options.retry = { retries: 0 } + } + } else if (typeof options.retry === 'number') { + options.retry = { retries: options.retry } + } else { + options.retry = { retries: 0, ...options.retry } + } + + options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } + + options.cache = options.cache || 'default' + if (options.cache === 'default') { + const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { + return conditionalHeaders.includes(name.toLowerCase()) + }) + if (hasConditionalHeader) { + options.cache = 'no-store' + } + } + + options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] + + // cacheManager is deprecated, but if it's set and + // cachePath is not we should copy it to the new field + if (options.cacheManager && !options.cachePath) { + options.cachePath = options.cacheManager + } + + return options +} + +module.exports = configureOptions diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js new file mode 100644 index 0000000000000..b1d221b2d0ce3 --- /dev/null +++ b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js @@ -0,0 +1,41 @@ +'use strict' + +const MinipassPipeline = require('minipass-pipeline') + +class CachingMinipassPipeline extends MinipassPipeline { + #events = [] + #data = new Map() + + constructor (opts, ...streams) { + // CRITICAL: do NOT pass the streams to the call to super(), this will start + // the flow of data and potentially cause the events we need to catch to emit + // before we've finished our own setup. instead we call super() with no args, + // finish our setup, and then push the streams into ourselves to start the + // data flow + super() + this.#events = opts.events + + /* istanbul ignore next - coverage disabled because this is pointless to test here */ + if (streams.length) { + this.push(...streams) + } + } + + on (event, handler) { + if (this.#events.includes(event) && this.#data.has(event)) { + return handler(...this.#data.get(event)) + } + + return super.on(event, handler) + } + + emit (event, ...data) { + if (this.#events.includes(event)) { + this.#data.set(event, data) + } + + return super.emit(event, ...data) + } +} + +module.exports = CachingMinipassPipeline diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js new file mode 100644 index 0000000000000..bdbcc79cad908 --- /dev/null +++ b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js @@ -0,0 +1,121 @@ +const { Minipass } = require('minipass') +const fetch = require('minipass-fetch') +const promiseRetry = require('promise-retry') +const ssri = require('ssri') + +const CachingMinipassPipeline = require('./pipeline.js') +const getAgent = require('./agent.js') +const pkg = require('../package.json') + +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) 
+ 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) +] + +const RETRY_TYPES = [ + 'request-timeout', +] + +// make a request directly to the remote source, +// retrying certain classes of errors as well as +// following redirects (through the cache if necessary) +// and verifying response integrity +const remoteFetch = (request, options) => { + const agent = getAgent(request.url, options) + if (!request.headers.has('connection')) { + request.headers.set('connection', agent ? 'keep-alive' : 'close') + } + + if (!request.headers.has('user-agent')) { + request.headers.set('user-agent', USER_AGENT) + } + + // keep our own options since we're overriding the agent + // and the redirect mode + const _opts = { + ...options, + agent, + redirect: 'manual', + } + + return promiseRetry(async (retryHandler, attemptNum) => { + const req = new fetch.Request(request, _opts) + try { + let res = await fetch(req, _opts) + if (_opts.integrity && res.status === 200) { + // we got a 200 response and the user has specified an expected + // integrity value, so wrap the response in an ssri stream to verify it + const integrityStream = ssri.integrityStream({ + algorithms: _opts.algorithms, + integrity: _opts.integrity, + size: _opts.size, + }) + const pipeline = new CachingMinipassPipeline({ + events: ['integrity', 'size'], + }, res.body, integrityStream) + // we also propagate the integrity and size events out to the pipeline so we can use + // this new response body as an integrityEmitter for cacache + integrityStream.on('integrity', i => pipeline.emit('integrity', i)) + integrityStream.on('size', s => pipeline.emit('size', s)) + res = new fetch.Response(pipeline, res) + // set an explicit flag so we know if our response body will emit integrity and size + res.body.hasIntegrityEmitter = true + } + + res.headers.set('x-fetch-attempts', attemptNum) + + // do not retry POST requests, or requests with a streaming body + // do retry requests with a 408, 420, 429 or 500+ status in the response + const isStream = Minipass.isStream(req.body) + const isRetriable = req.method !== 'POST' && + !isStream && + ([408, 420, 429].includes(res.status) || res.status >= 500) + + if (isRetriable) { + if (typeof options.onRetry === 'function') { + options.onRetry(res) + } + + return retryHandler(res) + } + + return res + } catch (err) { + const code = (err.code === 'EPROMISERETRY') + ? 
err.retried.code + : err.code + + // err.retried will be the thing that was thrown from above + // if it's a response, we just got a bad status code and we + // can re-throw to allow the retry + const isRetryError = err.retried instanceof fetch.Response || + (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) + + if (req.method === 'POST' || isRetryError) { + throw err + } + + if (typeof options.onRetry === 'function') { + options.onRetry(err) + } + + return retryHandler(err) + } + }, options.retry).catch((err) => { + // don't reject for http errors, just return them + if (err.status >= 400 && err.type !== 'system') { + return err + } + + throw err + }) +} + +module.exports = remoteFetch diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/package.json b/node_modules/tuf-js/node_modules/make-fetch-happen/package.json new file mode 100644 index 0000000000000..fd415dc9966fa --- /dev/null +++ b/node_modules/tuf-js/node_modules/make-fetch-happen/package.json @@ -0,0 +1,78 @@ +{ + "name": "make-fetch-happen", + "version": "11.1.1", + "description": "Opinionated, caching, retrying fetch client", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "posttest": "npm run lint", + "eslint": "eslint", + "lint": "eslint \"**/*.js\"", + "lintfix": "npm run lint -- --fix", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/make-fetch-happen.git" + }, + "keywords": [ + "http", + "request", + "fetch", + "mean girls", + "caching", + "cache", + "subresource integrity" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "agentkeepalive": "^4.2.1", + "cacache": "^17.0.0", + "http-cache-semantics": "^4.1.1", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^5.0.0", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + "ssri": "^10.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.14.1", + "nock": "^13.2.4", + "safe-buffer": "^5.2.1", + "standard-version": "^9.3.2", + "tap": "^16.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "tap": { + "color": 1, + "files": "test/*.js", + "check-coverage": true, + "timeout": 60, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.14.1", + "publish": "true" + } +} diff --git a/package-lock.json b/package-lock.json index b0439e2edec9f..1ea38c4cc674d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -122,7 +122,7 @@ "libnpmsearch": "^6.0.2", "libnpmteam": "^5.0.3", "libnpmversion": "^4.0.2", - "make-fetch-happen": "^11.1.1", + "make-fetch-happen": "^12.0.0", "minimatch": "^9.0.3", "minipass": "^5.0.0", "minipass-pipeline": "^1.2.4", @@ -2473,6 +2473,31 @@ "node": "^16.13.0 || >=18.0.0" } }, + "node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen": { + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz", + "integrity": "sha512-rLWS7GCSTcEujjVBs2YqG7Y4643u8ucvCJeSRqiLYhesrDuzeuFIk37xREzAsfQaqzl8b9rNCE4m6J8tvX4Q8w==", + "dependencies": { + "agentkeepalive": "^4.2.1", + "cacache": "^17.0.0", + "http-cache-semantics": "^4.1.1", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^5.0.0", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + "ssri": "^10.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch": { "version": "14.0.5", "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-14.0.5.tgz", @@ -3090,13 +3115,11 @@ } }, "node_modules/agentkeepalive": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.3.0.tgz", - "integrity": "sha512-7Epl1Blf4Sy37j4v9f9FjICCh4+KAQOyXgHEwlyBiAQLbhKdq/i2QQU3amQalS/wPhdPzDXPL5DMR5bkn+YeWg==", + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.5.0.tgz", + "integrity": "sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==", "inBundle": true, "dependencies": { - "debug": "^4.1.0", - "depd": "^2.0.0", "humanize-ms": "^1.2.1" }, "engines": { @@ -4548,15 +4571,6 @@ "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", "inBundle": true }, - "node_modules/depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "inBundle": true, - "engines": { - "node": ">= 0.8" - } - }, "node_modules/deprecation": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", @@ -7956,29 +7970,34 @@ "dev": true }, "node_modules/make-fetch-happen": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz", - "integrity": "sha512-rLWS7GCSTcEujjVBs2YqG7Y4643u8ucvCJeSRqiLYhesrDuzeuFIk37xREzAsfQaqzl8b9rNCE4m6J8tvX4Q8w==", + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-12.0.0.tgz", + "integrity": "sha512-xpuA2kA8Z66uGQjaSXd7rffqJOv60iYpP8X0TsZl3uwXlqxUVmHETImjM71JOPA694TlcX37GhlaCsl6z6fNVg==", "inBundle": true, "dependencies": { - "agentkeepalive": "^4.2.1", + "@npmcli/agent": "^1.1.0", "cacache": "^17.0.0", "http-cache-semantics": "^4.1.1", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.0", "is-lambda": "^1.0.1", - "lru-cache": "^7.7.1", - "minipass": 
"^5.0.0", + "minipass": "^7.0.2", "minipass-fetch": "^3.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", "promise-retry": "^2.0.1", - "socks-proxy-agent": "^7.0.0", "ssri": "^10.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/make-fetch-happen/node_modules/minipass": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz", + "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==", + "inBundle": true, + "engines": { + "node": ">=16 || 14 >=14.17" } }, "node_modules/map-obj": { @@ -9437,6 +9456,32 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/node-gyp/node_modules/make-fetch-happen": { + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz", + "integrity": "sha512-rLWS7GCSTcEujjVBs2YqG7Y4643u8ucvCJeSRqiLYhesrDuzeuFIk37xREzAsfQaqzl8b9rNCE4m6J8tvX4Q8w==", + "inBundle": true, + "dependencies": { + "agentkeepalive": "^4.2.1", + "cacache": "^17.0.0", + "http-cache-semantics": "^4.1.1", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^5.0.0", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + "ssri": "^10.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/node-gyp/node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -9702,28 +9747,6 @@ "node": "^16.13.0 || >=18.0.0" } }, - "node_modules/npm-registry-fetch/node_modules/make-fetch-happen": { - "version": "12.0.0", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-12.0.0.tgz", - "integrity": "sha512-xpuA2kA8Z66uGQjaSXd7rffqJOv60iYpP8X0TsZl3uwXlqxUVmHETImjM71JOPA694TlcX37GhlaCsl6z6fNVg==", - "inBundle": true, - "dependencies": { - "@npmcli/agent": "^1.1.0", - "cacache": "^17.0.0", - "http-cache-semantics": "^4.1.1", - "is-lambda": "^1.0.1", - "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "promise-retry": "^2.0.1", - "ssri": "^10.0.0" - }, - "engines": { - "node": "^16.13.0 || >=18.0.0" - } - }, "node_modules/npm-registry-fetch/node_modules/minipass": { "version": "7.0.2", "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.2.tgz", @@ -11655,6 +11678,32 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/sigstore/node_modules/make-fetch-happen": { + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz", + "integrity": "sha512-rLWS7GCSTcEujjVBs2YqG7Y4643u8ucvCJeSRqiLYhesrDuzeuFIk37xREzAsfQaqzl8b9rNCE4m6J8tvX4Q8w==", + "inBundle": true, + "dependencies": { + "agentkeepalive": "^4.2.1", + "cacache": "^17.0.0", + "http-cache-semantics": "^4.1.1", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^5.0.0", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + "ssri": "^10.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || 
>=18.0.0" + } + }, "node_modules/slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", @@ -14772,6 +14821,32 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/tuf-js/node_modules/make-fetch-happen": { + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz", + "integrity": "sha512-rLWS7GCSTcEujjVBs2YqG7Y4643u8ucvCJeSRqiLYhesrDuzeuFIk37xREzAsfQaqzl8b9rNCE4m6J8tvX4Q8w==", + "inBundle": true, + "dependencies": { + "agentkeepalive": "^4.2.1", + "cacache": "^17.0.0", + "http-cache-semantics": "^4.1.1", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^5.0.0", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + "ssri": "^10.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", diff --git a/package.json b/package.json index 1ec72091c4c11..6f416e2ab0289 100644 --- a/package.json +++ b/package.json @@ -87,7 +87,7 @@ "libnpmsearch": "^6.0.2", "libnpmteam": "^5.0.3", "libnpmversion": "^4.0.2", - "make-fetch-happen": "^11.1.1", + "make-fetch-happen": "^12.0.0", "minimatch": "^9.0.3", "minipass": "^5.0.0", "minipass-pipeline": "^1.2.4", From 5902b4bcc698667fec909d3b5437050545968515 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Mon, 14 Aug 2023 12:10:41 -0700 Subject: [PATCH 21/68] deps: ssri@10.0.5 --- node_modules/.gitignore | 3 + .../ssri/node_modules/minipass/LICENSE | 15 + .../node_modules/minipass/dist/cjs/index.js | 1028 +++++++++++++++++ .../minipass/dist/cjs/package.json | 3 + .../node_modules/minipass/dist/mjs/index.js | 1018 ++++++++++++++++ .../minipass/dist/mjs/package.json | 3 + .../ssri/node_modules/minipass/package.json | 82 ++ node_modules/ssri/package.json | 8 +- package-lock.json | 23 +- package.json | 2 +- workspaces/arborist/package.json | 2 +- workspaces/libnpmpublish/package.json | 2 +- 12 files changed, 2175 insertions(+), 14 deletions(-) create mode 100644 node_modules/ssri/node_modules/minipass/LICENSE create mode 100644 node_modules/ssri/node_modules/minipass/dist/cjs/index.js create mode 100644 node_modules/ssri/node_modules/minipass/dist/cjs/package.json create mode 100644 node_modules/ssri/node_modules/minipass/dist/mjs/index.js create mode 100644 node_modules/ssri/node_modules/minipass/dist/mjs/package.json create mode 100644 node_modules/ssri/node_modules/minipass/package.json diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 52d4301bed174..1fd9ff59b3c03 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -270,6 +270,9 @@ !/spdx-expression-parse !/spdx-license-ids !/ssri +!/ssri/node_modules/ +/ssri/node_modules/* +!/ssri/node_modules/minipass !/string_decoder !/string-width-cjs !/string-width diff --git a/node_modules/ssri/node_modules/minipass/LICENSE b/node_modules/ssri/node_modules/minipass/LICENSE new file mode 100644 index 0000000000000..97f8e32ed82e4 --- /dev/null +++ b/node_modules/ssri/node_modules/minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2017-2023 npm, Inc., Isaac Z. 
Schlueter, and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/ssri/node_modules/minipass/dist/cjs/index.js b/node_modules/ssri/node_modules/minipass/dist/cjs/index.js new file mode 100644 index 0000000000000..b6cdae8eb514b --- /dev/null +++ b/node_modules/ssri/node_modules/minipass/dist/cjs/index.js @@ -0,0 +1,1028 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0; +const proc = typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + }; +const events_1 = require("events"); +const stream_1 = __importDefault(require("stream")); +const string_decoder_1 = require("string_decoder"); +/** + * Return true if the argument is a Minipass stream, Node stream, or something + * else that Minipass can interact with. + */ +const isStream = (s) => !!s && + typeof s === 'object' && + (s instanceof Minipass || + s instanceof stream_1.default || + (0, exports.isReadable)(s) || + (0, exports.isWritable)(s)); +exports.isStream = isStream; +/** + * Return true if the argument is a valid {@link Minipass.Readable} + */ +const isReadable = (s) => !!s && + typeof s === 'object' && + s instanceof events_1.EventEmitter && + typeof s.pipe === 'function' && + // node core Writable streams have a pipe() method, but it throws + s.pipe !== stream_1.default.Writable.prototype.pipe; +exports.isReadable = isReadable; +/** + * Return true if the argument is a valid {@link Minipass.Writable} + */ +const isWritable = (s) => !!s && + typeof s === 'object' && + s instanceof events_1.EventEmitter && + typeof s.write === 'function' && + typeof s.end === 'function'; +exports.isWritable = isWritable; +const EOF = Symbol('EOF'); +const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); +const EMITTED_END = Symbol('emittedEnd'); +const EMITTING_END = Symbol('emittingEnd'); +const EMITTED_ERROR = Symbol('emittedError'); +const CLOSED = Symbol('closed'); +const READ = Symbol('read'); +const FLUSH = Symbol('flush'); +const FLUSHCHUNK = Symbol('flushChunk'); +const ENCODING = Symbol('encoding'); +const DECODER = Symbol('decoder'); +const FLOWING = Symbol('flowing'); +const PAUSED = Symbol('paused'); +const RESUME = Symbol('resume'); +const BUFFER = Symbol('buffer'); +const PIPES = Symbol('pipes'); +const BUFFERLENGTH = Symbol('bufferLength'); +const BUFFERPUSH = Symbol('bufferPush'); +const BUFFERSHIFT = Symbol('bufferShift'); +const OBJECTMODE = Symbol('objectMode'); +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed'); +// internal event when stream has an error +const ERROR = Symbol('error'); +const EMITDATA = Symbol('emitData'); +const 
EMITEND = Symbol('emitEnd'); +const EMITEND2 = Symbol('emitEnd2'); +const ASYNC = Symbol('async'); +const ABORT = Symbol('abort'); +const ABORTED = Symbol('aborted'); +const SIGNAL = Symbol('signal'); +const DATALISTENERS = Symbol('dataListeners'); +const DISCARDED = Symbol('discarded'); +const defer = (fn) => Promise.resolve().then(fn); +const nodefer = (fn) => fn(); +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; +const isArrayBufferLike = (b) => b instanceof ArrayBuffer || + (!!b && + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0); +const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); +/** + * Internal class representing a pipe to a destination stream. + * + * @internal + */ +class Pipe { + src; + dest; + opts; + ondrain; + constructor(src, dest, opts) { + this.src = src; + this.dest = dest; + this.opts = opts; + this.ondrain = () => src[RESUME](); + this.dest.on('drain', this.ondrain); + } + unpipe() { + this.dest.removeListener('drain', this.ondrain); + } + // only here for the prototype + /* c8 ignore start */ + proxyErrors(_er) { } + /* c8 ignore stop */ + end() { + this.unpipe(); + if (this.opts.end) + this.dest.end(); + } +} +/** + * Internal class representing a pipe to a destination stream where + * errors are proxied. + * + * @internal + */ +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors); + super.unpipe(); + } + constructor(src, dest, opts) { + super(src, dest, opts); + this.proxyErrors = er => dest.emit('error', er); + src.on('error', this.proxyErrors); + } +} +const isObjectModeOptions = (o) => !!o.objectMode; +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; +/** + * Main export, the Minipass class + * + * `RType` is the type of data emitted, defaults to Buffer + * + * `WType` is the type of data to be written, if RType is buffer or string, + * then any {@link Minipass.ContiguousData} is allowed. + * + * `Events` is the set of event handler signatures that this object + * will emit, see {@link Minipass.Events} + */ +class Minipass extends events_1.EventEmitter { + [FLOWING] = false; + [PAUSED] = false; + [PIPES] = []; + [BUFFER] = []; + [OBJECTMODE]; + [ENCODING]; + [ASYNC]; + [DECODER]; + [EOF] = false; + [EMITTED_END] = false; + [EMITTING_END] = false; + [CLOSED] = false; + [EMITTED_ERROR] = null; + [BUFFERLENGTH] = 0; + [DESTROYED] = false; + [SIGNAL]; + [ABORTED] = false; + [DATALISTENERS] = 0; + [DISCARDED] = false; + /** + * true if the stream can be written + */ + writable = true; + /** + * true if the stream can be read + */ + readable = true; + /** + * If `RType` is Buffer, then options do not need to be provided. + * Otherwise, an options object must be provided to specify either + * {@link Minipass.SharedOptions.objectMode} or + * {@link Minipass.SharedOptions.encoding}, as appropriate. + */ + constructor(...args) { + const options = (args[0] || + {}); + super(); + if (options.objectMode && typeof options.encoding === 'string') { + throw new TypeError('Encoding and objectMode may not be used together'); + } + if (isObjectModeOptions(options)) { + this[OBJECTMODE] = true; + this[ENCODING] = null; + } + else if (isEncodingOptions(options)) { + this[ENCODING] = options.encoding; + this[OBJECTMODE] = false; + } + else { + this[OBJECTMODE] = false; + this[ENCODING] = null; + } + this[ASYNC] = !!options.async; + this[DECODER] = this[ENCODING] + ? 
new string_decoder_1.StringDecoder(this[ENCODING]) + : null; + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); + } + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); + } + const { signal } = options; + if (signal) { + this[SIGNAL] = signal; + if (signal.aborted) { + this[ABORT](); + } + else { + signal.addEventListener('abort', () => this[ABORT]()); + } + } + } + /** + * The amount of data stored in the buffer waiting to be read. + * + * For Buffer strings, this will be the total byte length. + * For string encoding streams, this will be the string character length, + * according to JavaScript's `string.length` logic. + * For objectMode streams, this is a count of the items waiting to be + * emitted. + */ + get bufferLength() { + return this[BUFFERLENGTH]; + } + /** + * The `BufferEncoding` currently in use, or `null` + */ + get encoding() { + return this[ENCODING]; + } + /** + * @deprecated - This is a read only property + */ + set encoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * @deprecated - Encoding may only be set at instantiation time + */ + setEncoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * True if this is an objectMode stream + */ + get objectMode() { + return this[OBJECTMODE]; + } + /** + * @deprecated - This is a read-only property + */ + set objectMode(_om) { + throw new Error('objectMode must be set at instantiation time'); + } + /** + * true if this is an async stream + */ + get ['async']() { + return this[ASYNC]; + } + /** + * Set to true to make this stream async. + * + * Once set, it cannot be unset, as this would potentially cause incorrect + * behavior. Ie, a sync stream can be made async, but an async stream + * cannot be safely made sync. + */ + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a; + } + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true; + this.emit('abort', this[SIGNAL]?.reason); + this.destroy(this[SIGNAL]?.reason); + } + /** + * True if the stream has been aborted. + */ + get aborted() { + return this[ABORTED]; + } + /** + * No-op setter. Stream aborted status is set via the AbortSignal provided + * in the constructor options. + */ + set aborted(_) { } + write(chunk, encoding, cb) { + if (this[ABORTED]) + return false; + if (this[EOF]) + throw new Error('write after end'); + if (this[DESTROYED]) { + this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); + return true; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (!encoding) + encoding = 'utf8'; + const fn = this[ASYNC] ? defer : nodefer; + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! 
+ // leave strings and buffers as-is + // anything is only allowed if in object mode, so throw + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); + } + else if (isArrayBufferLike(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk); + } + else if (typeof chunk !== 'string') { + throw new Error('Non-contiguous data written to non-objectMode stream'); + } + } + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + // maybe impossible? + /* c8 ignore start */ + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + /* c8 ignore stop */ + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { + //@ts-ignore - sinful unsafe type change + chunk = Buffer.from(chunk, encoding); + } + if (Buffer.isBuffer(chunk) && this[ENCODING]) { + //@ts-ignore - sinful unsafe type change + chunk = this[DECODER].write(chunk); + } + // Note: flushing CAN potentially switch us into not-flowing mode + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + /** + * Low-level explicit read method. + * + * In objectMode, the argument is ignored, and one item is returned if + * available. + * + * `n` is the number of bytes (or in the case of encoding streams, + * characters) to consume. If `n` is not provided, then the entire buffer + * is returned, or `null` is returned if no data is available. + * + * If `n` is greater that the amount of data in the internal buffer, + * then `null` is returned. + */ + read(n) { + if (this[DESTROYED]) + return null; + this[DISCARDED] = false; + if (this[BUFFERLENGTH] === 0 || + n === 0 || + (n && n > this[BUFFERLENGTH])) { + this[MAYBE_EMIT_END](); + return null; + } + if (this[OBJECTMODE]) + n = null; + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + // not object mode, so if we have an encoding, then RType is string + // otherwise, must be Buffer + this[BUFFER] = [ + (this[ENCODING] + ? 
this[BUFFER].join('') + : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), + ]; + } + const ret = this[READ](n || null, this[BUFFER][0]); + this[MAYBE_EMIT_END](); + return ret; + } + [READ](n, chunk) { + if (this[OBJECTMODE]) + this[BUFFERSHIFT](); + else { + const c = chunk; + if (n === c.length || n === null) + this[BUFFERSHIFT](); + else if (typeof c === 'string') { + this[BUFFER][0] = c.slice(n); + chunk = c.slice(0, n); + this[BUFFERLENGTH] -= n; + } + else { + this[BUFFER][0] = c.subarray(n); + chunk = c.subarray(0, n); + this[BUFFERLENGTH] -= n; + } + } + this.emit('data', chunk); + if (!this[BUFFER].length && !this[EOF]) + this.emit('drain'); + return chunk; + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (chunk !== undefined) + this.write(chunk, encoding); + if (cb) + this.once('end', cb); + this[EOF] = true; + this.writable = false; + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. + if (this[FLOWING] || !this[PAUSED]) + this[MAYBE_EMIT_END](); + return this; + } + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) + return; + if (!this[DATALISTENERS] && !this[PIPES].length) { + this[DISCARDED] = true; + } + this[PAUSED] = false; + this[FLOWING] = true; + this.emit('resume'); + if (this[BUFFER].length) + this[FLUSH](); + else if (this[EOF]) + this[MAYBE_EMIT_END](); + else + this.emit('drain'); + } + /** + * Resume the stream if it is currently in a paused state + * + * If called when there are no pipe destinations or `data` event listeners, + * this will place the stream in a "discarded" state, where all data will + * be thrown away. The discarded state is removed if a pipe destination or + * data handler is added, if pause() is called, or if any synchronous or + * asynchronous iteration is started. + */ + resume() { + return this[RESUME](); + } + /** + * Pause the stream + */ + pause() { + this[FLOWING] = false; + this[PAUSED] = true; + this[DISCARDED] = false; + } + /** + * true if the stream has been forcibly destroyed + */ + get destroyed() { + return this[DESTROYED]; + } + /** + * true if the stream is currently in a flowing state, meaning that + * any writes will be immediately emitted. + */ + get flowing() { + return this[FLOWING]; + } + /** + * true if the stream is currently in a paused state + */ + get paused() { + return this[PAUSED]; + } + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1; + else + this[BUFFERLENGTH] += chunk.length; + this[BUFFER].push(chunk); + } + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1; + else + this[BUFFERLENGTH] -= this[BUFFER][0].length; + return this[BUFFER].shift(); + } + [FLUSH](noDrain = false) { + do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && + this[BUFFER].length); + if (!noDrain && !this[BUFFER].length && !this[EOF]) + this.emit('drain'); + } + [FLUSHCHUNK](chunk) { + this.emit('data', chunk); + return this[FLOWING]; + } + /** + * Pipe all data emitted by this stream into the destination provided. + * + * Triggers the flow of data. 
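+ *
+ * A minimal usage sketch (illustrative only; assumes a utf8 stream and
+ * a writable destination):
+ *
+ *     const { Minipass } = require('minipass')
+ *     const src = new Minipass({ encoding: 'utf8' })
+ *     src.pipe(process.stdout) // end() is suppressed for stdout/stderr
+ *     src.write('hello, ')
+ *     src.end('world\n')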
+ */ + pipe(dest, opts) { + if (this[DESTROYED]) + return dest; + this[DISCARDED] = false; + const ended = this[EMITTED_END]; + opts = opts || {}; + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false; + else + opts.end = opts.end !== false; + opts.proxyErrors = !!opts.proxyErrors; + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end(); + } + else { + // "as" here just ignores the WType, which pipes don't care about, + // since they're only consuming from us, and writing to the dest + this[PIPES].push(!opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)); + if (this[ASYNC]) + defer(() => this[RESUME]()); + else + this[RESUME](); + } + return dest; + } + /** + * Fully unhook a piped destination stream. + * + * If the destination stream was the only consumer of this stream (ie, + * there are no other piped destinations or `'data'` event listeners) + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest); + if (p) { + if (this[PIPES].length === 1) { + if (this[FLOWING] && this[DATALISTENERS] === 0) { + this[FLOWING] = false; + } + this[PIPES] = []; + } + else + this[PIPES].splice(this[PIPES].indexOf(p), 1); + p.unpipe(); + } + } + /** + * Alias for {@link Minipass#on} + */ + addListener(ev, handler) { + return this.on(ev, handler); + } + /** + * Mostly identical to `EventEmitter.on`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * - Adding a 'data' event handler will trigger the flow of data + * + * - Adding a 'readable' event handler when there is data waiting to be read + * will cause 'readable' to be emitted immediately. + * + * - Adding an 'endish' event handler ('end', 'finish', etc.) which has + * already passed will cause the event to be emitted immediately and all + * handlers removed. + * + * - Adding an 'error' event handler after an error has been emitted will + * cause the event to be re-emitted immediately with the error previously + * raised. + */ + on(ev, handler) { + const ret = super.on(ev, handler); + if (ev === 'data') { + this[DISCARDED] = false; + this[DATALISTENERS]++; + if (!this[PIPES].length && !this[FLOWING]) { + this[RESUME](); + } + } + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { + super.emit('readable'); + } + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev); + this.removeAllListeners(ev); + } + else if (ev === 'error' && this[EMITTED_ERROR]) { + const h = handler; + if (this[ASYNC]) + defer(() => h.call(this, this[EMITTED_ERROR])); + else + h.call(this, this[EMITTED_ERROR]); + } + return ret; + } + /** + * Alias for {@link Minipass#off} + */ + removeListener(ev, handler) { + return this.off(ev, handler); + } + /** + * Mostly identical to `EventEmitter.off` + * + * If a 'data' event handler is removed, and it was the last consumer + * (ie, there are no pipe destinations or other 'data' event listeners), + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + off(ev, handler) { + const ret = super.off(ev, handler); + // if we previously had listeners, and now we don't, and we don't + // have any pipes, then stop the flow, unless it's been explicitly + // put in a discarded flowing state via stream.resume(). 
+ if (ev === 'data') { + this[DATALISTENERS] = this.listeners('data').length; + if (this[DATALISTENERS] === 0 && + !this[DISCARDED] && + !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * Mostly identical to `EventEmitter.removeAllListeners` + * + * If all 'data' event handlers are removed, and they were the last consumer + * (ie, there are no pipe destinations), then the flow of data will stop + * until there is another consumer or {@link Minipass#resume} is explicitly + * called. + */ + removeAllListeners(ev) { + const ret = super.removeAllListeners(ev); + if (ev === 'data' || ev === undefined) { + this[DATALISTENERS] = 0; + if (!this[DISCARDED] && !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * true if the 'end' event has been emitted + */ + get emittedEnd() { + return this[EMITTED_END]; + } + [MAYBE_EMIT_END]() { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF]) { + this[EMITTING_END] = true; + this.emit('end'); + this.emit('prefinish'); + this.emit('finish'); + if (this[CLOSED]) + this.emit('close'); + this[EMITTING_END] = false; + } + } + /** + * Mostly identical to `EventEmitter.emit`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * If the stream has been destroyed, and the event is something other + * than 'close' or 'error', then `false` is returned and no handlers + * are called. + * + * If the event is 'end', and has already been emitted, then the event + * is ignored. If the stream is in a paused or non-flowing state, then + * the event will be deferred until data flow resumes. If the stream is + * async, then handlers will be called on the next tick rather than + * immediately. + * + * If the event is 'close', and 'end' has not yet been emitted, then + * the event will be deferred until after 'end' is emitted. + * + * If the event is 'error', and an AbortSignal was provided for the stream, + * and there are no listeners, then the event is ignored, matching the + * behavior of node core streams in the presense of an AbortSignal. + * + * If the event is 'finish' or 'prefinish', then all listeners will be + * removed after emitting the event, to prevent double-firing. + */ + emit(ev, ...args) { + const data = args[0]; + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && + ev !== 'close' && + ev !== DESTROYED && + this[DESTROYED]) { + return false; + } + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? (defer(() => this[EMITDATA](data)), true) + : this[EMITDATA](data); + } + else if (ev === 'end') { + return this[EMITEND](); + } + else if (ev === 'close') { + this[CLOSED] = true; + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return false; + const ret = super.emit('close'); + this.removeAllListeners('close'); + return ret; + } + else if (ev === 'error') { + this[EMITTED_ERROR] = data; + super.emit(ERROR, data); + const ret = !this[SIGNAL] || this.listeners('error').length + ? 
super.emit('error', data) + : false; + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'resume') { + const ret = super.emit('resume'); + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev); + this.removeAllListeners(ev); + return ret; + } + // Some other unknown event + const ret = super.emit(ev, ...args); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) + this.pause(); + } + const ret = this[DISCARDED] ? false : super.emit('data', data); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITEND]() { + if (this[EMITTED_END]) + return false; + this[EMITTED_END] = true; + this.readable = false; + return this[ASYNC] + ? (defer(() => this[EMITEND2]()), true) + : this[EMITEND2](); + } + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end(); + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data); + } + if (!this[DISCARDED]) + super.emit('data', data); + } + } + for (const p of this[PIPES]) { + p.end(); + } + const ret = super.emit('end'); + this.removeAllListeners('end'); + return ret; + } + /** + * Return a Promise that resolves to an array of all emitted data once + * the stream ends. + */ + async collect() { + const buf = Object.assign([], { + dataLength: 0, + }); + if (!this[OBJECTMODE]) + buf.dataLength = 0; + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise(); + this.on('data', c => { + buf.push(c); + if (!this[OBJECTMODE]) + buf.dataLength += c.length; + }); + await p; + return buf; + } + /** + * Return a Promise that resolves to the concatenation of all emitted data + * once the stream ends. + * + * Not allowed on objectMode streams. + */ + async concat() { + if (this[OBJECTMODE]) { + throw new Error('cannot concat in objectMode'); + } + const buf = await this.collect(); + return (this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength)); + } + /** + * Return a void Promise that resolves once the stream ends. + */ + async promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))); + this.on('error', er => reject(er)); + this.on('end', () => resolve()); + }); + } + /** + * Asynchronous `for await of` iteration. + * + * This will continue emitting all chunks until the stream terminates. + */ + [Symbol.asyncIterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. 
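+ // Illustrative sketch (not upstream code): synchronous iteration only
+ // drains what is already buffered, e.g.
+ //   const mp = new Minipass({ objectMode: true })
+ //   mp.write(1); mp.write(2)
+ //   for (const item of mp) console.log(item) // 1, 2, then stops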
+ this[DISCARDED] = false; + let stopped = false; + const stop = async () => { + this.pause(); + stopped = true; + return { value: undefined, done: true }; + }; + const next = () => { + if (stopped) + return stop(); + const res = this.read(); + if (res !== null) + return Promise.resolve({ done: false, value: res }); + if (this[EOF]) + return stop(); + let resolve; + let reject; + const onerr = (er) => { + this.off('data', ondata); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + stop(); + reject(er); + }; + const ondata = (value) => { + this.off('error', onerr); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + this.pause(); + resolve({ value, done: !!this[EOF] }); + }; + const onend = () => { + this.off('error', onerr); + this.off('data', ondata); + this.off(DESTROYED, ondestroy); + stop(); + resolve({ done: true, value: undefined }); + }; + const ondestroy = () => onerr(new Error('stream destroyed')); + return new Promise((res, rej) => { + reject = rej; + resolve = res; + this.once(DESTROYED, ondestroy); + this.once('error', onerr); + this.once('end', onend); + this.once('data', ondata); + }); + }; + return { + next, + throw: stop, + return: stop, + [Symbol.asyncIterator]() { + return this; + }, + }; + } + /** + * Synchronous `for of` iteration. + * + * The iteration will terminate when the internal buffer runs out, even + * if the stream has not yet terminated. + */ + [Symbol.iterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. + this[DISCARDED] = false; + let stopped = false; + const stop = () => { + this.pause(); + this.off(ERROR, stop); + this.off(DESTROYED, stop); + this.off('end', stop); + stopped = true; + return { done: true, value: undefined }; + }; + const next = () => { + if (stopped) + return stop(); + const value = this.read(); + return value === null ? stop() : { done: false, value }; + }; + this.once('end', stop); + this.once(ERROR, stop); + this.once(DESTROYED, stop); + return { + next, + throw: stop, + return: stop, + [Symbol.iterator]() { + return this; + }, + }; + } + /** + * Destroy a stream, preventing it from being used for any further purpose. + * + * If the stream has a `close()` method, then it will be called on + * destruction. + * + * After destruction, any attempt to write data, read data, or emit most + * events will be ignored. + * + * If an error argument is provided, then it will be emitted in an + * 'error' event. + */ + destroy(er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er); + else + this.emit(DESTROYED); + return this; + } + this[DESTROYED] = true; + this[DISCARDED] = true; + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0; + this[BUFFERLENGTH] = 0; + const wc = this; + if (typeof wc.close === 'function' && !this[CLOSED]) + wc.close(); + if (er) + this.emit('error', er); + // if no error to emit, still reject pending promises + else + this.emit(DESTROYED); + return this; + } + /** + * Alias for {@link isStream} + * + * Former export location, maintained for backwards compatibility. 
+ * + * @deprecated + */ + static get isStream() { + return exports.isStream; + } +} +exports.Minipass = Minipass; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/ssri/node_modules/minipass/dist/cjs/package.json b/node_modules/ssri/node_modules/minipass/dist/cjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/ssri/node_modules/minipass/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/ssri/node_modules/minipass/dist/mjs/index.js b/node_modules/ssri/node_modules/minipass/dist/mjs/index.js new file mode 100644 index 0000000000000..b65fafbae43a4 --- /dev/null +++ b/node_modules/ssri/node_modules/minipass/dist/mjs/index.js @@ -0,0 +1,1018 @@ +const proc = typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + }; +import { EventEmitter } from 'events'; +import Stream from 'stream'; +import { StringDecoder } from 'string_decoder'; +/** + * Return true if the argument is a Minipass stream, Node stream, or something + * else that Minipass can interact with. + */ +export const isStream = (s) => !!s && + typeof s === 'object' && + (s instanceof Minipass || + s instanceof Stream || + isReadable(s) || + isWritable(s)); +/** + * Return true if the argument is a valid {@link Minipass.Readable} + */ +export const isReadable = (s) => !!s && + typeof s === 'object' && + s instanceof EventEmitter && + typeof s.pipe === 'function' && + // node core Writable streams have a pipe() method, but it throws + s.pipe !== Stream.Writable.prototype.pipe; +/** + * Return true if the argument is a valid {@link Minipass.Writable} + */ +export const isWritable = (s) => !!s && + typeof s === 'object' && + s instanceof EventEmitter && + typeof s.write === 'function' && + typeof s.end === 'function'; +const EOF = Symbol('EOF'); +const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); +const EMITTED_END = Symbol('emittedEnd'); +const EMITTING_END = Symbol('emittingEnd'); +const EMITTED_ERROR = Symbol('emittedError'); +const CLOSED = Symbol('closed'); +const READ = Symbol('read'); +const FLUSH = Symbol('flush'); +const FLUSHCHUNK = Symbol('flushChunk'); +const ENCODING = Symbol('encoding'); +const DECODER = Symbol('decoder'); +const FLOWING = Symbol('flowing'); +const PAUSED = Symbol('paused'); +const RESUME = Symbol('resume'); +const BUFFER = Symbol('buffer'); +const PIPES = Symbol('pipes'); +const BUFFERLENGTH = Symbol('bufferLength'); +const BUFFERPUSH = Symbol('bufferPush'); +const BUFFERSHIFT = Symbol('bufferShift'); +const OBJECTMODE = Symbol('objectMode'); +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed'); +// internal event when stream has an error +const ERROR = Symbol('error'); +const EMITDATA = Symbol('emitData'); +const EMITEND = Symbol('emitEnd'); +const EMITEND2 = Symbol('emitEnd2'); +const ASYNC = Symbol('async'); +const ABORT = Symbol('abort'); +const ABORTED = Symbol('aborted'); +const SIGNAL = Symbol('signal'); +const DATALISTENERS = Symbol('dataListeners'); +const DISCARDED = Symbol('discarded'); +const defer = (fn) => Promise.resolve().then(fn); +const nodefer = (fn) => fn(); +const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; +const isArrayBufferLike = (b) => b instanceof ArrayBuffer || + (!!b && + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0); +const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); 
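+// Illustrative sketch of the type guards above (assumes an ESM consumer;
+// not part of the upstream source):
+//   import { Minipass, isStream } from 'minipass'
+//   isStream(new Minipass())  // true: a Minipass instance
+//   isStream(process.stdout)  // true: a node core stream
+//   isStream({})              // false: not stream-shaped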
+/** + * Internal class representing a pipe to a destination stream. + * + * @internal + */ +class Pipe { + src; + dest; + opts; + ondrain; + constructor(src, dest, opts) { + this.src = src; + this.dest = dest; + this.opts = opts; + this.ondrain = () => src[RESUME](); + this.dest.on('drain', this.ondrain); + } + unpipe() { + this.dest.removeListener('drain', this.ondrain); + } + // only here for the prototype + /* c8 ignore start */ + proxyErrors(_er) { } + /* c8 ignore stop */ + end() { + this.unpipe(); + if (this.opts.end) + this.dest.end(); + } +} +/** + * Internal class representing a pipe to a destination stream where + * errors are proxied. + * + * @internal + */ +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors); + super.unpipe(); + } + constructor(src, dest, opts) { + super(src, dest, opts); + this.proxyErrors = er => dest.emit('error', er); + src.on('error', this.proxyErrors); + } +} +const isObjectModeOptions = (o) => !!o.objectMode; +const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; +/** + * Main export, the Minipass class + * + * `RType` is the type of data emitted, defaults to Buffer + * + * `WType` is the type of data to be written, if RType is buffer or string, + * then any {@link Minipass.ContiguousData} is allowed. + * + * `Events` is the set of event handler signatures that this object + * will emit, see {@link Minipass.Events} + */ +export class Minipass extends EventEmitter { + [FLOWING] = false; + [PAUSED] = false; + [PIPES] = []; + [BUFFER] = []; + [OBJECTMODE]; + [ENCODING]; + [ASYNC]; + [DECODER]; + [EOF] = false; + [EMITTED_END] = false; + [EMITTING_END] = false; + [CLOSED] = false; + [EMITTED_ERROR] = null; + [BUFFERLENGTH] = 0; + [DESTROYED] = false; + [SIGNAL]; + [ABORTED] = false; + [DATALISTENERS] = 0; + [DISCARDED] = false; + /** + * true if the stream can be written + */ + writable = true; + /** + * true if the stream can be read + */ + readable = true; + /** + * If `RType` is Buffer, then options do not need to be provided. + * Otherwise, an options object must be provided to specify either + * {@link Minipass.SharedOptions.objectMode} or + * {@link Minipass.SharedOptions.encoding}, as appropriate. + */ + constructor(...args) { + const options = (args[0] || + {}); + super(); + if (options.objectMode && typeof options.encoding === 'string') { + throw new TypeError('Encoding and objectMode may not be used together'); + } + if (isObjectModeOptions(options)) { + this[OBJECTMODE] = true; + this[ENCODING] = null; + } + else if (isEncodingOptions(options)) { + this[ENCODING] = options.encoding; + this[OBJECTMODE] = false; + } + else { + this[OBJECTMODE] = false; + this[ENCODING] = null; + } + this[ASYNC] = !!options.async; + this[DECODER] = this[ENCODING] + ? new StringDecoder(this[ENCODING]) + : null; + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); + } + //@ts-ignore - private option for debugging and testing + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); + } + const { signal } = options; + if (signal) { + this[SIGNAL] = signal; + if (signal.aborted) { + this[ABORT](); + } + else { + signal.addEventListener('abort', () => this[ABORT]()); + } + } + } + /** + * The amount of data stored in the buffer waiting to be read. 
+ * + * For Buffer strings, this will be the total byte length. + * For string encoding streams, this will be the string character length, + * according to JavaScript's `string.length` logic. + * For objectMode streams, this is a count of the items waiting to be + * emitted. + */ + get bufferLength() { + return this[BUFFERLENGTH]; + } + /** + * The `BufferEncoding` currently in use, or `null` + */ + get encoding() { + return this[ENCODING]; + } + /** + * @deprecated - This is a read only property + */ + set encoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * @deprecated - Encoding may only be set at instantiation time + */ + setEncoding(_enc) { + throw new Error('Encoding must be set at instantiation time'); + } + /** + * True if this is an objectMode stream + */ + get objectMode() { + return this[OBJECTMODE]; + } + /** + * @deprecated - This is a read-only property + */ + set objectMode(_om) { + throw new Error('objectMode must be set at instantiation time'); + } + /** + * true if this is an async stream + */ + get ['async']() { + return this[ASYNC]; + } + /** + * Set to true to make this stream async. + * + * Once set, it cannot be unset, as this would potentially cause incorrect + * behavior. Ie, a sync stream can be made async, but an async stream + * cannot be safely made sync. + */ + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a; + } + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true; + this.emit('abort', this[SIGNAL]?.reason); + this.destroy(this[SIGNAL]?.reason); + } + /** + * True if the stream has been aborted. + */ + get aborted() { + return this[ABORTED]; + } + /** + * No-op setter. Stream aborted status is set via the AbortSignal provided + * in the constructor options. + */ + set aborted(_) { } + write(chunk, encoding, cb) { + if (this[ABORTED]) + return false; + if (this[EOF]) + throw new Error('write after end'); + if (this[DESTROYED]) { + this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); + return true; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (!encoding) + encoding = 'utf8'; + const fn = this[ASYNC] ? defer : nodefer; + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything is only allowed if in object mode, so throw + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); + } + else if (isArrayBufferLike(chunk)) { + //@ts-ignore - sinful unsafe type changing + chunk = Buffer.from(chunk); + } + else if (typeof chunk !== 'string') { + throw new Error('Non-contiguous data written to non-objectMode stream'); + } + } + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + // maybe impossible? 
+ /* c8 ignore start */ + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + /* c8 ignore stop */ + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { + //@ts-ignore - sinful unsafe type change + chunk = Buffer.from(chunk, encoding); + } + if (Buffer.isBuffer(chunk) && this[ENCODING]) { + //@ts-ignore - sinful unsafe type change + chunk = this[DECODER].write(chunk); + } + // Note: flushing CAN potentially switch us into not-flowing mode + if (this[FLOWING] && this[BUFFERLENGTH] !== 0) + this[FLUSH](true); + if (this[FLOWING]) + this.emit('data', chunk); + else + this[BUFFERPUSH](chunk); + if (this[BUFFERLENGTH] !== 0) + this.emit('readable'); + if (cb) + fn(cb); + return this[FLOWING]; + } + /** + * Low-level explicit read method. + * + * In objectMode, the argument is ignored, and one item is returned if + * available. + * + * `n` is the number of bytes (or in the case of encoding streams, + * characters) to consume. If `n` is not provided, then the entire buffer + * is returned, or `null` is returned if no data is available. + * + * If `n` is greater that the amount of data in the internal buffer, + * then `null` is returned. + */ + read(n) { + if (this[DESTROYED]) + return null; + this[DISCARDED] = false; + if (this[BUFFERLENGTH] === 0 || + n === 0 || + (n && n > this[BUFFERLENGTH])) { + this[MAYBE_EMIT_END](); + return null; + } + if (this[OBJECTMODE]) + n = null; + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + // not object mode, so if we have an encoding, then RType is string + // otherwise, must be Buffer + this[BUFFER] = [ + (this[ENCODING] + ? this[BUFFER].join('') + : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), + ]; + } + const ret = this[READ](n || null, this[BUFFER][0]); + this[MAYBE_EMIT_END](); + return ret; + } + [READ](n, chunk) { + if (this[OBJECTMODE]) + this[BUFFERSHIFT](); + else { + const c = chunk; + if (n === c.length || n === null) + this[BUFFERSHIFT](); + else if (typeof c === 'string') { + this[BUFFER][0] = c.slice(n); + chunk = c.slice(0, n); + this[BUFFERLENGTH] -= n; + } + else { + this[BUFFER][0] = c.subarray(n); + chunk = c.subarray(0, n); + this[BUFFERLENGTH] -= n; + } + } + this.emit('data', chunk); + if (!this[BUFFER].length && !this[EOF]) + this.emit('drain'); + return chunk; + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = 'utf8'; + } + if (chunk !== undefined) + this.write(chunk, encoding); + if (cb) + this.once('end', cb); + this[EOF] = true; + this.writable = false; + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this[FLOWING] || !this[PAUSED]) + this[MAYBE_EMIT_END](); + return this; + } + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) + return; + if (!this[DATALISTENERS] && !this[PIPES].length) { + this[DISCARDED] = true; + } + this[PAUSED] = false; + this[FLOWING] = true; + this.emit('resume'); + if (this[BUFFER].length) + this[FLUSH](); + else if (this[EOF]) + this[MAYBE_EMIT_END](); + else + this.emit('drain'); + } + /** + * Resume the stream if it is currently in a paused state + * + * If called when there are no pipe destinations or `data` event listeners, + * this will place the stream in a "discarded" state, where all data will + * be thrown away. The discarded state is removed if a pipe destination or + * data handler is added, if pause() is called, or if any synchronous or + * asynchronous iteration is started. + */ + resume() { + return this[RESUME](); + } + /** + * Pause the stream + */ + pause() { + this[FLOWING] = false; + this[PAUSED] = true; + this[DISCARDED] = false; + } + /** + * true if the stream has been forcibly destroyed + */ + get destroyed() { + return this[DESTROYED]; + } + /** + * true if the stream is currently in a flowing state, meaning that + * any writes will be immediately emitted. + */ + get flowing() { + return this[FLOWING]; + } + /** + * true if the stream is currently in a paused state + */ + get paused() { + return this[PAUSED]; + } + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1; + else + this[BUFFERLENGTH] += chunk.length; + this[BUFFER].push(chunk); + } + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1; + else + this[BUFFERLENGTH] -= this[BUFFER][0].length; + return this[BUFFER].shift(); + } + [FLUSH](noDrain = false) { + do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && + this[BUFFER].length); + if (!noDrain && !this[BUFFER].length && !this[EOF]) + this.emit('drain'); + } + [FLUSHCHUNK](chunk) { + this.emit('data', chunk); + return this[FLOWING]; + } + /** + * Pipe all data emitted by this stream into the destination provided. + * + * Triggers the flow of data. + */ + pipe(dest, opts) { + if (this[DESTROYED]) + return dest; + this[DISCARDED] = false; + const ended = this[EMITTED_END]; + opts = opts || {}; + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false; + else + opts.end = opts.end !== false; + opts.proxyErrors = !!opts.proxyErrors; + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end(); + } + else { + // "as" here just ignores the WType, which pipes don't care about, + // since they're only consuming from us, and writing to the dest + this[PIPES].push(!opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)); + if (this[ASYNC]) + defer(() => this[RESUME]()); + else + this[RESUME](); + } + return dest; + } + /** + * Fully unhook a piped destination stream. + * + * If the destination stream was the only consumer of this stream (ie, + * there are no other piped destinations or `'data'` event listeners) + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. 
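+ *
+ * For example (illustrative): after `src.pipe(dest)`, calling
+ * `src.unpipe(dest)` detaches `dest`; if it was the only consumer,
+ * the flow pauses until another consumer is attached.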
+ */ + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest); + if (p) { + if (this[PIPES].length === 1) { + if (this[FLOWING] && this[DATALISTENERS] === 0) { + this[FLOWING] = false; + } + this[PIPES] = []; + } + else + this[PIPES].splice(this[PIPES].indexOf(p), 1); + p.unpipe(); + } + } + /** + * Alias for {@link Minipass#on} + */ + addListener(ev, handler) { + return this.on(ev, handler); + } + /** + * Mostly identical to `EventEmitter.on`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * - Adding a 'data' event handler will trigger the flow of data + * + * - Adding a 'readable' event handler when there is data waiting to be read + * will cause 'readable' to be emitted immediately. + * + * - Adding an 'endish' event handler ('end', 'finish', etc.) which has + * already passed will cause the event to be emitted immediately and all + * handlers removed. + * + * - Adding an 'error' event handler after an error has been emitted will + * cause the event to be re-emitted immediately with the error previously + * raised. + */ + on(ev, handler) { + const ret = super.on(ev, handler); + if (ev === 'data') { + this[DISCARDED] = false; + this[DATALISTENERS]++; + if (!this[PIPES].length && !this[FLOWING]) { + this[RESUME](); + } + } + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { + super.emit('readable'); + } + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev); + this.removeAllListeners(ev); + } + else if (ev === 'error' && this[EMITTED_ERROR]) { + const h = handler; + if (this[ASYNC]) + defer(() => h.call(this, this[EMITTED_ERROR])); + else + h.call(this, this[EMITTED_ERROR]); + } + return ret; + } + /** + * Alias for {@link Minipass#off} + */ + removeListener(ev, handler) { + return this.off(ev, handler); + } + /** + * Mostly identical to `EventEmitter.off` + * + * If a 'data' event handler is removed, and it was the last consumer + * (ie, there are no pipe destinations or other 'data' event listeners), + * then the flow of data will stop until there is another consumer or + * {@link Minipass#resume} is explicitly called. + */ + off(ev, handler) { + const ret = super.off(ev, handler); + // if we previously had listeners, and now we don't, and we don't + // have any pipes, then stop the flow, unless it's been explicitly + // put in a discarded flowing state via stream.resume(). + if (ev === 'data') { + this[DATALISTENERS] = this.listeners('data').length; + if (this[DATALISTENERS] === 0 && + !this[DISCARDED] && + !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * Mostly identical to `EventEmitter.removeAllListeners` + * + * If all 'data' event handlers are removed, and they were the last consumer + * (ie, there are no pipe destinations), then the flow of data will stop + * until there is another consumer or {@link Minipass#resume} is explicitly + * called. 
+ */ + removeAllListeners(ev) { + const ret = super.removeAllListeners(ev); + if (ev === 'data' || ev === undefined) { + this[DATALISTENERS] = 0; + if (!this[DISCARDED] && !this[PIPES].length) { + this[FLOWING] = false; + } + } + return ret; + } + /** + * true if the 'end' event has been emitted + */ + get emittedEnd() { + return this[EMITTED_END]; + } + [MAYBE_EMIT_END]() { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF]) { + this[EMITTING_END] = true; + this.emit('end'); + this.emit('prefinish'); + this.emit('finish'); + if (this[CLOSED]) + this.emit('close'); + this[EMITTING_END] = false; + } + } + /** + * Mostly identical to `EventEmitter.emit`, with the following + * behavior differences to prevent data loss and unnecessary hangs: + * + * If the stream has been destroyed, and the event is something other + * than 'close' or 'error', then `false` is returned and no handlers + * are called. + * + * If the event is 'end', and has already been emitted, then the event + * is ignored. If the stream is in a paused or non-flowing state, then + * the event will be deferred until data flow resumes. If the stream is + * async, then handlers will be called on the next tick rather than + * immediately. + * + * If the event is 'close', and 'end' has not yet been emitted, then + * the event will be deferred until after 'end' is emitted. + * + * If the event is 'error', and an AbortSignal was provided for the stream, + * and there are no listeners, then the event is ignored, matching the + * behavior of node core streams in the presense of an AbortSignal. + * + * If the event is 'finish' or 'prefinish', then all listeners will be + * removed after emitting the event, to prevent double-firing. + */ + emit(ev, ...args) { + const data = args[0]; + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && + ev !== 'close' && + ev !== DESTROYED && + this[DESTROYED]) { + return false; + } + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? (defer(() => this[EMITDATA](data)), true) + : this[EMITDATA](data); + } + else if (ev === 'end') { + return this[EMITEND](); + } + else if (ev === 'close') { + this[CLOSED] = true; + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return false; + const ret = super.emit('close'); + this.removeAllListeners('close'); + return ret; + } + else if (ev === 'error') { + this[EMITTED_ERROR] = data; + super.emit(ERROR, data); + const ret = !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false; + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'resume') { + const ret = super.emit('resume'); + this[MAYBE_EMIT_END](); + return ret; + } + else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev); + this.removeAllListeners(ev); + return ret; + } + // Some other unknown event + const ret = super.emit(ev, ...args); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) + this.pause(); + } + const ret = this[DISCARDED] ? false : super.emit('data', data); + this[MAYBE_EMIT_END](); + return ret; + } + [EMITEND]() { + if (this[EMITTED_END]) + return false; + this[EMITTED_END] = true; + this.readable = false; + return this[ASYNC] + ? 
(defer(() => this[EMITEND2]()), true) + : this[EMITEND2](); + } + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end(); + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data); + } + if (!this[DISCARDED]) + super.emit('data', data); + } + } + for (const p of this[PIPES]) { + p.end(); + } + const ret = super.emit('end'); + this.removeAllListeners('end'); + return ret; + } + /** + * Return a Promise that resolves to an array of all emitted data once + * the stream ends. + */ + async collect() { + const buf = Object.assign([], { + dataLength: 0, + }); + if (!this[OBJECTMODE]) + buf.dataLength = 0; + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise(); + this.on('data', c => { + buf.push(c); + if (!this[OBJECTMODE]) + buf.dataLength += c.length; + }); + await p; + return buf; + } + /** + * Return a Promise that resolves to the concatenation of all emitted data + * once the stream ends. + * + * Not allowed on objectMode streams. + */ + async concat() { + if (this[OBJECTMODE]) { + throw new Error('cannot concat in objectMode'); + } + const buf = await this.collect(); + return (this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength)); + } + /** + * Return a void Promise that resolves once the stream ends. + */ + async promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))); + this.on('error', er => reject(er)); + this.on('end', () => resolve()); + }); + } + /** + * Asynchronous `for await of` iteration. + * + * This will continue emitting all chunks until the stream terminates. + */ + [Symbol.asyncIterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. + this[DISCARDED] = false; + let stopped = false; + const stop = async () => { + this.pause(); + stopped = true; + return { value: undefined, done: true }; + }; + const next = () => { + if (stopped) + return stop(); + const res = this.read(); + if (res !== null) + return Promise.resolve({ done: false, value: res }); + if (this[EOF]) + return stop(); + let resolve; + let reject; + const onerr = (er) => { + this.off('data', ondata); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + stop(); + reject(er); + }; + const ondata = (value) => { + this.off('error', onerr); + this.off('end', onend); + this.off(DESTROYED, ondestroy); + this.pause(); + resolve({ value, done: !!this[EOF] }); + }; + const onend = () => { + this.off('error', onerr); + this.off('data', ondata); + this.off(DESTROYED, ondestroy); + stop(); + resolve({ done: true, value: undefined }); + }; + const ondestroy = () => onerr(new Error('stream destroyed')); + return new Promise((res, rej) => { + reject = rej; + resolve = res; + this.once(DESTROYED, ondestroy); + this.once('error', onerr); + this.once('end', onend); + this.once('data', ondata); + }); + }; + return { + next, + throw: stop, + return: stop, + [Symbol.asyncIterator]() { + return this; + }, + }; + } + /** + * Synchronous `for of` iteration. + * + * The iteration will terminate when the internal buffer runs out, even + * if the stream has not yet terminated. + */ + [Symbol.iterator]() { + // set this up front, in case the consumer doesn't call next() + // right away. 
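+ // Illustrative sketch (not upstream code): consume chunks as they
+ // arrive, resolving when the stream ends, e.g. (inside an async fn)
+ //   const mp = new Minipass({ encoding: 'utf8' })
+ //   setTimeout(() => mp.end('done'))
+ //   for await (const chunk of mp) console.log(chunk) // 'done'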
+ this[DISCARDED] = false; + let stopped = false; + const stop = () => { + this.pause(); + this.off(ERROR, stop); + this.off(DESTROYED, stop); + this.off('end', stop); + stopped = true; + return { done: true, value: undefined }; + }; + const next = () => { + if (stopped) + return stop(); + const value = this.read(); + return value === null ? stop() : { done: false, value }; + }; + this.once('end', stop); + this.once(ERROR, stop); + this.once(DESTROYED, stop); + return { + next, + throw: stop, + return: stop, + [Symbol.iterator]() { + return this; + }, + }; + } + /** + * Destroy a stream, preventing it from being used for any further purpose. + * + * If the stream has a `close()` method, then it will be called on + * destruction. + * + * After destruction, any attempt to write data, read data, or emit most + * events will be ignored. + * + * If an error argument is provided, then it will be emitted in an + * 'error' event. + */ + destroy(er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er); + else + this.emit(DESTROYED); + return this; + } + this[DESTROYED] = true; + this[DISCARDED] = true; + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0; + this[BUFFERLENGTH] = 0; + const wc = this; + if (typeof wc.close === 'function' && !this[CLOSED]) + wc.close(); + if (er) + this.emit('error', er); + // if no error to emit, still reject pending promises + else + this.emit(DESTROYED); + return this; + } + /** + * Alias for {@link isStream} + * + * Former export location, maintained for backwards compatibility. + * + * @deprecated + */ + static get isStream() { + return isStream; + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/ssri/node_modules/minipass/dist/mjs/package.json b/node_modules/ssri/node_modules/minipass/dist/mjs/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/ssri/node_modules/minipass/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/ssri/node_modules/minipass/package.json b/node_modules/ssri/node_modules/minipass/package.json new file mode 100644 index 0000000000000..6faaa247a5bc6 --- /dev/null +++ b/node_modules/ssri/node_modules/minipass/package.json @@ -0,0 +1,82 @@ +{ + "name": "minipass", + "version": "7.0.3", + "description": "minimal implementation of a PassThrough stream", + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/cjs/index.js", + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./package.json": "./package.json" + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . 
--loglevel warn", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--enable-source-maps", + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/node": "^20.1.2", + "@types/tap": "^15.0.8", + "c8": "^7.13.0", + "prettier": "^2.6.2", + "tap": "^16.3.0", + "ts-node": "^10.9.1", + "typedoc": "^0.24.8", + "typescript": "^5.1.3", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "sync-content": "^1.0.2", + "through2": "^2.0.3" + }, + "repository": "https://github.com/isaacs/minipass", + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } +} diff --git a/node_modules/ssri/package.json b/node_modules/ssri/package.json index 815c7f3ed03ae..8750bd744d28b 100644 --- a/node_modules/ssri/package.json +++ b/node_modules/ssri/package.json @@ -1,6 +1,6 @@ { "name": "ssri", - "version": "10.0.4", + "version": "10.0.5", "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.", "main": "lib/index.js", "files": [ @@ -47,11 +47,11 @@ "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "minipass": "^5.0.0" + "minipass": "^7.0.3" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.18.0", "tap": "^16.0.1" }, "engines": { @@ -59,7 +59,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.14.1", + "version": "4.18.0", "publish": "true" } } diff --git a/package-lock.json b/package-lock.json index 1ea38c4cc674d..1e20a0d2db064 100644 --- a/package-lock.json +++ b/package-lock.json @@ -145,7 +145,7 @@ "read": "^2.1.0", "semver": "^7.5.4", "sigstore": "^1.7.0", - "ssri": "^10.0.4", + "ssri": "^10.0.5", "supports-color": "^9.4.0", "tar": "^6.1.15", "text-table": "~0.2.0", @@ -11968,17 +11968,26 @@ "dev": true }, "node_modules/ssri": { - "version": "10.0.4", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.4.tgz", - "integrity": "sha512-12+IR2CB2C28MMAw0Ncqwj5QbTcs0nGIhgJzYWzDkb21vWmfNI83KS4f3Ci6GI98WreIfG7o9UXp3C0qbpA8nQ==", + "version": "10.0.5", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.5.tgz", + "integrity": "sha512-bSf16tAFkGeRlUNDjXu8FzaMQt6g2HZJrun7mtMbIPOddxt3GLMSz5VWUWcqTJUPfLEaDIepGxv+bYQW49596A==", "inBundle": true, "dependencies": { - "minipass": "^5.0.0" + "minipass": "^7.0.3" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/ssri/node_modules/minipass": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz", + "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==", + "inBundle": true, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, "node_modules/stack-utils": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", @@ -15833,7 +15842,7 @@ "promise-call-limit": "^1.0.2", "read-package-json-fast": "^3.0.2", "semver": "^7.3.7", - "ssri": "^10.0.1", + "ssri": "^10.0.5", "treeverse": "^3.0.0", "walk-up-path": "^3.0.1" }, @@ -16030,7 +16039,7 @@ "proc-log": "^3.0.0", "semver": "^7.3.7", "sigstore": "^1.4.0", - "ssri": "^10.0.1" + "ssri": "^10.0.5" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", diff --git a/package.json b/package.json index 6f416e2ab0289..581cfaaf106a3 100644 --- a/package.json +++ b/package.json @@ -110,7 +110,7 @@ "read": "^2.1.0", "semver": "^7.5.4", "sigstore": "^1.7.0", - "ssri": "^10.0.4", + "ssri": "^10.0.5", "supports-color": "^9.4.0", "tar": "^6.1.15", "text-table": "~0.2.0", diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index 80aa885c0857b..49971f3fdf2ec 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -33,7 +33,7 @@ "promise-call-limit": "^1.0.2", "read-package-json-fast": "^3.0.2", "semver": "^7.3.7", - "ssri": "^10.0.1", + "ssri": "^10.0.5", "treeverse": "^3.0.0", "walk-up-path": "^3.0.1" }, diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json index ba634dfbbb2b7..6c0edc5be9246 100644 --- a/workspaces/libnpmpublish/package.json +++ b/workspaces/libnpmpublish/package.json @@ -45,7 +45,7 @@ "proc-log": "^3.0.0", "semver": "^7.3.7", "sigstore": "^1.4.0", - "ssri": "^10.0.1" + "ssri": "^10.0.5" }, "engines": { "node": "^16.13.0 || >=18.0.0" From e1362b5ba05ad95987eb0b4f5587991398d43b6f Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Mon, 14 Aug 2023 12:11:40 -0700 Subject: [PATCH 22/68] deps: npm-pick-manifest@8.0.2 --- node_modules/npm-pick-manifest/lib/index.js | 2 +- node_modules/npm-pick-manifest/package.json | 7 ++++--- package-lock.json | 10 +++++----- package.json | 2 +- workspaces/arborist/package.json | 2 +- 5 files changed, 12 insertions(+), 11 deletions(-) diff --git a/node_modules/npm-pick-manifest/lib/index.js b/node_modules/npm-pick-manifest/lib/index.js index 
f2934e9ca1822..8dbd2721c8996 100644 --- a/node_modules/npm-pick-manifest/lib/index.js +++ b/node_modules/npm-pick-manifest/lib/index.js @@ -210,7 +210,7 @@ module.exports = (packument, wanted, opts = {}) => { code, type: npa.resolve(packument.name, wanted).type, wanted, - versions: Object.keys(packument.versions), + versions: Object.keys(packument.versions ?? {}), name, distTags: packument['dist-tags'], defaultTag, diff --git a/node_modules/npm-pick-manifest/package.json b/node_modules/npm-pick-manifest/package.json index 89ff8966f1a39..feff81f5b2fee 100644 --- a/node_modules/npm-pick-manifest/package.json +++ b/node_modules/npm-pick-manifest/package.json @@ -1,6 +1,6 @@ { "name": "npm-pick-manifest", - "version": "8.0.1", + "version": "8.0.2", "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.", "main": "./lib", "files": [ @@ -36,7 +36,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.6.1", + "@npmcli/template-oss": "4.18.0", "tap": "^16.0.1" }, "tap": { @@ -51,6 +51,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.6.1" + "version": "4.18.0", + "publish": true } } diff --git a/package-lock.json b/package-lock.json index 1e20a0d2db064..1ec44a4ef3744 100644 --- a/package-lock.json +++ b/package-lock.json @@ -132,7 +132,7 @@ "npm-audit-report": "^5.0.0", "npm-install-checks": "^6.1.1", "npm-package-arg": "^10.1.0", - "npm-pick-manifest": "^8.0.1", + "npm-pick-manifest": "^8.0.2", "npm-profile": "^8.0.0", "npm-registry-fetch": "^15.0.0", "npm-user-validate": "^2.0.0", @@ -9702,9 +9702,9 @@ } }, "node_modules/npm-pick-manifest": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.1.tgz", - "integrity": "sha512-mRtvlBjTsJvfCCdmPtiu2bdlx8d/KXtF7yNXNWe7G0Z36qWA9Ny5zXsI2PfBZEv7SXgoxTmNaTzGSbbzDZChoA==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz", + "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==", "inBundle": true, "dependencies": { "npm-install-checks": "^6.0.0", @@ -15832,7 +15832,7 @@ "nopt": "^7.0.0", "npm-install-checks": "^6.0.0", "npm-package-arg": "^10.1.0", - "npm-pick-manifest": "^8.0.1", + "npm-pick-manifest": "^8.0.2", "npm-registry-fetch": "^15.0.0", "npmlog": "^7.0.1", "pacote": "^16.0.0", diff --git a/package.json b/package.json index 581cfaaf106a3..f2566322556ba 100644 --- a/package.json +++ b/package.json @@ -97,7 +97,7 @@ "npm-audit-report": "^5.0.0", "npm-install-checks": "^6.1.1", "npm-package-arg": "^10.1.0", - "npm-pick-manifest": "^8.0.1", + "npm-pick-manifest": "^8.0.2", "npm-profile": "^8.0.0", "npm-registry-fetch": "^15.0.0", "npm-user-validate": "^2.0.0", diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index 49971f3fdf2ec..bb752cb0410e1 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -23,7 +23,7 @@ "nopt": "^7.0.0", "npm-install-checks": "^6.0.0", "npm-package-arg": "^10.1.0", - "npm-pick-manifest": "^8.0.1", + "npm-pick-manifest": "^8.0.2", "npm-registry-fetch": "^15.0.0", "npmlog": "^7.0.1", "pacote": "^16.0.0", From f6deaf8d4360aec459d491c086999ffd1406b8da Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Mon, 14 Aug 2023 12:12:35 -0700 Subject: [PATCH 23/68] deps: npm-install-checks@6.2.0 --- 
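Note: 6.2.0 adds an optional `environment` argument to checkPlatform so a
caller can validate against a platform other than the current process. A
minimal usage sketch (illustrative only, assuming the package's exported
checkPlatform and its EBADPLATFORM error code):

    const { checkPlatform } = require('npm-install-checks')
    // target mirrors a manifest's os/cpu fields
    const target = { os: ['linux'], cpu: ['x64'] }
    // default behavior checks process.platform / process.arch;
    // force=true skips the check entirely
    checkPlatform(target, true)
    // with an environment override, the check runs against it instead:
    try {
      checkPlatform(target, false, { os: 'linux', cpu: 'arm64' })
    } catch (er) {
      console.log(er.code) // 'EBADPLATFORM' (cpu mismatch)
    }
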
node_modules/npm-install-checks/lib/index.js | 6 +++--- node_modules/npm-install-checks/package.json | 6 +++--- package-lock.json | 10 +++++----- package.json | 2 +- workspaces/arborist/package.json | 2 +- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/node_modules/npm-install-checks/lib/index.js b/node_modules/npm-install-checks/lib/index.js index fa5f593aaac64..f0ba2c07ad081 100644 --- a/node_modules/npm-install-checks/lib/index.js +++ b/node_modules/npm-install-checks/lib/index.js @@ -22,13 +22,13 @@ const checkEngine = (target, npmVer, nodeVer, force = false) => { const isMusl = (file) => file.includes('libc.musl-') || file.includes('ld-musl-') -const checkPlatform = (target, force = false) => { +const checkPlatform = (target, force = false, environment = {}) => { if (force) { return } - const platform = process.platform - const arch = process.arch + const platform = environment.os || process.platform + const arch = environment.cpu || process.arch const osOk = target.os ? checkList(platform, target.os) : true const cpuOk = target.cpu ? checkList(arch, target.cpu) : true diff --git a/node_modules/npm-install-checks/package.json b/node_modules/npm-install-checks/package.json index 192cf68837146..50378808d75d0 100644 --- a/node_modules/npm-install-checks/package.json +++ b/node_modules/npm-install-checks/package.json @@ -1,6 +1,6 @@ { "name": "npm-install-checks", - "version": "6.1.1", + "version": "6.2.0", "description": "Check the engines and platform fields in package.json", "main": "lib/index.js", "dependencies": { @@ -8,7 +8,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.13.0", + "@npmcli/template-oss": "4.18.0", "tap": "^16.0.1" }, "scripts": { @@ -39,7 +39,7 @@ "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.13.0", + "version": "4.18.0", "publish": "true" }, "tap": { diff --git a/package-lock.json b/package-lock.json index 1ec44a4ef3744..235a8cde69a93 100644 --- a/package-lock.json +++ b/package-lock.json @@ -130,7 +130,7 @@ "node-gyp": "^9.4.0", "nopt": "^7.2.0", "npm-audit-report": "^5.0.0", - "npm-install-checks": "^6.1.1", + "npm-install-checks": "^6.2.0", "npm-package-arg": "^10.1.0", "npm-pick-manifest": "^8.0.2", "npm-profile": "^8.0.0", @@ -9648,9 +9648,9 @@ } }, "node_modules/npm-install-checks": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-6.1.1.tgz", - "integrity": "sha512-dH3GmQL4vsPtld59cOn8uY0iOqRmqKvV+DLGwNXV/Q7MDgD2QfOADWd/mFXcIE5LVhYYGjA3baz6W9JneqnuCw==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-6.2.0.tgz", + "integrity": "sha512-744wat5wAAHsxa4590mWO0tJ8PKxR8ORZsH9wGpQc3nWTzozMAgBN/XyqYw7mg3yqLM8dLwEnwSfKMmXAjF69g==", "inBundle": true, "dependencies": { "semver": "^7.1.1" @@ -15830,7 +15830,7 @@ "json-stringify-nice": "^1.1.4", "minimatch": "^9.0.0", "nopt": "^7.0.0", - "npm-install-checks": "^6.0.0", + "npm-install-checks": "^6.2.0", "npm-package-arg": "^10.1.0", "npm-pick-manifest": "^8.0.2", "npm-registry-fetch": "^15.0.0", diff --git a/package.json b/package.json index f2566322556ba..fd1d933953352 100644 --- a/package.json +++ b/package.json @@ -95,7 +95,7 @@ "node-gyp": "^9.4.0", "nopt": "^7.2.0", "npm-audit-report": "^5.0.0", - "npm-install-checks": "^6.1.1", + "npm-install-checks": "^6.2.0", "npm-package-arg": "^10.1.0", "npm-pick-manifest": "^8.0.2", "npm-profile": "^8.0.0", diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index bb752cb0410e1..975a91f030007 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -21,7 +21,7 @@ "json-stringify-nice": "^1.1.4", "minimatch": "^9.0.0", "nopt": "^7.0.0", - "npm-install-checks": "^6.0.0", + "npm-install-checks": "^6.2.0", "npm-package-arg": "^10.1.0", "npm-pick-manifest": "^8.0.2", "npm-registry-fetch": "^15.0.0", From 959a46368bcd8df2cbff09b0142ff2ccf7973876 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Mon, 14 Aug 2023 12:13:23 -0700 Subject: [PATCH 24/68] deps: glob@10.3.3 --- node_modules/glob/dist/cjs/package.json | 2 +- node_modules/glob/dist/cjs/src/bin.js | 16 +++++++++-- node_modules/glob/dist/cjs/src/glob.js | 4 +++ node_modules/glob/dist/mjs/glob.js | 4 +++ node_modules/glob/dist/mjs/package.json | 2 +- node_modules/glob/package.json | 8 +++--- node_modules/path-scurry/dist/cjs/index.js | 23 +++++++++++++++ node_modules/path-scurry/dist/mjs/index.js | 23 +++++++++++++++ .../node_modules/lru-cache/dist/cjs/index.js | 13 +++++++-- .../lru-cache/dist/cjs/index.min.js | 2 +- .../node_modules/lru-cache/dist/mjs/index.js | 13 +++++++-- .../lru-cache/dist/mjs/index.min.js | 2 +- .../node_modules/lru-cache/package.json | 15 +++++----- node_modules/path-scurry/package.json | 10 +++---- package-lock.json | 28 +++++++++---------- package.json | 2 +- 16 files changed, 124 insertions(+), 43 deletions(-) diff --git a/node_modules/glob/dist/cjs/package.json b/node_modules/glob/dist/cjs/package.json index 44b67c307f1c8..c15df94a3582b 100644 --- a/node_modules/glob/dist/cjs/package.json +++ b/node_modules/glob/dist/cjs/package.json @@ -1,4 +1,4 @@ { - "version": "10.2.7", + "version": "10.3.3", "type": "commonjs" } diff --git a/node_modules/glob/dist/cjs/src/bin.js 
b/node_modules/glob/dist/cjs/src/bin.js index 733358c7365be..4a8a88f2734d2 100755 --- a/node_modules/glob/dist/cjs/src/bin.js +++ b/node_modules/glob/dist/cjs/src/bin.js @@ -4,10 +4,10 @@ Object.defineProperty(exports, "__esModule", { value: true }); const foreground_child_1 = require("foreground-child"); const fs_1 = require("fs"); const jackspeak_1 = require("jackspeak"); -const index_js_1 = require("./index.js"); const package_json_1 = require("../package.json"); +const index_js_1 = require("./index.js"); const j = (0, jackspeak_1.jack)({ - usage: 'glob [options] [<pattern> [<pattern> ...]]' + usage: 'glob [options] [<pattern> [<pattern> ...]]', }) .description(` Glob v${package_json_1.version} @@ -22,6 +22,14 @@ const j = (0, jackspeak_1.jack)({ description: `Run the command provided, passing the glob expression matches as arguments.`, }, +}) + .opt({ + default: { + short: 'p', + hint: 'pattern', + description: `If no positional arguments are provided, glob will use + this pattern`, + }, }) .flag({ all: { @@ -214,8 +222,10 @@ try { console.log(j.usage()); process.exit(0); } - if (positionals.length === 0) + if (positionals.length === 0 && !values.default) throw 'No patterns provided'; + if (positionals.length === 0 && values.default) + positionals.push(values.default); const patterns = values.all ? positionals : positionals.filter(p => !(0, fs_1.existsSync)(p)); diff --git a/node_modules/glob/dist/cjs/src/glob.js b/node_modules/glob/dist/cjs/src/glob.js index e7ad4deb980d3..eb37c6b9a6601 100644 --- a/node_modules/glob/dist/cjs/src/glob.js +++ b/node_modules/glob/dist/cjs/src/glob.js @@ -62,6 +62,10 @@ class Glob { * again. */ constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ this.withFileTypes = !!opts.withFileTypes; this.signal = opts.signal; this.follow = !!opts.follow; diff --git a/node_modules/glob/dist/mjs/glob.js b/node_modules/glob/dist/mjs/glob.js index f158065746e58..8ff26154427be 100644 --- a/node_modules/glob/dist/mjs/glob.js +++ b/node_modules/glob/dist/mjs/glob.js @@ -59,6 +59,10 @@ export class Glob { * again. */ constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ this.withFileTypes = !!opts.withFileTypes; this.signal = opts.signal; this.follow = !!opts.follow; diff --git a/node_modules/glob/dist/mjs/package.json b/node_modules/glob/dist/mjs/package.json index ac4c42f81fbd8..5cc80943d565b 100644 --- a/node_modules/glob/dist/mjs/package.json +++ b/node_modules/glob/dist/mjs/package.json @@ -1,4 +1,4 @@ { - "version": "10.2.7", + "version": "10.3.3", "type": "module" } diff --git a/node_modules/glob/package.json b/node_modules/glob/package.json index ba9732c0f6de5..2d25985d2bbb5 100644 --- a/node_modules/glob/package.json +++ b/node_modules/glob/package.json @@ -2,7 +2,7 @@ "author": "Isaac Z. 
Schlueter (https://blog.izs.me/)", "name": "glob", "description": "the most correct and second fastest glob implementation in JavaScript", - "version": "10.2.7", + "version": "10.3.3", "bin": "./dist/cjs/src/bin.js", "repository": { "type": "git", @@ -62,11 +62,11 @@ "foreground-child": "^3.1.0", "jackspeak": "^2.0.3", "minimatch": "^9.0.1", - "minipass": "^5.0.0 || ^6.0.2", - "path-scurry": "^1.7.0" + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", + "path-scurry": "^1.10.1" }, "devDependencies": { - "@types/node": "^20.2.1", + "@types/node": "^20.3.2", "@types/tap": "^15.0.7", "c8": "^7.12.0", "memfs": "^3.4.13", diff --git a/node_modules/path-scurry/dist/cjs/index.js b/node_modules/path-scurry/dist/cjs/index.js index 8044c7e581d2e..23eb5b0853ff2 100644 --- a/node_modules/path-scurry/dist/cjs/index.js +++ b/node_modules/path-scurry/dist/cjs/index.js @@ -521,6 +521,29 @@ class PathBase { isUnknown() { return (this.#type & IFMT) === UNKNOWN; } + isType(type) { + return this[`is${type}`](); + } + getType() { + return this.isUnknown() + ? 'Unknown' + : this.isDirectory() + ? 'Directory' + : this.isFile() + ? 'File' + : this.isSymbolicLink() + ? 'SymbolicLink' + : this.isFIFO() + ? 'FIFO' + : this.isCharacterDevice() + ? 'CharacterDevice' + : this.isBlockDevice() + ? 'BlockDevice' + : /* c8 ignore start */ this.isSocket() + ? 'Socket' + : 'Unknown'; + /* c8 ignore stop */ + } /** * Is the Path a regular file? */ diff --git a/node_modules/path-scurry/dist/mjs/index.js b/node_modules/path-scurry/dist/mjs/index.js index 957f087c86514..079253a6aee96 100644 --- a/node_modules/path-scurry/dist/mjs/index.js +++ b/node_modules/path-scurry/dist/mjs/index.js @@ -493,6 +493,29 @@ export class PathBase { isUnknown() { return (this.#type & IFMT) === UNKNOWN; } + isType(type) { + return this[`is${type}`](); + } + getType() { + return this.isUnknown() + ? 'Unknown' + : this.isDirectory() + ? 'Directory' + : this.isFile() + ? 'File' + : this.isSymbolicLink() + ? 'SymbolicLink' + : this.isFIFO() + ? 'FIFO' + : this.isCharacterDevice() + ? 'CharacterDevice' + : this.isBlockDevice() + ? 'BlockDevice' + : /* c8 ignore start */ this.isSocket() + ? 'Socket' + : 'Unknown'; + /* c8 ignore stop */ + } /** * Is the Path a regular file? */ diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js b/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js index 51c9f236cbfd3..02d76ec800a92 100644 --- a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js +++ b/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js @@ -837,6 +837,15 @@ class LRUCache { if (v !== oldVal) { if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } } else if (!noDisposeOnSet) { if (this.#hasDispose) { @@ -1090,7 +1099,7 @@ class LRUCache { const pcall = (res, rej) => { const fmp = this.#fetchMethod?.(k, v, fetchOpts); if (fmp && fmp instanceof Promise) { - fmp.then(v => res(v), rej); + fmp.then(v => res(v === undefined ? undefined : v), rej); } // ignored, we go until we finish, regardless. 
// defer check until we are actually aborting, @@ -1098,7 +1107,7 @@ class LRUCache { ac.signal.addEventListener('abort', () => { if (!options.ignoreFetchAbort || options.allowStaleOnFetchAbort) { - res(); + res(undefined); // when it eventually resolves, update the cache. if (options.allowStaleOnFetchAbort) { res = v => cb(v, true); diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js index d854bf570d346..8d34a03041d25 100644 --- a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js +++ b/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js @@ -1,2 +1,2 @@ -"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),k=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=k(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#_;#g;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#p;#n;#i;#t;#l;#c;#o;#h;#w;#r;#m;#F;#S;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#S,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#w,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#p}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#_}get disposeAfter(){return this.#g}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:u,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:d,maxSize:p=0,maxEntrySize:F=0,sizeCalculation:c,fetchMethod:w,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:S,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:g,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?k(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=p,this.maxEntrySize=F||this.#f,this.sizeCalculation=c,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=w,this.#T=!!w,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#w=R.create(e),this.#s=0,this.#p=0,typeof u=="function"&&(this.#_=u),typeof b=="function"?(this.#g=b,this.#r=[]):(this.#g=void 0,this.#r=void 0),this.#b=!!this.#_,this.#a=!!this.#g,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!d,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!g,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!S,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#S=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let u=n.now-r;n.remainingTTL=a-u}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let u=(i||s())-r;return a-u},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#p=0,this.#m=t,this.#E=e=>{this.#p-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#p>n;)this.#W(!0)}this.#p+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#p)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#S&&this.#F){h.ttl=this.#S[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:u=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#w.length!==0?this.#w.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),u=!1;else{this.#v(f);let d=this.#t[f];if(e!==d){if(this.#T&&this.#e(d)?d.__abortController.abort(new Error("replaced")):h||(this.#b&&this.#_?.(d,t,"set"),this.#a&&this.#r?.push([d,t,"set"])),this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let p=d&&this.#e(d)?d.__staleWhileFetching:d;p!==void 0&&(r.oldValue=p)}}else r&&(r.set="update")}if(s!==0&&!this.#S&&this.#L(),this.#S&&(u||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let d=this.#r,p;for(;p=d?.shift();)this.#g?.(...p)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#w.push(e)),this.#s===1?(this.#o=this.#h=0,this.#w.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},u=(c,w=!1)=>{let{aborted:l}=h.signal,S=i.ignoreFetchAbort&&c!==void 0;if(i.status&&(l&&!w?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,S&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!S&&!w)return f(h.signal.reason);let y=p;return this.#t[e]===p&&(c===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,c,r.options))),c},b=c=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=c),f(c)),f=c=>{let{aborted:w}=h.signal,l=w&&i.allowStaleOnFetchAbort,S=l||i.allowStaleOnFetchRejection,y=S||i.noDeleteOnFetchRejection,g=p;if(this.#t[e]===p&&(!y||g.__staleWhileFetching===void 
0?this.delete(t):l||(this.#t[e]=g.__staleWhileFetching)),S)return i.status&&g.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),g.__staleWhileFetching;if(g.__returned===g)throw c},d=(c,w)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(S=>c(S),w),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(c(),i.allowStaleOnFetchAbort&&(c=S=>u(S,!0)))})};i.status&&(i.status.fetchDispatched=!0);let p=new Promise(d).then(u,b),F=Object.assign(p,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:u=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:d=this.allowStaleOnFetchRejection,ignoreFetchAbort:p=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:c,forceRefresh:w=!1,status:l,signal:S}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:u,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:d,allowStaleOnFetchAbort:F,ignoreFetchAbort:p,status:l,signal:S},g=this.#n.get(t);if(g===void 0){l&&(l.fetch="miss");let _=this.#D(t,g,y,c);return _.__returned=_}else{let _=this.#t[g];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(g);if(!w&&!O)return l&&(l.fetch="hit"),this.#v(g),s&&this.#z(g),l&&this.#O(l,g),_;let A=this.#D(t,g,y,c),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],u=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),u?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),u?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#w.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#g?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new 
Error("deleted"));else{let i=this.#i[t];this.#b&&this.#_?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#S&&this.#F&&(this.#S.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#w.length=0,this.#p=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}};exports.LRUCache=C; +"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C; //# sourceMappingURL=index.min.js.map diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js b/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js index 8af17c0845343..23b9754ad6c76 100644 --- a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js +++ b/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js @@ -834,6 +834,15 @@ export class LRUCache { if (v !== oldVal) { if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } } else if (!noDisposeOnSet) { if (this.#hasDispose) { @@ -1087,7 +1096,7 @@ export class LRUCache { const pcall = (res, rej) => { const fmp = this.#fetchMethod?.(k, v, fetchOpts); if (fmp && fmp instanceof Promise) { - fmp.then(v => res(v), rej); + fmp.then(v => res(v === undefined ? undefined : v), rej); } // ignored, we go until we finish, regardless. // defer check until we are actually aborting, @@ -1095,7 +1104,7 @@ export class LRUCache { ac.signal.addEventListener('abort', () => { if (!options.ignoreFetchAbort || options.allowStaleOnFetchAbort) { - res(); + res(undefined); // when it eventually resolves, update the cache. 
if (options.allowStaleOnFetchAbort) { res = v => cb(v, true); diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js index 44bd1c23b86e7..5a16b3940d6df 100644 --- a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js +++ b/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js @@ -1,2 +1,2 @@ -var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},R=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof R>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},R=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),k=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=k(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var W=class{#d;#f;#_;#g;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#p;#n;#i;#t;#l;#c;#o;#h;#w;#r;#m;#F;#S;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#S,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#w,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#p}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#_}get disposeAfter(){return this.#g}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:u,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:d,maxSize:p=0,maxEntrySize:F=0,sizeCalculation:c,fetchMethod:w,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:S,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:g,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?k(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=p,this.maxEntrySize=F||this.#f,this.sizeCalculation=c,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=w,this.#T=!!w,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#w=C.create(e),this.#s=0,this.#p=0,typeof u=="function"&&(this.#_=u),typeof b=="function"?(this.#g=b,this.#r=[]):(this.#g=void 0,this.#r=void 0),this.#b=!!this.#_,this.#a=!!this.#g,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!d,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!g,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!S,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,W))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#S=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let u=n.now-r;n.remainingTTL=a-u}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let u=(i||s())-r;return a-u},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#p=0,this.#m=t,this.#E=e=>{this.#p-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#p>n;)this.#W(!0)}this.#p+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#p)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#S&&this.#F){h.ttl=this.#S[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:u=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#w.length!==0?this.#w.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),u=!1;else{this.#v(f);let d=this.#t[f];if(e!==d){if(this.#T&&this.#e(d)?d.__abortController.abort(new Error("replaced")):h||(this.#b&&this.#_?.(d,t,"set"),this.#a&&this.#r?.push([d,t,"set"])),this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let p=d&&this.#e(d)?d.__staleWhileFetching:d;p!==void 0&&(r.oldValue=p)}}else r&&(r.set="update")}if(s!==0&&!this.#S&&this.#L(),this.#S&&(u||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let d=this.#r,p;for(;p=d?.shift();)this.#g?.(...p)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#w.push(e)),this.#s===1?(this.#o=this.#h=0,this.#w.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new R,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},u=(c,w=!1)=>{let{aborted:l}=h.signal,S=i.ignoreFetchAbort&&c!==void 0;if(i.status&&(l&&!w?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,S&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!S&&!w)return f(h.signal.reason);let y=p;return this.#t[e]===p&&(c===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,c,r.options))),c},b=c=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=c),f(c)),f=c=>{let{aborted:w}=h.signal,l=w&&i.allowStaleOnFetchAbort,S=l||i.allowStaleOnFetchRejection,y=S||i.noDeleteOnFetchRejection,g=p;if(this.#t[e]===p&&(!y||g.__staleWhileFetching===void 
0?this.delete(t):l||(this.#t[e]=g.__staleWhileFetching)),S)return i.status&&g.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),g.__staleWhileFetching;if(g.__returned===g)throw c},d=(c,w)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(S=>c(S),w),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(c(),i.allowStaleOnFetchAbort&&(c=S=>u(S,!0)))})};i.status&&(i.status.fetchDispatched=!0);let p=new Promise(d).then(u,b),F=Object.assign(p,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof R}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:u=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:d=this.allowStaleOnFetchRejection,ignoreFetchAbort:p=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:c,forceRefresh:w=!1,status:l,signal:S}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:u,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:d,allowStaleOnFetchAbort:F,ignoreFetchAbort:p,status:l,signal:S},g=this.#n.get(t);if(g===void 0){l&&(l.fetch="miss");let _=this.#D(t,g,y,c);return _.__returned=_}else{let _=this.#t[g];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(g);if(!w&&!O)return l&&(l.fetch="hit"),this.#v(g),s&&this.#z(g),l&&this.#O(l,g),_;let A=this.#D(t,g,y,c),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],u=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),u?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),u?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#w.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#g?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new 
Error("deleted"));else{let i=this.#i[t];this.#b&&this.#_?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#S&&this.#F&&(this.#S.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#w.length=0,this.#p=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}};export{W as LRUCache}; +var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return 
this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else 
r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return 
l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache}; //# sourceMappingURL=index.min.js.map diff --git a/node_modules/path-scurry/node_modules/lru-cache/package.json b/node_modules/path-scurry/node_modules/lru-cache/package.json index 69a20582ff9b6..bae4a04839d1f 100644 --- a/node_modules/path-scurry/node_modules/lru-cache/package.json +++ b/node_modules/path-scurry/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "9.1.1", + "version": "10.0.1", "author": "Isaac Z. 
Schlueter ", "keywords": [ "mru", @@ -56,7 +56,7 @@ "repository": "git://github.com/isaacs/node-lru-cache.git", "devDependencies": { "@size-limit/preset-small-lib": "^7.0.8", - "@types/node": "^17.0.31", + "@types/node": "^20.2.5", "@types/tap": "^15.0.6", "benchmark": "^2.1.4", "c8": "^7.11.2", @@ -68,10 +68,10 @@ "prettier": "^2.6.2", "size-limit": "^7.0.8", "tap": "^16.3.4", - "ts-node": "^10.7.0", + "ts-node": "^10.9.1", "tslib": "^2.4.0", - "typedoc": "^0.23.24", - "typescript": "^4.6.4" + "typedoc": "^0.24.6", + "typescript": "^5.0.4" }, "license": "ISC", "files": [ @@ -95,9 +95,8 @@ "coverage": false, "node-arg": [ "--expose-gc", - "--no-warnings", - "--loader", - "ts-node/esm" + "-r", + "ts-node/register" ], "ts": false }, diff --git a/node_modules/path-scurry/package.json b/node_modules/path-scurry/package.json index 5b900825e44e0..af04f807fed2b 100644 --- a/node_modules/path-scurry/package.json +++ b/node_modules/path-scurry/package.json @@ -1,6 +1,6 @@ { "name": "path-scurry", - "version": "1.9.2", + "version": "1.10.1", "description": "walk paths fast and efficiently", "author": "Isaac Z. Schlueter (https://blog.izs.me)", "main": "./dist/cjs/index.js", @@ -64,7 +64,7 @@ "eslint-config-prettier": "^8.6.0", "mkdirp": "^3.0.0", "prettier": "^2.8.3", - "rimraf": "^4.1.2", + "rimraf": "^5.0.1", "tap": "^16.3.4", "ts-node": "^10.9.1", "typedoc": "^0.23.24", @@ -78,10 +78,10 @@ }, "repository": { "type": "git", - "url": "git+https://github.com/isaacs/path-walker" + "url": "git+https://github.com/isaacs/path-scurry" }, "dependencies": { - "lru-cache": "^9.1.1", - "minipass": "^5.0.0 || ^6.0.2" + "lru-cache": "^9.1.1 || ^10.0.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } } diff --git a/package-lock.json b/package-lock.json index 235a8cde69a93..7b695e11b2876 100644 --- a/package-lock.json +++ b/package-lock.json @@ -104,7 +104,7 @@ "columnify": "^1.6.0", "fastest-levenshtein": "^1.0.16", "fs-minipass": "^3.0.3", - "glob": "^10.2.7", + "glob": "^10.3.3", "graceful-fs": "^4.2.11", "hosted-git-info": "^6.1.1", "ini": "^4.1.1", @@ -6053,16 +6053,16 @@ "dev": true }, "node_modules/glob": { - "version": "10.2.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.2.7.tgz", - "integrity": "sha512-jTKehsravOJo8IJxUGfZILnkvVJM/MOfHRs8QcXolVef2zNI9Tqyy5+SeuOAZd3upViEZQLyFpQhYiHLrMUNmA==", + "version": "10.3.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.3.tgz", + "integrity": "sha512-92vPiMb/iqpmEgsOoIDvTjc50wf9CCCvMzsi6W0JLPeUKE8TWP1a73PgqSrqy7iAZxaSD1YdzU7QZR5LF51MJw==", "inBundle": true, "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^2.0.3", "minimatch": "^9.0.1", - "minipass": "^5.0.0 || ^6.0.2", - "path-scurry": "^1.7.0" + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", + "path-scurry": "^1.10.1" }, "bin": { "glob": "dist/cjs/src/bin.js" @@ -10450,13 +10450,13 @@ "dev": true }, "node_modules/path-scurry": { - "version": "1.9.2", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.9.2.tgz", - "integrity": "sha512-qSDLy2aGFPm8i4rsbHd4MNyTcrzHFsLQykrtbuGRknZZCBBVXSv2tSCDN2Cg6Rt/GFRw8GoW9y9Ecw5rIPG1sg==", + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.1.tgz", + "integrity": "sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==", "inBundle": true, "dependencies": { - "lru-cache": "^9.1.1", - "minipass": "^5.0.0 || ^6.0.2" + "lru-cache": "^9.1.1 || ^10.0.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" }, "engines": { "node": ">=16 || 14 >=14.17" @@ -10466,9 
+10466,9 @@ } }, "node_modules/path-scurry/node_modules/lru-cache": { - "version": "9.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-9.1.1.tgz", - "integrity": "sha512-65/Jky17UwSb0BuB9V+MyDpsOtXKmYwzhyl+cOa9XUiI4uV2Ouy/2voFP3+al0BjZbJgMBD8FojMpAf+Z+qn4A==", + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz", + "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==", "inBundle": true, "engines": { "node": "14 || >=16.14" diff --git a/package.json b/package.json index fd1d933953352..c3df86759868a 100644 --- a/package.json +++ b/package.json @@ -69,7 +69,7 @@ "columnify": "^1.6.0", "fastest-levenshtein": "^1.0.16", "fs-minipass": "^3.0.3", - "glob": "^10.2.7", + "glob": "^10.3.3", "graceful-fs": "^4.2.11", "hosted-git-info": "^6.1.1", "ini": "^4.1.1", From 72ffaa31bda7581e3e50022748272efe7e4f7b86 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Mon, 14 Aug 2023 12:14:22 -0700 Subject: [PATCH 25/68] deps: minipass@7.0.3 --- node_modules/.gitignore | 26 +- .../node_modules/minipass/LICENSE | 0 .../node_modules}/minipass/index.js | 0 .../node_modules}/minipass/index.mjs | 0 .../node_modules/minipass/package.json | 76 ++ .../node_modules/minipass/package.json | 82 -- .../node_modules/minipass/dist/cjs/index.js | 1028 ----------------- .../minipass/dist/cjs/package.json | 3 - .../node_modules/minipass/dist/mjs/index.js | 1018 ---------------- .../minipass/dist/mjs/package.json | 3 - .../node_modules/minipass/package.json | 82 -- .../node_modules/minipass/dist/cjs/index.js | 1028 ----------------- .../minipass/dist/cjs/package.json | 3 - .../node_modules/minipass/dist/mjs/index.js | 1018 ---------------- .../minipass/dist/mjs/package.json | 3 - .../node_modules/minipass/package.json | 82 -- .../node_modules/minipass/LICENSE | 0 .../node_modules/minipass/index.js | 702 +++++++++++ .../node_modules/minipass/index.mjs | 702 +++++++++++ .../node_modules/minipass/package.json | 76 ++ .../minipass/dist/cjs/index.js | 0 .../minipass/dist/cjs/package.json | 0 .../minipass/dist/mjs/index.js | 0 .../minipass/dist/mjs/package.json | 0 node_modules/minipass/package.json | 100 +- .../node_modules/minipass/LICENSE | 0 .../node-gyp/node_modules/minipass/index.js | 702 +++++++++++ .../node-gyp/node_modules/minipass/index.mjs | 702 +++++++++++ .../node_modules/minipass/package.json | 76 ++ .../node_modules/minipass/dist/cjs/index.js | 1028 ----------------- .../minipass/dist/cjs/package.json | 3 - .../node_modules/minipass/dist/mjs/index.js | 1018 ---------------- .../minipass/dist/mjs/package.json | 3 - .../node_modules/minipass/package.json | 82 -- .../node_modules/minipass/dist/cjs/index.js | 1028 ----------------- .../minipass/dist/cjs/package.json | 3 - .../node_modules/minipass/dist/mjs/index.js | 1018 ---------------- .../minipass/dist/mjs/package.json | 3 - .../pacote/node_modules/minipass/package.json | 82 -- .../node_modules/minipass/LICENSE | 0 .../sigstore/node_modules/minipass/index.js | 702 +++++++++++ .../sigstore/node_modules/minipass/index.mjs | 702 +++++++++++ .../node_modules/minipass/package.json | 76 ++ .../node_modules/minipass/dist/cjs/index.js | 1028 ----------------- .../minipass/dist/cjs/package.json | 3 - .../node_modules/minipass/dist/mjs/index.js | 1018 ---------------- .../minipass/dist/mjs/package.json | 3 - .../ssri/node_modules/minipass/package.json | 82 -- .../node_modules/minipass/LICENSE | 0 .../tar/node_modules/minipass/index.js | 702 +++++++++++ 
.../tar/node_modules/minipass/index.mjs | 702 +++++++++++ .../tar/node_modules/minipass/package.json | 76 ++ .../node_modules/minipass/LICENSE | 0 .../tuf-js/node_modules/minipass/index.js | 702 +++++++++++ .../tuf-js/node_modules/minipass/index.mjs | 702 +++++++++++ .../tuf-js/node_modules/minipass/package.json | 76 ++ package-lock.json | 119 +- package.json | 2 +- workspaces/libnpmorg/package.json | 2 +- 59 files changed, 7598 insertions(+), 10879 deletions(-) rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/minipass/LICENSE (100%) rename node_modules/{ => @npmcli/metavuln-calculator/node_modules}/minipass/index.js (100%) rename node_modules/{ => @npmcli/metavuln-calculator/node_modules}/minipass/index.mjs (100%) create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/minipass/package.json delete mode 100644 node_modules/cacache/node_modules/minipass/package.json delete mode 100644 node_modules/fs-minipass/node_modules/minipass/dist/cjs/index.js delete mode 100644 node_modules/fs-minipass/node_modules/minipass/dist/cjs/package.json delete mode 100644 node_modules/fs-minipass/node_modules/minipass/dist/mjs/index.js delete mode 100644 node_modules/fs-minipass/node_modules/minipass/dist/mjs/package.json delete mode 100644 node_modules/fs-minipass/node_modules/minipass/package.json delete mode 100644 node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/index.js delete mode 100644 node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/package.json delete mode 100644 node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/index.js delete mode 100644 node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/package.json delete mode 100644 node_modules/make-fetch-happen/node_modules/minipass/package.json rename node_modules/{fs-minipass => minipass-fetch}/node_modules/minipass/LICENSE (100%) create mode 100644 node_modules/minipass-fetch/node_modules/minipass/index.js create mode 100644 node_modules/minipass-fetch/node_modules/minipass/index.mjs create mode 100644 node_modules/minipass-fetch/node_modules/minipass/package.json rename node_modules/{cacache/node_modules => }/minipass/dist/cjs/index.js (100%) rename node_modules/{cacache/node_modules => }/minipass/dist/cjs/package.json (100%) rename node_modules/{cacache/node_modules => }/minipass/dist/mjs/index.js (100%) rename node_modules/{cacache/node_modules => }/minipass/dist/mjs/package.json (100%) rename node_modules/{make-fetch-happen => node-gyp}/node_modules/minipass/LICENSE (100%) create mode 100644 node_modules/node-gyp/node_modules/minipass/index.js create mode 100644 node_modules/node-gyp/node_modules/minipass/index.mjs create mode 100644 node_modules/node-gyp/node_modules/minipass/package.json delete mode 100644 node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/index.js delete mode 100644 node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/package.json delete mode 100644 node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/index.js delete mode 100644 node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/package.json delete mode 100644 node_modules/npm-registry-fetch/node_modules/minipass/package.json delete mode 100644 node_modules/pacote/node_modules/minipass/dist/cjs/index.js delete mode 100644 node_modules/pacote/node_modules/minipass/dist/cjs/package.json delete mode 100644 node_modules/pacote/node_modules/minipass/dist/mjs/index.js delete mode 100644 node_modules/pacote/node_modules/minipass/dist/mjs/package.json delete 
mode 100644 node_modules/pacote/node_modules/minipass/package.json rename node_modules/{npm-registry-fetch => sigstore}/node_modules/minipass/LICENSE (100%) create mode 100644 node_modules/sigstore/node_modules/minipass/index.js create mode 100644 node_modules/sigstore/node_modules/minipass/index.mjs create mode 100644 node_modules/sigstore/node_modules/minipass/package.json delete mode 100644 node_modules/ssri/node_modules/minipass/dist/cjs/index.js delete mode 100644 node_modules/ssri/node_modules/minipass/dist/cjs/package.json delete mode 100644 node_modules/ssri/node_modules/minipass/dist/mjs/index.js delete mode 100644 node_modules/ssri/node_modules/minipass/dist/mjs/package.json delete mode 100644 node_modules/ssri/node_modules/minipass/package.json rename node_modules/{pacote => tar}/node_modules/minipass/LICENSE (100%) create mode 100644 node_modules/tar/node_modules/minipass/index.js create mode 100644 node_modules/tar/node_modules/minipass/index.mjs create mode 100644 node_modules/tar/node_modules/minipass/package.json rename node_modules/{ssri => tuf-js}/node_modules/minipass/LICENSE (100%) create mode 100644 node_modules/tuf-js/node_modules/minipass/index.js create mode 100644 node_modules/tuf-js/node_modules/minipass/index.mjs create mode 100644 node_modules/tuf-js/node_modules/minipass/package.json diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 1fd9ff59b3c03..4c5a8843caff7 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -28,6 +28,7 @@ !/@npmcli/metavuln-calculator/node_modules/ /@npmcli/metavuln-calculator/node_modules/* !/@npmcli/metavuln-calculator/node_modules/make-fetch-happen +!/@npmcli/metavuln-calculator/node_modules/minipass !/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch !/@npmcli/metavuln-calculator/node_modules/pacote !/@npmcli/name-from-folder @@ -68,9 +69,6 @@ !/buffer !/builtins !/cacache -!/cacache/node_modules/ -/cacache/node_modules/* -!/cacache/node_modules/minipass !/chalk !/chownr !/ci-info @@ -110,9 +108,6 @@ !/fastest-levenshtein !/foreground-child !/fs-minipass -!/fs-minipass/node_modules/ -/fs-minipass/node_modules/* -!/fs-minipass/node_modules/minipass !/fs.realpath !/function-bind !/gauge @@ -149,15 +144,15 @@ !/just-diff !/lru-cache !/make-fetch-happen -!/make-fetch-happen/node_modules/ -/make-fetch-happen/node_modules/* -!/make-fetch-happen/node_modules/minipass !/minimatch !/minipass-collect !/minipass-collect/node_modules/ /minipass-collect/node_modules/* !/minipass-collect/node_modules/minipass !/minipass-fetch +!/minipass-fetch/node_modules/ +/minipass-fetch/node_modules/* +!/minipass-fetch/node_modules/minipass !/minipass-flush !/minipass-flush/node_modules/ /minipass-flush/node_modules/* @@ -193,6 +188,7 @@ !/node-gyp/node_modules/glob !/node-gyp/node_modules/make-fetch-happen !/node-gyp/node_modules/minimatch +!/node-gyp/node_modules/minipass !/node-gyp/node_modules/nopt !/node-gyp/node_modules/npmlog !/node-gyp/node_modules/readable-stream @@ -209,17 +205,11 @@ !/npm-pick-manifest !/npm-profile !/npm-registry-fetch -!/npm-registry-fetch/node_modules/ -/npm-registry-fetch/node_modules/* -!/npm-registry-fetch/node_modules/minipass !/npm-user-validate !/npmlog !/once !/p-map !/pacote -!/pacote/node_modules/ -/pacote/node_modules/* -!/pacote/node_modules/minipass !/parse-conflict-json !/path-is-absolute !/path-key @@ -262,6 +252,7 @@ !/sigstore/node_modules/ /sigstore/node_modules/* !/sigstore/node_modules/make-fetch-happen +!/sigstore/node_modules/minipass !/smart-buffer 
!/socks-proxy-agent !/socks @@ -270,9 +261,6 @@ !/spdx-expression-parse !/spdx-license-ids !/ssri -!/ssri/node_modules/ -/ssri/node_modules/* -!/ssri/node_modules/minipass !/string_decoder !/string-width-cjs !/string-width @@ -286,6 +274,7 @@ !/tar/node_modules/fs-minipass/node_modules/ /tar/node_modules/fs-minipass/node_modules/* !/tar/node_modules/fs-minipass/node_modules/minipass +!/tar/node_modules/minipass !/text-table !/tiny-relative-date !/treeverse @@ -293,6 +282,7 @@ !/tuf-js/node_modules/ /tuf-js/node_modules/* !/tuf-js/node_modules/make-fetch-happen +!/tuf-js/node_modules/minipass !/unique-filename !/unique-slug !/util-deprecate diff --git a/node_modules/cacache/node_modules/minipass/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/LICENSE similarity index 100% rename from node_modules/cacache/node_modules/minipass/LICENSE rename to node_modules/@npmcli/metavuln-calculator/node_modules/minipass/LICENSE diff --git a/node_modules/minipass/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/index.js similarity index 100% rename from node_modules/minipass/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/minipass/index.js diff --git a/node_modules/minipass/index.mjs b/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/index.mjs similarity index 100% rename from node_modules/minipass/index.mjs rename to node_modules/@npmcli/metavuln-calculator/node_modules/minipass/index.mjs diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/package.json new file mode 100644 index 0000000000000..0e20e988047f2 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/package.json @@ -0,0 +1,76 @@ +{ + "name": "minipass", + "version": "5.0.0", + "description": "minimal implementation of a PassThrough stream", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", + "exports": { + ".": { + "import": { + "types": "./index.d.ts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + } + }, + "./package.json": "./package.json" + }, + "devDependencies": { + "@types/node": "^17.0.41", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "prettier": "^2.6.2", + "tap": "^16.2.0", + "through2": "^2.0.3", + "ts-node": "^10.8.1", + "typedoc": "^0.23.24", + "typescript": "^4.7.3" + }, + "scripts": { + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "node ./scripts/transpile-to-esm.js", + "snap": "tap", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags", + "typedoc": "typedoc ./index.d.ts", + "format": "prettier --write . --loglevel warn" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minipass.git" + }, + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "index.d.ts", + "index.js", + "index.mjs" + ], + "tap": { + "check-coverage": true + }, + "engines": { + "node": ">=8" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/node_modules/cacache/node_modules/minipass/package.json b/node_modules/cacache/node_modules/minipass/package.json deleted file mode 100644 index 6faaa247a5bc6..0000000000000 --- a/node_modules/cacache/node_modules/minipass/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "minipass", - "version": "7.0.3", - "description": "minimal implementation of a PassThrough stream", - "main": "./dist/cjs/index.js", - "module": "./dist/mjs/index.js", - "types": "./dist/cjs/index.js", - "exports": { - ".": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" - }, - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.js" - } - }, - "./package.json": "./package.json" - }, - "files": [ - "dist" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", - "format": "prettier --write . --loglevel warn", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" - }, - "tap": { - "coverage": false, - "node-arg": [ - "--enable-source-maps", - "--no-warnings", - "--loader", - "ts-node/esm" - ], - "ts": false - }, - "prettier": { - "semi": false, - "printWidth": 75, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "devDependencies": { - "@types/node": "^20.1.2", - "@types/tap": "^15.0.8", - "c8": "^7.13.0", - "prettier": "^2.6.2", - "tap": "^16.3.0", - "ts-node": "^10.9.1", - "typedoc": "^0.24.8", - "typescript": "^5.1.3", - "end-of-stream": "^1.4.0", - "node-abort-controller": "^3.1.1", - "sync-content": "^1.0.2", - "through2": "^2.0.3" - }, - "repository": "https://github.com/isaacs/minipass", - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } -} diff --git a/node_modules/fs-minipass/node_modules/minipass/dist/cjs/index.js b/node_modules/fs-minipass/node_modules/minipass/dist/cjs/index.js deleted file mode 100644 index b6cdae8eb514b..0000000000000 --- a/node_modules/fs-minipass/node_modules/minipass/dist/cjs/index.js +++ /dev/null @@ -1,1028 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0; -const proc = typeof process === 'object' && process - ? 
process - : { - stdout: null, - stderr: null, - }; -const events_1 = require("events"); -const stream_1 = __importDefault(require("stream")); -const string_decoder_1 = require("string_decoder"); -/** - * Return true if the argument is a Minipass stream, Node stream, or something - * else that Minipass can interact with. - */ -const isStream = (s) => !!s && - typeof s === 'object' && - (s instanceof Minipass || - s instanceof stream_1.default || - (0, exports.isReadable)(s) || - (0, exports.isWritable)(s)); -exports.isStream = isStream; -/** - * Return true if the argument is a valid {@link Minipass.Readable} - */ -const isReadable = (s) => !!s && - typeof s === 'object' && - s instanceof events_1.EventEmitter && - typeof s.pipe === 'function' && - // node core Writable streams have a pipe() method, but it throws - s.pipe !== stream_1.default.Writable.prototype.pipe; -exports.isReadable = isReadable; -/** - * Return true if the argument is a valid {@link Minipass.Writable} - */ -const isWritable = (s) => !!s && - typeof s === 'object' && - s instanceof events_1.EventEmitter && - typeof s.write === 'function' && - typeof s.end === 'function'; -exports.isWritable = isWritable; -const EOF = Symbol('EOF'); -const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); -const EMITTED_END = Symbol('emittedEnd'); -const EMITTING_END = Symbol('emittingEnd'); -const EMITTED_ERROR = Symbol('emittedError'); -const CLOSED = Symbol('closed'); -const READ = Symbol('read'); -const FLUSH = Symbol('flush'); -const FLUSHCHUNK = Symbol('flushChunk'); -const ENCODING = Symbol('encoding'); -const DECODER = Symbol('decoder'); -const FLOWING = Symbol('flowing'); -const PAUSED = Symbol('paused'); -const RESUME = Symbol('resume'); -const BUFFER = Symbol('buffer'); -const PIPES = Symbol('pipes'); -const BUFFERLENGTH = Symbol('bufferLength'); -const BUFFERPUSH = Symbol('bufferPush'); -const BUFFERSHIFT = Symbol('bufferShift'); -const OBJECTMODE = Symbol('objectMode'); -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed'); -// internal event when stream has an error -const ERROR = Symbol('error'); -const EMITDATA = Symbol('emitData'); -const EMITEND = Symbol('emitEnd'); -const EMITEND2 = Symbol('emitEnd2'); -const ASYNC = Symbol('async'); -const ABORT = Symbol('abort'); -const ABORTED = Symbol('aborted'); -const SIGNAL = Symbol('signal'); -const DATALISTENERS = Symbol('dataListeners'); -const DISCARDED = Symbol('discarded'); -const defer = (fn) => Promise.resolve().then(fn); -const nodefer = (fn) => fn(); -const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; -const isArrayBufferLike = (b) => b instanceof ArrayBuffer || - (!!b && - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0); -const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); -/** - * Internal class representing a pipe to a destination stream. - * - * @internal - */ -class Pipe { - src; - dest; - opts; - ondrain; - constructor(src, dest, opts) { - this.src = src; - this.dest = dest; - this.opts = opts; - this.ondrain = () => src[RESUME](); - this.dest.on('drain', this.ondrain); - } - unpipe() { - this.dest.removeListener('drain', this.ondrain); - } - // only here for the prototype - /* c8 ignore start */ - proxyErrors(_er) { } - /* c8 ignore stop */ - end() { - this.unpipe(); - if (this.opts.end) - this.dest.end(); - } -} -/** - * Internal class representing a pipe to a destination stream where - * errors are proxied. 
- * - * @internal - */ -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors); - super.unpipe(); - } - constructor(src, dest, opts) { - super(src, dest, opts); - this.proxyErrors = er => dest.emit('error', er); - src.on('error', this.proxyErrors); - } -} -const isObjectModeOptions = (o) => !!o.objectMode; -const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; -/** - * Main export, the Minipass class - * - * `RType` is the type of data emitted, defaults to Buffer - * - * `WType` is the type of data to be written, if RType is buffer or string, - * then any {@link Minipass.ContiguousData} is allowed. - * - * `Events` is the set of event handler signatures that this object - * will emit, see {@link Minipass.Events} - */ -class Minipass extends events_1.EventEmitter { - [FLOWING] = false; - [PAUSED] = false; - [PIPES] = []; - [BUFFER] = []; - [OBJECTMODE]; - [ENCODING]; - [ASYNC]; - [DECODER]; - [EOF] = false; - [EMITTED_END] = false; - [EMITTING_END] = false; - [CLOSED] = false; - [EMITTED_ERROR] = null; - [BUFFERLENGTH] = 0; - [DESTROYED] = false; - [SIGNAL]; - [ABORTED] = false; - [DATALISTENERS] = 0; - [DISCARDED] = false; - /** - * true if the stream can be written - */ - writable = true; - /** - * true if the stream can be read - */ - readable = true; - /** - * If `RType` is Buffer, then options do not need to be provided. - * Otherwise, an options object must be provided to specify either - * {@link Minipass.SharedOptions.objectMode} or - * {@link Minipass.SharedOptions.encoding}, as appropriate. - */ - constructor(...args) { - const options = (args[0] || - {}); - super(); - if (options.objectMode && typeof options.encoding === 'string') { - throw new TypeError('Encoding and objectMode may not be used together'); - } - if (isObjectModeOptions(options)) { - this[OBJECTMODE] = true; - this[ENCODING] = null; - } - else if (isEncodingOptions(options)) { - this[ENCODING] = options.encoding; - this[OBJECTMODE] = false; - } - else { - this[OBJECTMODE] = false; - this[ENCODING] = null; - } - this[ASYNC] = !!options.async; - this[DECODER] = this[ENCODING] - ? new string_decoder_1.StringDecoder(this[ENCODING]) - : null; - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); - } - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); - } - const { signal } = options; - if (signal) { - this[SIGNAL] = signal; - if (signal.aborted) { - this[ABORT](); - } - else { - signal.addEventListener('abort', () => this[ABORT]()); - } - } - } - /** - * The amount of data stored in the buffer waiting to be read. - * - * For Buffer strings, this will be the total byte length. - * For string encoding streams, this will be the string character length, - * according to JavaScript's `string.length` logic. - * For objectMode streams, this is a count of the items waiting to be - * emitted. 
- */ - get bufferLength() { - return this[BUFFERLENGTH]; - } - /** - * The `BufferEncoding` currently in use, or `null` - */ - get encoding() { - return this[ENCODING]; - } - /** - * @deprecated - This is a read only property - */ - set encoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * @deprecated - Encoding may only be set at instantiation time - */ - setEncoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * True if this is an objectMode stream - */ - get objectMode() { - return this[OBJECTMODE]; - } - /** - * @deprecated - This is a read-only property - */ - set objectMode(_om) { - throw new Error('objectMode must be set at instantiation time'); - } - /** - * true if this is an async stream - */ - get ['async']() { - return this[ASYNC]; - } - /** - * Set to true to make this stream async. - * - * Once set, it cannot be unset, as this would potentially cause incorrect - * behavior. Ie, a sync stream can be made async, but an async stream - * cannot be safely made sync. - */ - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a; - } - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true; - this.emit('abort', this[SIGNAL]?.reason); - this.destroy(this[SIGNAL]?.reason); - } - /** - * True if the stream has been aborted. - */ - get aborted() { - return this[ABORTED]; - } - /** - * No-op setter. Stream aborted status is set via the AbortSignal provided - * in the constructor options. - */ - set aborted(_) { } - write(chunk, encoding, cb) { - if (this[ABORTED]) - return false; - if (this[EOF]) - throw new Error('write after end'); - if (this[DESTROYED]) { - this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); - return true; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (!encoding) - encoding = 'utf8'; - const fn = this[ASYNC] ? defer : nodefer; - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything is only allowed if in object mode, so throw - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); - } - else if (isArrayBufferLike(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk); - } - else if (typeof chunk !== 'string') { - throw new Error('Non-contiguous data written to non-objectMode stream'); - } - } - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - // maybe impossible? 
- /* c8 ignore start */ - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - /* c8 ignore stop */ - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { - //@ts-ignore - sinful unsafe type change - chunk = Buffer.from(chunk, encoding); - } - if (Buffer.isBuffer(chunk) && this[ENCODING]) { - //@ts-ignore - sinful unsafe type change - chunk = this[DECODER].write(chunk); - } - // Note: flushing CAN potentially switch us into not-flowing mode - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - /** - * Low-level explicit read method. - * - * In objectMode, the argument is ignored, and one item is returned if - * available. - * - * `n` is the number of bytes (or in the case of encoding streams, - * characters) to consume. If `n` is not provided, then the entire buffer - * is returned, or `null` is returned if no data is available. - * - * If `n` is greater that the amount of data in the internal buffer, - * then `null` is returned. - */ - read(n) { - if (this[DESTROYED]) - return null; - this[DISCARDED] = false; - if (this[BUFFERLENGTH] === 0 || - n === 0 || - (n && n > this[BUFFERLENGTH])) { - this[MAYBE_EMIT_END](); - return null; - } - if (this[OBJECTMODE]) - n = null; - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - // not object mode, so if we have an encoding, then RType is string - // otherwise, must be Buffer - this[BUFFER] = [ - (this[ENCODING] - ? this[BUFFER].join('') - : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), - ]; - } - const ret = this[READ](n || null, this[BUFFER][0]); - this[MAYBE_EMIT_END](); - return ret; - } - [READ](n, chunk) { - if (this[OBJECTMODE]) - this[BUFFERSHIFT](); - else { - const c = chunk; - if (n === c.length || n === null) - this[BUFFERSHIFT](); - else if (typeof c === 'string') { - this[BUFFER][0] = c.slice(n); - chunk = c.slice(0, n); - this[BUFFERLENGTH] -= n; - } - else { - this[BUFFER][0] = c.subarray(n); - chunk = c.subarray(0, n); - this[BUFFERLENGTH] -= n; - } - } - this.emit('data', chunk); - if (!this[BUFFER].length && !this[EOF]) - this.emit('drain'); - return chunk; - } - end(chunk, encoding, cb) { - if (typeof chunk === 'function') { - cb = chunk; - chunk = undefined; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (chunk !== undefined) - this.write(chunk, encoding); - if (cb) - this.once('end', cb); - this[EOF] = true; - this.writable = false; - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
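// Editor-added sketch of the write()/end()/read() semantics documented
// above (not part of the deleted file; assumes minipass v7 is installed):
const { Minipass } = require('minipass')
const mp = new Minipass({ encoding: 'utf8' })
mp.write('hello, ')     // returns false: nothing is consuming yet
mp.end('world')
console.log(mp.read())  // 'hello, world' -- omitting n drains the whole buffer
console.log(mp.read())  // null -- the buffer is now empty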
- if (this[FLOWING] || !this[PAUSED]) - this[MAYBE_EMIT_END](); - return this; - } - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) - return; - if (!this[DATALISTENERS] && !this[PIPES].length) { - this[DISCARDED] = true; - } - this[PAUSED] = false; - this[FLOWING] = true; - this.emit('resume'); - if (this[BUFFER].length) - this[FLUSH](); - else if (this[EOF]) - this[MAYBE_EMIT_END](); - else - this.emit('drain'); - } - /** - * Resume the stream if it is currently in a paused state - * - * If called when there are no pipe destinations or `data` event listeners, - * this will place the stream in a "discarded" state, where all data will - * be thrown away. The discarded state is removed if a pipe destination or - * data handler is added, if pause() is called, or if any synchronous or - * asynchronous iteration is started. - */ - resume() { - return this[RESUME](); - } - /** - * Pause the stream - */ - pause() { - this[FLOWING] = false; - this[PAUSED] = true; - this[DISCARDED] = false; - } - /** - * true if the stream has been forcibly destroyed - */ - get destroyed() { - return this[DESTROYED]; - } - /** - * true if the stream is currently in a flowing state, meaning that - * any writes will be immediately emitted. - */ - get flowing() { - return this[FLOWING]; - } - /** - * true if the stream is currently in a paused state - */ - get paused() { - return this[PAUSED]; - } - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1; - else - this[BUFFERLENGTH] += chunk.length; - this[BUFFER].push(chunk); - } - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1; - else - this[BUFFERLENGTH] -= this[BUFFER][0].length; - return this[BUFFER].shift(); - } - [FLUSH](noDrain = false) { - do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && - this[BUFFER].length); - if (!noDrain && !this[BUFFER].length && !this[EOF]) - this.emit('drain'); - } - [FLUSHCHUNK](chunk) { - this.emit('data', chunk); - return this[FLOWING]; - } - /** - * Pipe all data emitted by this stream into the destination provided. - * - * Triggers the flow of data. - */ - pipe(dest, opts) { - if (this[DESTROYED]) - return dest; - this[DISCARDED] = false; - const ended = this[EMITTED_END]; - opts = opts || {}; - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false; - else - opts.end = opts.end !== false; - opts.proxyErrors = !!opts.proxyErrors; - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end(); - } - else { - // "as" here just ignores the WType, which pipes don't care about, - // since they're only consuming from us, and writing to the dest - this[PIPES].push(!opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)); - if (this[ASYNC]) - defer(() => this[RESUME]()); - else - this[RESUME](); - } - return dest; - } - /** - * Fully unhook a piped destination stream. - * - * If the destination stream was the only consumer of this stream (ie, - * there are no other piped destinations or `'data'` event listeners) - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. 
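// Editor-added sketch of pipe() as implemented above (not part of the
// deleted file): piping triggers the flow of data, and opts.end is forced
// off for process.stdout/stderr so they are never closed.
const { Minipass } = require('minipass')
const src = new Minipass({ encoding: 'utf8' })
src.pipe(process.stdout)
src.write('piped through\n')
src.end()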
- */ - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest); - if (p) { - if (this[PIPES].length === 1) { - if (this[FLOWING] && this[DATALISTENERS] === 0) { - this[FLOWING] = false; - } - this[PIPES] = []; - } - else - this[PIPES].splice(this[PIPES].indexOf(p), 1); - p.unpipe(); - } - } - /** - * Alias for {@link Minipass#on} - */ - addListener(ev, handler) { - return this.on(ev, handler); - } - /** - * Mostly identical to `EventEmitter.on`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * - Adding a 'data' event handler will trigger the flow of data - * - * - Adding a 'readable' event handler when there is data waiting to be read - * will cause 'readable' to be emitted immediately. - * - * - Adding an 'endish' event handler ('end', 'finish', etc.) which has - * already passed will cause the event to be emitted immediately and all - * handlers removed. - * - * - Adding an 'error' event handler after an error has been emitted will - * cause the event to be re-emitted immediately with the error previously - * raised. - */ - on(ev, handler) { - const ret = super.on(ev, handler); - if (ev === 'data') { - this[DISCARDED] = false; - this[DATALISTENERS]++; - if (!this[PIPES].length && !this[FLOWING]) { - this[RESUME](); - } - } - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { - super.emit('readable'); - } - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev); - this.removeAllListeners(ev); - } - else if (ev === 'error' && this[EMITTED_ERROR]) { - const h = handler; - if (this[ASYNC]) - defer(() => h.call(this, this[EMITTED_ERROR])); - else - h.call(this, this[EMITTED_ERROR]); - } - return ret; - } - /** - * Alias for {@link Minipass#off} - */ - removeListener(ev, handler) { - return this.off(ev, handler); - } - /** - * Mostly identical to `EventEmitter.off` - * - * If a 'data' event handler is removed, and it was the last consumer - * (ie, there are no pipe destinations or other 'data' event listeners), - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. - */ - off(ev, handler) { - const ret = super.off(ev, handler); - // if we previously had listeners, and now we don't, and we don't - // have any pipes, then stop the flow, unless it's been explicitly - // put in a discarded flowing state via stream.resume(). - if (ev === 'data') { - this[DATALISTENERS] = this.listeners('data').length; - if (this[DATALISTENERS] === 0 && - !this[DISCARDED] && - !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * Mostly identical to `EventEmitter.removeAllListeners` - * - * If all 'data' event handlers are removed, and they were the last consumer - * (ie, there are no pipe destinations), then the flow of data will stop - * until there is another consumer or {@link Minipass#resume} is explicitly - * called. 
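// Editor-added sketch of the listener semantics documented above (not part
// of the deleted file): adding a 'data' handler starts the flow, and endish
// events still fire for listeners attached after the fact.
const { Minipass } = require('minipass')
const late = new Minipass({ encoding: 'utf8' })
late.write('chunk')
late.end()
late.on('data', c => console.log('data:', c)) // triggers flow: logs 'chunk'
late.on('end', () => console.log('done'))     // already ended: fires immediately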
- */ - removeAllListeners(ev) { - const ret = super.removeAllListeners(ev); - if (ev === 'data' || ev === undefined) { - this[DATALISTENERS] = 0; - if (!this[DISCARDED] && !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * true if the 'end' event has been emitted - */ - get emittedEnd() { - return this[EMITTED_END]; - } - [MAYBE_EMIT_END]() { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF]) { - this[EMITTING_END] = true; - this.emit('end'); - this.emit('prefinish'); - this.emit('finish'); - if (this[CLOSED]) - this.emit('close'); - this[EMITTING_END] = false; - } - } - /** - * Mostly identical to `EventEmitter.emit`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * If the stream has been destroyed, and the event is something other - * than 'close' or 'error', then `false` is returned and no handlers - * are called. - * - * If the event is 'end', and has already been emitted, then the event - * is ignored. If the stream is in a paused or non-flowing state, then - * the event will be deferred until data flow resumes. If the stream is - * async, then handlers will be called on the next tick rather than - * immediately. - * - * If the event is 'close', and 'end' has not yet been emitted, then - * the event will be deferred until after 'end' is emitted. - * - * If the event is 'error', and an AbortSignal was provided for the stream, - * and there are no listeners, then the event is ignored, matching the - * behavior of node core streams in the presense of an AbortSignal. - * - * If the event is 'finish' or 'prefinish', then all listeners will be - * removed after emitting the event, to prevent double-firing. - */ - emit(ev, ...args) { - const data = args[0]; - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && - ev !== 'close' && - ev !== DESTROYED && - this[DESTROYED]) { - return false; - } - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? (defer(() => this[EMITDATA](data)), true) - : this[EMITDATA](data); - } - else if (ev === 'end') { - return this[EMITEND](); - } - else if (ev === 'close') { - this[CLOSED] = true; - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return false; - const ret = super.emit('close'); - this.removeAllListeners('close'); - return ret; - } - else if (ev === 'error') { - this[EMITTED_ERROR] = data; - super.emit(ERROR, data); - const ret = !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false; - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'resume') { - const ret = super.emit('resume'); - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev); - this.removeAllListeners(ev); - return ret; - } - // Some other unknown event - const ret = super.emit(ev, ...args); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) - this.pause(); - } - const ret = this[DISCARDED] ? false : super.emit('data', data); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITEND]() { - if (this[EMITTED_END]) - return false; - this[EMITTED_END] = true; - this.readable = false; - return this[ASYNC] - ? 
(defer(() => this[EMITEND2]()), true) - : this[EMITEND2](); - } - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end(); - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data); - } - if (!this[DISCARDED]) - super.emit('data', data); - } - } - for (const p of this[PIPES]) { - p.end(); - } - const ret = super.emit('end'); - this.removeAllListeners('end'); - return ret; - } - /** - * Return a Promise that resolves to an array of all emitted data once - * the stream ends. - */ - async collect() { - const buf = Object.assign([], { - dataLength: 0, - }); - if (!this[OBJECTMODE]) - buf.dataLength = 0; - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise(); - this.on('data', c => { - buf.push(c); - if (!this[OBJECTMODE]) - buf.dataLength += c.length; - }); - await p; - return buf; - } - /** - * Return a Promise that resolves to the concatenation of all emitted data - * once the stream ends. - * - * Not allowed on objectMode streams. - */ - async concat() { - if (this[OBJECTMODE]) { - throw new Error('cannot concat in objectMode'); - } - const buf = await this.collect(); - return (this[ENCODING] - ? buf.join('') - : Buffer.concat(buf, buf.dataLength)); - } - /** - * Return a void Promise that resolves once the stream ends. - */ - async promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))); - this.on('error', er => reject(er)); - this.on('end', () => resolve()); - }); - } - /** - * Asynchronous `for await of` iteration. - * - * This will continue emitting all chunks until the stream terminates. - */ - [Symbol.asyncIterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. - this[DISCARDED] = false; - let stopped = false; - const stop = async () => { - this.pause(); - stopped = true; - return { value: undefined, done: true }; - }; - const next = () => { - if (stopped) - return stop(); - const res = this.read(); - if (res !== null) - return Promise.resolve({ done: false, value: res }); - if (this[EOF]) - return stop(); - let resolve; - let reject; - const onerr = (er) => { - this.off('data', ondata); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - stop(); - reject(er); - }; - const ondata = (value) => { - this.off('error', onerr); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - this.pause(); - resolve({ value, done: !!this[EOF] }); - }; - const onend = () => { - this.off('error', onerr); - this.off('data', ondata); - this.off(DESTROYED, ondestroy); - stop(); - resolve({ done: true, value: undefined }); - }; - const ondestroy = () => onerr(new Error('stream destroyed')); - return new Promise((res, rej) => { - reject = rej; - resolve = res; - this.once(DESTROYED, ondestroy); - this.once('error', onerr); - this.once('end', onend); - this.once('data', ondata); - }); - }; - return { - next, - throw: stop, - return: stop, - [Symbol.asyncIterator]() { - return this; - }, - }; - } - /** - * Synchronous `for of` iteration. - * - * The iteration will terminate when the internal buffer runs out, even - * if the stream has not yet terminated. - */ - [Symbol.iterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. 
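// Editor-added sketch of the promise helpers and async iteration support
// documented above (not part of the deleted file):
const { Minipass } = require('minipass')
async function demo () {
  const text = new Minipass({ encoding: 'utf8' })
  text.end('some data')
  console.log(await text.concat())  // 'some data' (collect() + join)
  const items = new Minipass({ objectMode: true })
  items.write(1)
  items.write(2)
  items.end()
  for await (const item of items) {
    console.log(item)               // 1, then 2
  }
}
demo()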
- this[DISCARDED] = false; - let stopped = false; - const stop = () => { - this.pause(); - this.off(ERROR, stop); - this.off(DESTROYED, stop); - this.off('end', stop); - stopped = true; - return { done: true, value: undefined }; - }; - const next = () => { - if (stopped) - return stop(); - const value = this.read(); - return value === null ? stop() : { done: false, value }; - }; - this.once('end', stop); - this.once(ERROR, stop); - this.once(DESTROYED, stop); - return { - next, - throw: stop, - return: stop, - [Symbol.iterator]() { - return this; - }, - }; - } - /** - * Destroy a stream, preventing it from being used for any further purpose. - * - * If the stream has a `close()` method, then it will be called on - * destruction. - * - * After destruction, any attempt to write data, read data, or emit most - * events will be ignored. - * - * If an error argument is provided, then it will be emitted in an - * 'error' event. - */ - destroy(er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er); - else - this.emit(DESTROYED); - return this; - } - this[DESTROYED] = true; - this[DISCARDED] = true; - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0; - this[BUFFERLENGTH] = 0; - const wc = this; - if (typeof wc.close === 'function' && !this[CLOSED]) - wc.close(); - if (er) - this.emit('error', er); - // if no error to emit, still reject pending promises - else - this.emit(DESTROYED); - return this; - } - /** - * Alias for {@link isStream} - * - * Former export location, maintained for backwards compatibility. - * - * @deprecated - */ - static get isStream() { - return exports.isStream; - } -} -exports.Minipass = Minipass; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/fs-minipass/node_modules/minipass/dist/cjs/package.json b/node_modules/fs-minipass/node_modules/minipass/dist/cjs/package.json deleted file mode 100644 index 5bbefffbabee3..0000000000000 --- a/node_modules/fs-minipass/node_modules/minipass/dist/cjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/fs-minipass/node_modules/minipass/dist/mjs/index.js b/node_modules/fs-minipass/node_modules/minipass/dist/mjs/index.js deleted file mode 100644 index b65fafbae43a4..0000000000000 --- a/node_modules/fs-minipass/node_modules/minipass/dist/mjs/index.js +++ /dev/null @@ -1,1018 +0,0 @@ -const proc = typeof process === 'object' && process - ? process - : { - stdout: null, - stderr: null, - }; -import { EventEmitter } from 'events'; -import Stream from 'stream'; -import { StringDecoder } from 'string_decoder'; -/** - * Return true if the argument is a Minipass stream, Node stream, or something - * else that Minipass can interact with. 
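// Editor-added sketch of destroy() as documented above (not part of the
// deleted file): destruction discards buffered data and rejects later use.
const { Minipass } = require('minipass')
const doomed = new Minipass()
doomed.on('error', er => console.error('stream error:', er.message))
doomed.write(Buffer.from('never read'))
doomed.destroy(new Error('no longer needed'))  // emits 'error' with the reason
console.log(doomed.destroyed)                  // true
doomed.write(Buffer.from('ignored'))           // error: ERR_STREAM_DESTROYED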
- */ -export const isStream = (s) => !!s && - typeof s === 'object' && - (s instanceof Minipass || - s instanceof Stream || - isReadable(s) || - isWritable(s)); -/** - * Return true if the argument is a valid {@link Minipass.Readable} - */ -export const isReadable = (s) => !!s && - typeof s === 'object' && - s instanceof EventEmitter && - typeof s.pipe === 'function' && - // node core Writable streams have a pipe() method, but it throws - s.pipe !== Stream.Writable.prototype.pipe; -/** - * Return true if the argument is a valid {@link Minipass.Writable} - */ -export const isWritable = (s) => !!s && - typeof s === 'object' && - s instanceof EventEmitter && - typeof s.write === 'function' && - typeof s.end === 'function'; -const EOF = Symbol('EOF'); -const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); -const EMITTED_END = Symbol('emittedEnd'); -const EMITTING_END = Symbol('emittingEnd'); -const EMITTED_ERROR = Symbol('emittedError'); -const CLOSED = Symbol('closed'); -const READ = Symbol('read'); -const FLUSH = Symbol('flush'); -const FLUSHCHUNK = Symbol('flushChunk'); -const ENCODING = Symbol('encoding'); -const DECODER = Symbol('decoder'); -const FLOWING = Symbol('flowing'); -const PAUSED = Symbol('paused'); -const RESUME = Symbol('resume'); -const BUFFER = Symbol('buffer'); -const PIPES = Symbol('pipes'); -const BUFFERLENGTH = Symbol('bufferLength'); -const BUFFERPUSH = Symbol('bufferPush'); -const BUFFERSHIFT = Symbol('bufferShift'); -const OBJECTMODE = Symbol('objectMode'); -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed'); -// internal event when stream has an error -const ERROR = Symbol('error'); -const EMITDATA = Symbol('emitData'); -const EMITEND = Symbol('emitEnd'); -const EMITEND2 = Symbol('emitEnd2'); -const ASYNC = Symbol('async'); -const ABORT = Symbol('abort'); -const ABORTED = Symbol('aborted'); -const SIGNAL = Symbol('signal'); -const DATALISTENERS = Symbol('dataListeners'); -const DISCARDED = Symbol('discarded'); -const defer = (fn) => Promise.resolve().then(fn); -const nodefer = (fn) => fn(); -const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; -const isArrayBufferLike = (b) => b instanceof ArrayBuffer || - (!!b && - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0); -const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); -/** - * Internal class representing a pipe to a destination stream. - * - * @internal - */ -class Pipe { - src; - dest; - opts; - ondrain; - constructor(src, dest, opts) { - this.src = src; - this.dest = dest; - this.opts = opts; - this.ondrain = () => src[RESUME](); - this.dest.on('drain', this.ondrain); - } - unpipe() { - this.dest.removeListener('drain', this.ondrain); - } - // only here for the prototype - /* c8 ignore start */ - proxyErrors(_er) { } - /* c8 ignore stop */ - end() { - this.unpipe(); - if (this.opts.end) - this.dest.end(); - } -} -/** - * Internal class representing a pipe to a destination stream where - * errors are proxied. 
- * - * @internal - */ -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors); - super.unpipe(); - } - constructor(src, dest, opts) { - super(src, dest, opts); - this.proxyErrors = er => dest.emit('error', er); - src.on('error', this.proxyErrors); - } -} -const isObjectModeOptions = (o) => !!o.objectMode; -const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; -/** - * Main export, the Minipass class - * - * `RType` is the type of data emitted, defaults to Buffer - * - * `WType` is the type of data to be written, if RType is buffer or string, - * then any {@link Minipass.ContiguousData} is allowed. - * - * `Events` is the set of event handler signatures that this object - * will emit, see {@link Minipass.Events} - */ -export class Minipass extends EventEmitter { - [FLOWING] = false; - [PAUSED] = false; - [PIPES] = []; - [BUFFER] = []; - [OBJECTMODE]; - [ENCODING]; - [ASYNC]; - [DECODER]; - [EOF] = false; - [EMITTED_END] = false; - [EMITTING_END] = false; - [CLOSED] = false; - [EMITTED_ERROR] = null; - [BUFFERLENGTH] = 0; - [DESTROYED] = false; - [SIGNAL]; - [ABORTED] = false; - [DATALISTENERS] = 0; - [DISCARDED] = false; - /** - * true if the stream can be written - */ - writable = true; - /** - * true if the stream can be read - */ - readable = true; - /** - * If `RType` is Buffer, then options do not need to be provided. - * Otherwise, an options object must be provided to specify either - * {@link Minipass.SharedOptions.objectMode} or - * {@link Minipass.SharedOptions.encoding}, as appropriate. - */ - constructor(...args) { - const options = (args[0] || - {}); - super(); - if (options.objectMode && typeof options.encoding === 'string') { - throw new TypeError('Encoding and objectMode may not be used together'); - } - if (isObjectModeOptions(options)) { - this[OBJECTMODE] = true; - this[ENCODING] = null; - } - else if (isEncodingOptions(options)) { - this[ENCODING] = options.encoding; - this[OBJECTMODE] = false; - } - else { - this[OBJECTMODE] = false; - this[ENCODING] = null; - } - this[ASYNC] = !!options.async; - this[DECODER] = this[ENCODING] - ? new StringDecoder(this[ENCODING]) - : null; - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); - } - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); - } - const { signal } = options; - if (signal) { - this[SIGNAL] = signal; - if (signal.aborted) { - this[ABORT](); - } - else { - signal.addEventListener('abort', () => this[ABORT]()); - } - } - } - /** - * The amount of data stored in the buffer waiting to be read. - * - * For Buffer strings, this will be the total byte length. - * For string encoding streams, this will be the string character length, - * according to JavaScript's `string.length` logic. - * For objectMode streams, this is a count of the items waiting to be - * emitted. 
- */ - get bufferLength() { - return this[BUFFERLENGTH]; - } - /** - * The `BufferEncoding` currently in use, or `null` - */ - get encoding() { - return this[ENCODING]; - } - /** - * @deprecated - This is a read only property - */ - set encoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * @deprecated - Encoding may only be set at instantiation time - */ - setEncoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * True if this is an objectMode stream - */ - get objectMode() { - return this[OBJECTMODE]; - } - /** - * @deprecated - This is a read-only property - */ - set objectMode(_om) { - throw new Error('objectMode must be set at instantiation time'); - } - /** - * true if this is an async stream - */ - get ['async']() { - return this[ASYNC]; - } - /** - * Set to true to make this stream async. - * - * Once set, it cannot be unset, as this would potentially cause incorrect - * behavior. Ie, a sync stream can be made async, but an async stream - * cannot be safely made sync. - */ - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a; - } - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true; - this.emit('abort', this[SIGNAL]?.reason); - this.destroy(this[SIGNAL]?.reason); - } - /** - * True if the stream has been aborted. - */ - get aborted() { - return this[ABORTED]; - } - /** - * No-op setter. Stream aborted status is set via the AbortSignal provided - * in the constructor options. - */ - set aborted(_) { } - write(chunk, encoding, cb) { - if (this[ABORTED]) - return false; - if (this[EOF]) - throw new Error('write after end'); - if (this[DESTROYED]) { - this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); - return true; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (!encoding) - encoding = 'utf8'; - const fn = this[ASYNC] ? defer : nodefer; - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything is only allowed if in object mode, so throw - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); - } - else if (isArrayBufferLike(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk); - } - else if (typeof chunk !== 'string') { - throw new Error('Non-contiguous data written to non-objectMode stream'); - } - } - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - // maybe impossible? 
- /* c8 ignore start */ - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - /* c8 ignore stop */ - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { - //@ts-ignore - sinful unsafe type change - chunk = Buffer.from(chunk, encoding); - } - if (Buffer.isBuffer(chunk) && this[ENCODING]) { - //@ts-ignore - sinful unsafe type change - chunk = this[DECODER].write(chunk); - } - // Note: flushing CAN potentially switch us into not-flowing mode - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - /** - * Low-level explicit read method. - * - * In objectMode, the argument is ignored, and one item is returned if - * available. - * - * `n` is the number of bytes (or in the case of encoding streams, - * characters) to consume. If `n` is not provided, then the entire buffer - * is returned, or `null` is returned if no data is available. - * - * If `n` is greater that the amount of data in the internal buffer, - * then `null` is returned. - */ - read(n) { - if (this[DESTROYED]) - return null; - this[DISCARDED] = false; - if (this[BUFFERLENGTH] === 0 || - n === 0 || - (n && n > this[BUFFERLENGTH])) { - this[MAYBE_EMIT_END](); - return null; - } - if (this[OBJECTMODE]) - n = null; - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - // not object mode, so if we have an encoding, then RType is string - // otherwise, must be Buffer - this[BUFFER] = [ - (this[ENCODING] - ? this[BUFFER].join('') - : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), - ]; - } - const ret = this[READ](n || null, this[BUFFER][0]); - this[MAYBE_EMIT_END](); - return ret; - } - [READ](n, chunk) { - if (this[OBJECTMODE]) - this[BUFFERSHIFT](); - else { - const c = chunk; - if (n === c.length || n === null) - this[BUFFERSHIFT](); - else if (typeof c === 'string') { - this[BUFFER][0] = c.slice(n); - chunk = c.slice(0, n); - this[BUFFERLENGTH] -= n; - } - else { - this[BUFFER][0] = c.subarray(n); - chunk = c.subarray(0, n); - this[BUFFERLENGTH] -= n; - } - } - this.emit('data', chunk); - if (!this[BUFFER].length && !this[EOF]) - this.emit('drain'); - return chunk; - } - end(chunk, encoding, cb) { - if (typeof chunk === 'function') { - cb = chunk; - chunk = undefined; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (chunk !== undefined) - this.write(chunk, encoding); - if (cb) - this.once('end', cb); - this[EOF] = true; - this.writable = false; - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this[FLOWING] || !this[PAUSED]) - this[MAYBE_EMIT_END](); - return this; - } - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) - return; - if (!this[DATALISTENERS] && !this[PIPES].length) { - this[DISCARDED] = true; - } - this[PAUSED] = false; - this[FLOWING] = true; - this.emit('resume'); - if (this[BUFFER].length) - this[FLUSH](); - else if (this[EOF]) - this[MAYBE_EMIT_END](); - else - this.emit('drain'); - } - /** - * Resume the stream if it is currently in a paused state - * - * If called when there are no pipe destinations or `data` event listeners, - * this will place the stream in a "discarded" state, where all data will - * be thrown away. The discarded state is removed if a pipe destination or - * data handler is added, if pause() is called, or if any synchronous or - * asynchronous iteration is started. - */ - resume() { - return this[RESUME](); - } - /** - * Pause the stream - */ - pause() { - this[FLOWING] = false; - this[PAUSED] = true; - this[DISCARDED] = false; - } - /** - * true if the stream has been forcibly destroyed - */ - get destroyed() { - return this[DESTROYED]; - } - /** - * true if the stream is currently in a flowing state, meaning that - * any writes will be immediately emitted. - */ - get flowing() { - return this[FLOWING]; - } - /** - * true if the stream is currently in a paused state - */ - get paused() { - return this[PAUSED]; - } - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1; - else - this[BUFFERLENGTH] += chunk.length; - this[BUFFER].push(chunk); - } - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1; - else - this[BUFFERLENGTH] -= this[BUFFER][0].length; - return this[BUFFER].shift(); - } - [FLUSH](noDrain = false) { - do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && - this[BUFFER].length); - if (!noDrain && !this[BUFFER].length && !this[EOF]) - this.emit('drain'); - } - [FLUSHCHUNK](chunk) { - this.emit('data', chunk); - return this[FLOWING]; - } - /** - * Pipe all data emitted by this stream into the destination provided. - * - * Triggers the flow of data. - */ - pipe(dest, opts) { - if (this[DESTROYED]) - return dest; - this[DISCARDED] = false; - const ended = this[EMITTED_END]; - opts = opts || {}; - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false; - else - opts.end = opts.end !== false; - opts.proxyErrors = !!opts.proxyErrors; - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end(); - } - else { - // "as" here just ignores the WType, which pipes don't care about, - // since they're only consuming from us, and writing to the dest - this[PIPES].push(!opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)); - if (this[ASYNC]) - defer(() => this[RESUME]()); - else - this[RESUME](); - } - return dest; - } - /** - * Fully unhook a piped destination stream. - * - * If the destination stream was the only consumer of this stream (ie, - * there are no other piped destinations or `'data'` event listeners) - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. 
- */ - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest); - if (p) { - if (this[PIPES].length === 1) { - if (this[FLOWING] && this[DATALISTENERS] === 0) { - this[FLOWING] = false; - } - this[PIPES] = []; - } - else - this[PIPES].splice(this[PIPES].indexOf(p), 1); - p.unpipe(); - } - } - /** - * Alias for {@link Minipass#on} - */ - addListener(ev, handler) { - return this.on(ev, handler); - } - /** - * Mostly identical to `EventEmitter.on`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * - Adding a 'data' event handler will trigger the flow of data - * - * - Adding a 'readable' event handler when there is data waiting to be read - * will cause 'readable' to be emitted immediately. - * - * - Adding an 'endish' event handler ('end', 'finish', etc.) which has - * already passed will cause the event to be emitted immediately and all - * handlers removed. - * - * - Adding an 'error' event handler after an error has been emitted will - * cause the event to be re-emitted immediately with the error previously - * raised. - */ - on(ev, handler) { - const ret = super.on(ev, handler); - if (ev === 'data') { - this[DISCARDED] = false; - this[DATALISTENERS]++; - if (!this[PIPES].length && !this[FLOWING]) { - this[RESUME](); - } - } - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { - super.emit('readable'); - } - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev); - this.removeAllListeners(ev); - } - else if (ev === 'error' && this[EMITTED_ERROR]) { - const h = handler; - if (this[ASYNC]) - defer(() => h.call(this, this[EMITTED_ERROR])); - else - h.call(this, this[EMITTED_ERROR]); - } - return ret; - } - /** - * Alias for {@link Minipass#off} - */ - removeListener(ev, handler) { - return this.off(ev, handler); - } - /** - * Mostly identical to `EventEmitter.off` - * - * If a 'data' event handler is removed, and it was the last consumer - * (ie, there are no pipe destinations or other 'data' event listeners), - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. - */ - off(ev, handler) { - const ret = super.off(ev, handler); - // if we previously had listeners, and now we don't, and we don't - // have any pipes, then stop the flow, unless it's been explicitly - // put in a discarded flowing state via stream.resume(). - if (ev === 'data') { - this[DATALISTENERS] = this.listeners('data').length; - if (this[DATALISTENERS] === 0 && - !this[DISCARDED] && - !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * Mostly identical to `EventEmitter.removeAllListeners` - * - * If all 'data' event handlers are removed, and they were the last consumer - * (ie, there are no pipe destinations), then the flow of data will stop - * until there is another consumer or {@link Minipass#resume} is explicitly - * called. 
- */ - removeAllListeners(ev) { - const ret = super.removeAllListeners(ev); - if (ev === 'data' || ev === undefined) { - this[DATALISTENERS] = 0; - if (!this[DISCARDED] && !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * true if the 'end' event has been emitted - */ - get emittedEnd() { - return this[EMITTED_END]; - } - [MAYBE_EMIT_END]() { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF]) { - this[EMITTING_END] = true; - this.emit('end'); - this.emit('prefinish'); - this.emit('finish'); - if (this[CLOSED]) - this.emit('close'); - this[EMITTING_END] = false; - } - } - /** - * Mostly identical to `EventEmitter.emit`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * If the stream has been destroyed, and the event is something other - * than 'close' or 'error', then `false` is returned and no handlers - * are called. - * - * If the event is 'end', and has already been emitted, then the event - * is ignored. If the stream is in a paused or non-flowing state, then - * the event will be deferred until data flow resumes. If the stream is - * async, then handlers will be called on the next tick rather than - * immediately. - * - * If the event is 'close', and 'end' has not yet been emitted, then - * the event will be deferred until after 'end' is emitted. - * - * If the event is 'error', and an AbortSignal was provided for the stream, - * and there are no listeners, then the event is ignored, matching the - * behavior of node core streams in the presense of an AbortSignal. - * - * If the event is 'finish' or 'prefinish', then all listeners will be - * removed after emitting the event, to prevent double-firing. - */ - emit(ev, ...args) { - const data = args[0]; - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && - ev !== 'close' && - ev !== DESTROYED && - this[DESTROYED]) { - return false; - } - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? (defer(() => this[EMITDATA](data)), true) - : this[EMITDATA](data); - } - else if (ev === 'end') { - return this[EMITEND](); - } - else if (ev === 'close') { - this[CLOSED] = true; - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return false; - const ret = super.emit('close'); - this.removeAllListeners('close'); - return ret; - } - else if (ev === 'error') { - this[EMITTED_ERROR] = data; - super.emit(ERROR, data); - const ret = !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false; - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'resume') { - const ret = super.emit('resume'); - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev); - this.removeAllListeners(ev); - return ret; - } - // Some other unknown event - const ret = super.emit(ev, ...args); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) - this.pause(); - } - const ret = this[DISCARDED] ? false : super.emit('data', data); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITEND]() { - if (this[EMITTED_END]) - return false; - this[EMITTED_END] = true; - this.readable = false; - return this[ASYNC] - ? 
(defer(() => this[EMITEND2]()), true) - : this[EMITEND2](); - } - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end(); - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data); - } - if (!this[DISCARDED]) - super.emit('data', data); - } - } - for (const p of this[PIPES]) { - p.end(); - } - const ret = super.emit('end'); - this.removeAllListeners('end'); - return ret; - } - /** - * Return a Promise that resolves to an array of all emitted data once - * the stream ends. - */ - async collect() { - const buf = Object.assign([], { - dataLength: 0, - }); - if (!this[OBJECTMODE]) - buf.dataLength = 0; - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise(); - this.on('data', c => { - buf.push(c); - if (!this[OBJECTMODE]) - buf.dataLength += c.length; - }); - await p; - return buf; - } - /** - * Return a Promise that resolves to the concatenation of all emitted data - * once the stream ends. - * - * Not allowed on objectMode streams. - */ - async concat() { - if (this[OBJECTMODE]) { - throw new Error('cannot concat in objectMode'); - } - const buf = await this.collect(); - return (this[ENCODING] - ? buf.join('') - : Buffer.concat(buf, buf.dataLength)); - } - /** - * Return a void Promise that resolves once the stream ends. - */ - async promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))); - this.on('error', er => reject(er)); - this.on('end', () => resolve()); - }); - } - /** - * Asynchronous `for await of` iteration. - * - * This will continue emitting all chunks until the stream terminates. - */ - [Symbol.asyncIterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. - this[DISCARDED] = false; - let stopped = false; - const stop = async () => { - this.pause(); - stopped = true; - return { value: undefined, done: true }; - }; - const next = () => { - if (stopped) - return stop(); - const res = this.read(); - if (res !== null) - return Promise.resolve({ done: false, value: res }); - if (this[EOF]) - return stop(); - let resolve; - let reject; - const onerr = (er) => { - this.off('data', ondata); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - stop(); - reject(er); - }; - const ondata = (value) => { - this.off('error', onerr); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - this.pause(); - resolve({ value, done: !!this[EOF] }); - }; - const onend = () => { - this.off('error', onerr); - this.off('data', ondata); - this.off(DESTROYED, ondestroy); - stop(); - resolve({ done: true, value: undefined }); - }; - const ondestroy = () => onerr(new Error('stream destroyed')); - return new Promise((res, rej) => { - reject = rej; - resolve = res; - this.once(DESTROYED, ondestroy); - this.once('error', onerr); - this.once('end', onend); - this.once('data', ondata); - }); - }; - return { - next, - throw: stop, - return: stop, - [Symbol.asyncIterator]() { - return this; - }, - }; - } - /** - * Synchronous `for of` iteration. - * - * The iteration will terminate when the internal buffer runs out, even - * if the stream has not yet terminated. - */ - [Symbol.iterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. 
- this[DISCARDED] = false; - let stopped = false; - const stop = () => { - this.pause(); - this.off(ERROR, stop); - this.off(DESTROYED, stop); - this.off('end', stop); - stopped = true; - return { done: true, value: undefined }; - }; - const next = () => { - if (stopped) - return stop(); - const value = this.read(); - return value === null ? stop() : { done: false, value }; - }; - this.once('end', stop); - this.once(ERROR, stop); - this.once(DESTROYED, stop); - return { - next, - throw: stop, - return: stop, - [Symbol.iterator]() { - return this; - }, - }; - } - /** - * Destroy a stream, preventing it from being used for any further purpose. - * - * If the stream has a `close()` method, then it will be called on - * destruction. - * - * After destruction, any attempt to write data, read data, or emit most - * events will be ignored. - * - * If an error argument is provided, then it will be emitted in an - * 'error' event. - */ - destroy(er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er); - else - this.emit(DESTROYED); - return this; - } - this[DESTROYED] = true; - this[DISCARDED] = true; - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0; - this[BUFFERLENGTH] = 0; - const wc = this; - if (typeof wc.close === 'function' && !this[CLOSED]) - wc.close(); - if (er) - this.emit('error', er); - // if no error to emit, still reject pending promises - else - this.emit(DESTROYED); - return this; - } - /** - * Alias for {@link isStream} - * - * Former export location, maintained for backwards compatibility. - * - * @deprecated - */ - static get isStream() { - return isStream; - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/fs-minipass/node_modules/minipass/dist/mjs/package.json b/node_modules/fs-minipass/node_modules/minipass/dist/mjs/package.json deleted file mode 100644 index 3dbc1ca591c05..0000000000000 --- a/node_modules/fs-minipass/node_modules/minipass/dist/mjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git a/node_modules/fs-minipass/node_modules/minipass/package.json b/node_modules/fs-minipass/node_modules/minipass/package.json deleted file mode 100644 index 6faaa247a5bc6..0000000000000 --- a/node_modules/fs-minipass/node_modules/minipass/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "minipass", - "version": "7.0.3", - "description": "minimal implementation of a PassThrough stream", - "main": "./dist/cjs/index.js", - "module": "./dist/mjs/index.js", - "types": "./dist/cjs/index.js", - "exports": { - ".": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" - }, - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.js" - } - }, - "./package.json": "./package.json" - }, - "files": [ - "dist" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", - "format": "prettier --write . 
--loglevel warn", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" - }, - "tap": { - "coverage": false, - "node-arg": [ - "--enable-source-maps", - "--no-warnings", - "--loader", - "ts-node/esm" - ], - "ts": false - }, - "prettier": { - "semi": false, - "printWidth": 75, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "devDependencies": { - "@types/node": "^20.1.2", - "@types/tap": "^15.0.8", - "c8": "^7.13.0", - "prettier": "^2.6.2", - "tap": "^16.3.0", - "ts-node": "^10.9.1", - "typedoc": "^0.24.8", - "typescript": "^5.1.3", - "end-of-stream": "^1.4.0", - "node-abort-controller": "^3.1.1", - "sync-content": "^1.0.2", - "through2": "^2.0.3" - }, - "repository": "https://github.com/isaacs/minipass", - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } -} diff --git a/node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/index.js b/node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/index.js deleted file mode 100644 index b6cdae8eb514b..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/index.js +++ /dev/null @@ -1,1028 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0; -const proc = typeof process === 'object' && process - ? process - : { - stdout: null, - stderr: null, - }; -const events_1 = require("events"); -const stream_1 = __importDefault(require("stream")); -const string_decoder_1 = require("string_decoder"); -/** - * Return true if the argument is a Minipass stream, Node stream, or something - * else that Minipass can interact with. 
- */ -const isStream = (s) => !!s && - typeof s === 'object' && - (s instanceof Minipass || - s instanceof stream_1.default || - (0, exports.isReadable)(s) || - (0, exports.isWritable)(s)); -exports.isStream = isStream; -/** - * Return true if the argument is a valid {@link Minipass.Readable} - */ -const isReadable = (s) => !!s && - typeof s === 'object' && - s instanceof events_1.EventEmitter && - typeof s.pipe === 'function' && - // node core Writable streams have a pipe() method, but it throws - s.pipe !== stream_1.default.Writable.prototype.pipe; -exports.isReadable = isReadable; -/** - * Return true if the argument is a valid {@link Minipass.Writable} - */ -const isWritable = (s) => !!s && - typeof s === 'object' && - s instanceof events_1.EventEmitter && - typeof s.write === 'function' && - typeof s.end === 'function'; -exports.isWritable = isWritable; -const EOF = Symbol('EOF'); -const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); -const EMITTED_END = Symbol('emittedEnd'); -const EMITTING_END = Symbol('emittingEnd'); -const EMITTED_ERROR = Symbol('emittedError'); -const CLOSED = Symbol('closed'); -const READ = Symbol('read'); -const FLUSH = Symbol('flush'); -const FLUSHCHUNK = Symbol('flushChunk'); -const ENCODING = Symbol('encoding'); -const DECODER = Symbol('decoder'); -const FLOWING = Symbol('flowing'); -const PAUSED = Symbol('paused'); -const RESUME = Symbol('resume'); -const BUFFER = Symbol('buffer'); -const PIPES = Symbol('pipes'); -const BUFFERLENGTH = Symbol('bufferLength'); -const BUFFERPUSH = Symbol('bufferPush'); -const BUFFERSHIFT = Symbol('bufferShift'); -const OBJECTMODE = Symbol('objectMode'); -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed'); -// internal event when stream has an error -const ERROR = Symbol('error'); -const EMITDATA = Symbol('emitData'); -const EMITEND = Symbol('emitEnd'); -const EMITEND2 = Symbol('emitEnd2'); -const ASYNC = Symbol('async'); -const ABORT = Symbol('abort'); -const ABORTED = Symbol('aborted'); -const SIGNAL = Symbol('signal'); -const DATALISTENERS = Symbol('dataListeners'); -const DISCARDED = Symbol('discarded'); -const defer = (fn) => Promise.resolve().then(fn); -const nodefer = (fn) => fn(); -const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; -const isArrayBufferLike = (b) => b instanceof ArrayBuffer || - (!!b && - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0); -const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); -/** - * Internal class representing a pipe to a destination stream. - * - * @internal - */ -class Pipe { - src; - dest; - opts; - ondrain; - constructor(src, dest, opts) { - this.src = src; - this.dest = dest; - this.opts = opts; - this.ondrain = () => src[RESUME](); - this.dest.on('drain', this.ondrain); - } - unpipe() { - this.dest.removeListener('drain', this.ondrain); - } - // only here for the prototype - /* c8 ignore start */ - proxyErrors(_er) { } - /* c8 ignore stop */ - end() { - this.unpipe(); - if (this.opts.end) - this.dest.end(); - } -} -/** - * Internal class representing a pipe to a destination stream where - * errors are proxied. 
- * - * @internal - */ -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors); - super.unpipe(); - } - constructor(src, dest, opts) { - super(src, dest, opts); - this.proxyErrors = er => dest.emit('error', er); - src.on('error', this.proxyErrors); - } -} -const isObjectModeOptions = (o) => !!o.objectMode; -const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; -/** - * Main export, the Minipass class - * - * `RType` is the type of data emitted, defaults to Buffer - * - * `WType` is the type of data to be written, if RType is buffer or string, - * then any {@link Minipass.ContiguousData} is allowed. - * - * `Events` is the set of event handler signatures that this object - * will emit, see {@link Minipass.Events} - */ -class Minipass extends events_1.EventEmitter { - [FLOWING] = false; - [PAUSED] = false; - [PIPES] = []; - [BUFFER] = []; - [OBJECTMODE]; - [ENCODING]; - [ASYNC]; - [DECODER]; - [EOF] = false; - [EMITTED_END] = false; - [EMITTING_END] = false; - [CLOSED] = false; - [EMITTED_ERROR] = null; - [BUFFERLENGTH] = 0; - [DESTROYED] = false; - [SIGNAL]; - [ABORTED] = false; - [DATALISTENERS] = 0; - [DISCARDED] = false; - /** - * true if the stream can be written - */ - writable = true; - /** - * true if the stream can be read - */ - readable = true; - /** - * If `RType` is Buffer, then options do not need to be provided. - * Otherwise, an options object must be provided to specify either - * {@link Minipass.SharedOptions.objectMode} or - * {@link Minipass.SharedOptions.encoding}, as appropriate. - */ - constructor(...args) { - const options = (args[0] || - {}); - super(); - if (options.objectMode && typeof options.encoding === 'string') { - throw new TypeError('Encoding and objectMode may not be used together'); - } - if (isObjectModeOptions(options)) { - this[OBJECTMODE] = true; - this[ENCODING] = null; - } - else if (isEncodingOptions(options)) { - this[ENCODING] = options.encoding; - this[OBJECTMODE] = false; - } - else { - this[OBJECTMODE] = false; - this[ENCODING] = null; - } - this[ASYNC] = !!options.async; - this[DECODER] = this[ENCODING] - ? new string_decoder_1.StringDecoder(this[ENCODING]) - : null; - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); - } - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); - } - const { signal } = options; - if (signal) { - this[SIGNAL] = signal; - if (signal.aborted) { - this[ABORT](); - } - else { - signal.addEventListener('abort', () => this[ABORT]()); - } - } - } - /** - * The amount of data stored in the buffer waiting to be read. - * - * For Buffer strings, this will be the total byte length. - * For string encoding streams, this will be the string character length, - * according to JavaScript's `string.length` logic. - * For objectMode streams, this is a count of the items waiting to be - * emitted. 
- */ - get bufferLength() { - return this[BUFFERLENGTH]; - } - /** - * The `BufferEncoding` currently in use, or `null` - */ - get encoding() { - return this[ENCODING]; - } - /** - * @deprecated - This is a read only property - */ - set encoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * @deprecated - Encoding may only be set at instantiation time - */ - setEncoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * True if this is an objectMode stream - */ - get objectMode() { - return this[OBJECTMODE]; - } - /** - * @deprecated - This is a read-only property - */ - set objectMode(_om) { - throw new Error('objectMode must be set at instantiation time'); - } - /** - * true if this is an async stream - */ - get ['async']() { - return this[ASYNC]; - } - /** - * Set to true to make this stream async. - * - * Once set, it cannot be unset, as this would potentially cause incorrect - * behavior. Ie, a sync stream can be made async, but an async stream - * cannot be safely made sync. - */ - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a; - } - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true; - this.emit('abort', this[SIGNAL]?.reason); - this.destroy(this[SIGNAL]?.reason); - } - /** - * True if the stream has been aborted. - */ - get aborted() { - return this[ABORTED]; - } - /** - * No-op setter. Stream aborted status is set via the AbortSignal provided - * in the constructor options. - */ - set aborted(_) { } - write(chunk, encoding, cb) { - if (this[ABORTED]) - return false; - if (this[EOF]) - throw new Error('write after end'); - if (this[DESTROYED]) { - this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); - return true; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (!encoding) - encoding = 'utf8'; - const fn = this[ASYNC] ? defer : nodefer; - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything is only allowed if in object mode, so throw - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); - } - else if (isArrayBufferLike(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk); - } - else if (typeof chunk !== 'string') { - throw new Error('Non-contiguous data written to non-objectMode stream'); - } - } - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - // maybe impossible? 
- /* c8 ignore start */ - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - /* c8 ignore stop */ - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { - //@ts-ignore - sinful unsafe type change - chunk = Buffer.from(chunk, encoding); - } - if (Buffer.isBuffer(chunk) && this[ENCODING]) { - //@ts-ignore - sinful unsafe type change - chunk = this[DECODER].write(chunk); - } - // Note: flushing CAN potentially switch us into not-flowing mode - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - /** - * Low-level explicit read method. - * - * In objectMode, the argument is ignored, and one item is returned if - * available. - * - * `n` is the number of bytes (or in the case of encoding streams, - * characters) to consume. If `n` is not provided, then the entire buffer - * is returned, or `null` is returned if no data is available. - * - * If `n` is greater that the amount of data in the internal buffer, - * then `null` is returned. - */ - read(n) { - if (this[DESTROYED]) - return null; - this[DISCARDED] = false; - if (this[BUFFERLENGTH] === 0 || - n === 0 || - (n && n > this[BUFFERLENGTH])) { - this[MAYBE_EMIT_END](); - return null; - } - if (this[OBJECTMODE]) - n = null; - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - // not object mode, so if we have an encoding, then RType is string - // otherwise, must be Buffer - this[BUFFER] = [ - (this[ENCODING] - ? this[BUFFER].join('') - : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), - ]; - } - const ret = this[READ](n || null, this[BUFFER][0]); - this[MAYBE_EMIT_END](); - return ret; - } - [READ](n, chunk) { - if (this[OBJECTMODE]) - this[BUFFERSHIFT](); - else { - const c = chunk; - if (n === c.length || n === null) - this[BUFFERSHIFT](); - else if (typeof c === 'string') { - this[BUFFER][0] = c.slice(n); - chunk = c.slice(0, n); - this[BUFFERLENGTH] -= n; - } - else { - this[BUFFER][0] = c.subarray(n); - chunk = c.subarray(0, n); - this[BUFFERLENGTH] -= n; - } - } - this.emit('data', chunk); - if (!this[BUFFER].length && !this[EOF]) - this.emit('drain'); - return chunk; - } - end(chunk, encoding, cb) { - if (typeof chunk === 'function') { - cb = chunk; - chunk = undefined; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (chunk !== undefined) - this.write(chunk, encoding); - if (cb) - this.once('end', cb); - this[EOF] = true; - this.writable = false; - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this[FLOWING] || !this[PAUSED]) - this[MAYBE_EMIT_END](); - return this; - } - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) - return; - if (!this[DATALISTENERS] && !this[PIPES].length) { - this[DISCARDED] = true; - } - this[PAUSED] = false; - this[FLOWING] = true; - this.emit('resume'); - if (this[BUFFER].length) - this[FLUSH](); - else if (this[EOF]) - this[MAYBE_EMIT_END](); - else - this.emit('drain'); - } - /** - * Resume the stream if it is currently in a paused state - * - * If called when there are no pipe destinations or `data` event listeners, - * this will place the stream in a "discarded" state, where all data will - * be thrown away. The discarded state is removed if a pipe destination or - * data handler is added, if pause() is called, or if any synchronous or - * asynchronous iteration is started. - */ - resume() { - return this[RESUME](); - } - /** - * Pause the stream - */ - pause() { - this[FLOWING] = false; - this[PAUSED] = true; - this[DISCARDED] = false; - } - /** - * true if the stream has been forcibly destroyed - */ - get destroyed() { - return this[DESTROYED]; - } - /** - * true if the stream is currently in a flowing state, meaning that - * any writes will be immediately emitted. - */ - get flowing() { - return this[FLOWING]; - } - /** - * true if the stream is currently in a paused state - */ - get paused() { - return this[PAUSED]; - } - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1; - else - this[BUFFERLENGTH] += chunk.length; - this[BUFFER].push(chunk); - } - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1; - else - this[BUFFERLENGTH] -= this[BUFFER][0].length; - return this[BUFFER].shift(); - } - [FLUSH](noDrain = false) { - do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && - this[BUFFER].length); - if (!noDrain && !this[BUFFER].length && !this[EOF]) - this.emit('drain'); - } - [FLUSHCHUNK](chunk) { - this.emit('data', chunk); - return this[FLOWING]; - } - /** - * Pipe all data emitted by this stream into the destination provided. - * - * Triggers the flow of data. - */ - pipe(dest, opts) { - if (this[DESTROYED]) - return dest; - this[DISCARDED] = false; - const ended = this[EMITTED_END]; - opts = opts || {}; - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false; - else - opts.end = opts.end !== false; - opts.proxyErrors = !!opts.proxyErrors; - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end(); - } - else { - // "as" here just ignores the WType, which pipes don't care about, - // since they're only consuming from us, and writing to the dest - this[PIPES].push(!opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)); - if (this[ASYNC]) - defer(() => this[RESUME]()); - else - this[RESUME](); - } - return dest; - } - /** - * Fully unhook a piped destination stream. - * - * If the destination stream was the only consumer of this stream (ie, - * there are no other piped destinations or `'data'` event listeners) - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. 
- */ - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest); - if (p) { - if (this[PIPES].length === 1) { - if (this[FLOWING] && this[DATALISTENERS] === 0) { - this[FLOWING] = false; - } - this[PIPES] = []; - } - else - this[PIPES].splice(this[PIPES].indexOf(p), 1); - p.unpipe(); - } - } - /** - * Alias for {@link Minipass#on} - */ - addListener(ev, handler) { - return this.on(ev, handler); - } - /** - * Mostly identical to `EventEmitter.on`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * - Adding a 'data' event handler will trigger the flow of data - * - * - Adding a 'readable' event handler when there is data waiting to be read - * will cause 'readable' to be emitted immediately. - * - * - Adding an 'endish' event handler ('end', 'finish', etc.) which has - * already passed will cause the event to be emitted immediately and all - * handlers removed. - * - * - Adding an 'error' event handler after an error has been emitted will - * cause the event to be re-emitted immediately with the error previously - * raised. - */ - on(ev, handler) { - const ret = super.on(ev, handler); - if (ev === 'data') { - this[DISCARDED] = false; - this[DATALISTENERS]++; - if (!this[PIPES].length && !this[FLOWING]) { - this[RESUME](); - } - } - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { - super.emit('readable'); - } - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev); - this.removeAllListeners(ev); - } - else if (ev === 'error' && this[EMITTED_ERROR]) { - const h = handler; - if (this[ASYNC]) - defer(() => h.call(this, this[EMITTED_ERROR])); - else - h.call(this, this[EMITTED_ERROR]); - } - return ret; - } - /** - * Alias for {@link Minipass#off} - */ - removeListener(ev, handler) { - return this.off(ev, handler); - } - /** - * Mostly identical to `EventEmitter.off` - * - * If a 'data' event handler is removed, and it was the last consumer - * (ie, there are no pipe destinations or other 'data' event listeners), - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. - */ - off(ev, handler) { - const ret = super.off(ev, handler); - // if we previously had listeners, and now we don't, and we don't - // have any pipes, then stop the flow, unless it's been explicitly - // put in a discarded flowing state via stream.resume(). - if (ev === 'data') { - this[DATALISTENERS] = this.listeners('data').length; - if (this[DATALISTENERS] === 0 && - !this[DISCARDED] && - !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * Mostly identical to `EventEmitter.removeAllListeners` - * - * If all 'data' event handlers are removed, and they were the last consumer - * (ie, there are no pipe destinations), then the flow of data will stop - * until there is another consumer or {@link Minipass#resume} is explicitly - * called. 
- */ - removeAllListeners(ev) { - const ret = super.removeAllListeners(ev); - if (ev === 'data' || ev === undefined) { - this[DATALISTENERS] = 0; - if (!this[DISCARDED] && !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * true if the 'end' event has been emitted - */ - get emittedEnd() { - return this[EMITTED_END]; - } - [MAYBE_EMIT_END]() { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF]) { - this[EMITTING_END] = true; - this.emit('end'); - this.emit('prefinish'); - this.emit('finish'); - if (this[CLOSED]) - this.emit('close'); - this[EMITTING_END] = false; - } - } - /** - * Mostly identical to `EventEmitter.emit`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * If the stream has been destroyed, and the event is something other - * than 'close' or 'error', then `false` is returned and no handlers - * are called. - * - * If the event is 'end', and has already been emitted, then the event - * is ignored. If the stream is in a paused or non-flowing state, then - * the event will be deferred until data flow resumes. If the stream is - * async, then handlers will be called on the next tick rather than - * immediately. - * - * If the event is 'close', and 'end' has not yet been emitted, then - * the event will be deferred until after 'end' is emitted. - * - * If the event is 'error', and an AbortSignal was provided for the stream, - * and there are no listeners, then the event is ignored, matching the - * behavior of node core streams in the presense of an AbortSignal. - * - * If the event is 'finish' or 'prefinish', then all listeners will be - * removed after emitting the event, to prevent double-firing. - */ - emit(ev, ...args) { - const data = args[0]; - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && - ev !== 'close' && - ev !== DESTROYED && - this[DESTROYED]) { - return false; - } - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? (defer(() => this[EMITDATA](data)), true) - : this[EMITDATA](data); - } - else if (ev === 'end') { - return this[EMITEND](); - } - else if (ev === 'close') { - this[CLOSED] = true; - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return false; - const ret = super.emit('close'); - this.removeAllListeners('close'); - return ret; - } - else if (ev === 'error') { - this[EMITTED_ERROR] = data; - super.emit(ERROR, data); - const ret = !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false; - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'resume') { - const ret = super.emit('resume'); - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev); - this.removeAllListeners(ev); - return ret; - } - // Some other unknown event - const ret = super.emit(ev, ...args); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) - this.pause(); - } - const ret = this[DISCARDED] ? false : super.emit('data', data); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITEND]() { - if (this[EMITTED_END]) - return false; - this[EMITTED_END] = true; - this.readable = false; - return this[ASYNC] - ? 
(defer(() => this[EMITEND2]()), true) - : this[EMITEND2](); - } - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end(); - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data); - } - if (!this[DISCARDED]) - super.emit('data', data); - } - } - for (const p of this[PIPES]) { - p.end(); - } - const ret = super.emit('end'); - this.removeAllListeners('end'); - return ret; - } - /** - * Return a Promise that resolves to an array of all emitted data once - * the stream ends. - */ - async collect() { - const buf = Object.assign([], { - dataLength: 0, - }); - if (!this[OBJECTMODE]) - buf.dataLength = 0; - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise(); - this.on('data', c => { - buf.push(c); - if (!this[OBJECTMODE]) - buf.dataLength += c.length; - }); - await p; - return buf; - } - /** - * Return a Promise that resolves to the concatenation of all emitted data - * once the stream ends. - * - * Not allowed on objectMode streams. - */ - async concat() { - if (this[OBJECTMODE]) { - throw new Error('cannot concat in objectMode'); - } - const buf = await this.collect(); - return (this[ENCODING] - ? buf.join('') - : Buffer.concat(buf, buf.dataLength)); - } - /** - * Return a void Promise that resolves once the stream ends. - */ - async promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))); - this.on('error', er => reject(er)); - this.on('end', () => resolve()); - }); - } - /** - * Asynchronous `for await of` iteration. - * - * This will continue emitting all chunks until the stream terminates. - */ - [Symbol.asyncIterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. - this[DISCARDED] = false; - let stopped = false; - const stop = async () => { - this.pause(); - stopped = true; - return { value: undefined, done: true }; - }; - const next = () => { - if (stopped) - return stop(); - const res = this.read(); - if (res !== null) - return Promise.resolve({ done: false, value: res }); - if (this[EOF]) - return stop(); - let resolve; - let reject; - const onerr = (er) => { - this.off('data', ondata); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - stop(); - reject(er); - }; - const ondata = (value) => { - this.off('error', onerr); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - this.pause(); - resolve({ value, done: !!this[EOF] }); - }; - const onend = () => { - this.off('error', onerr); - this.off('data', ondata); - this.off(DESTROYED, ondestroy); - stop(); - resolve({ done: true, value: undefined }); - }; - const ondestroy = () => onerr(new Error('stream destroyed')); - return new Promise((res, rej) => { - reject = rej; - resolve = res; - this.once(DESTROYED, ondestroy); - this.once('error', onerr); - this.once('end', onend); - this.once('data', ondata); - }); - }; - return { - next, - throw: stop, - return: stop, - [Symbol.asyncIterator]() { - return this; - }, - }; - } - /** - * Synchronous `for of` iteration. - * - * The iteration will terminate when the internal buffer runs out, even - * if the stream has not yet terminated. - */ - [Symbol.iterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. 
- this[DISCARDED] = false; - let stopped = false; - const stop = () => { - this.pause(); - this.off(ERROR, stop); - this.off(DESTROYED, stop); - this.off('end', stop); - stopped = true; - return { done: true, value: undefined }; - }; - const next = () => { - if (stopped) - return stop(); - const value = this.read(); - return value === null ? stop() : { done: false, value }; - }; - this.once('end', stop); - this.once(ERROR, stop); - this.once(DESTROYED, stop); - return { - next, - throw: stop, - return: stop, - [Symbol.iterator]() { - return this; - }, - }; - } - /** - * Destroy a stream, preventing it from being used for any further purpose. - * - * If the stream has a `close()` method, then it will be called on - * destruction. - * - * After destruction, any attempt to write data, read data, or emit most - * events will be ignored. - * - * If an error argument is provided, then it will be emitted in an - * 'error' event. - */ - destroy(er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er); - else - this.emit(DESTROYED); - return this; - } - this[DESTROYED] = true; - this[DISCARDED] = true; - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0; - this[BUFFERLENGTH] = 0; - const wc = this; - if (typeof wc.close === 'function' && !this[CLOSED]) - wc.close(); - if (er) - this.emit('error', er); - // if no error to emit, still reject pending promises - else - this.emit(DESTROYED); - return this; - } - /** - * Alias for {@link isStream} - * - * Former export location, maintained for backwards compatibility. - * - * @deprecated - */ - static get isStream() { - return exports.isStream; - } -} -exports.Minipass = Minipass; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/package.json b/node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/package.json deleted file mode 100644 index 5bbefffbabee3..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/minipass/dist/cjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/index.js b/node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/index.js deleted file mode 100644 index b65fafbae43a4..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/index.js +++ /dev/null @@ -1,1018 +0,0 @@ -const proc = typeof process === 'object' && process - ? process - : { - stdout: null, - stderr: null, - }; -import { EventEmitter } from 'events'; -import Stream from 'stream'; -import { StringDecoder } from 'string_decoder'; -/** - * Return true if the argument is a Minipass stream, Node stream, or something - * else that Minipass can interact with. 
- */ -export const isStream = (s) => !!s && - typeof s === 'object' && - (s instanceof Minipass || - s instanceof Stream || - isReadable(s) || - isWritable(s)); -/** - * Return true if the argument is a valid {@link Minipass.Readable} - */ -export const isReadable = (s) => !!s && - typeof s === 'object' && - s instanceof EventEmitter && - typeof s.pipe === 'function' && - // node core Writable streams have a pipe() method, but it throws - s.pipe !== Stream.Writable.prototype.pipe; -/** - * Return true if the argument is a valid {@link Minipass.Writable} - */ -export const isWritable = (s) => !!s && - typeof s === 'object' && - s instanceof EventEmitter && - typeof s.write === 'function' && - typeof s.end === 'function'; -const EOF = Symbol('EOF'); -const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); -const EMITTED_END = Symbol('emittedEnd'); -const EMITTING_END = Symbol('emittingEnd'); -const EMITTED_ERROR = Symbol('emittedError'); -const CLOSED = Symbol('closed'); -const READ = Symbol('read'); -const FLUSH = Symbol('flush'); -const FLUSHCHUNK = Symbol('flushChunk'); -const ENCODING = Symbol('encoding'); -const DECODER = Symbol('decoder'); -const FLOWING = Symbol('flowing'); -const PAUSED = Symbol('paused'); -const RESUME = Symbol('resume'); -const BUFFER = Symbol('buffer'); -const PIPES = Symbol('pipes'); -const BUFFERLENGTH = Symbol('bufferLength'); -const BUFFERPUSH = Symbol('bufferPush'); -const BUFFERSHIFT = Symbol('bufferShift'); -const OBJECTMODE = Symbol('objectMode'); -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed'); -// internal event when stream has an error -const ERROR = Symbol('error'); -const EMITDATA = Symbol('emitData'); -const EMITEND = Symbol('emitEnd'); -const EMITEND2 = Symbol('emitEnd2'); -const ASYNC = Symbol('async'); -const ABORT = Symbol('abort'); -const ABORTED = Symbol('aborted'); -const SIGNAL = Symbol('signal'); -const DATALISTENERS = Symbol('dataListeners'); -const DISCARDED = Symbol('discarded'); -const defer = (fn) => Promise.resolve().then(fn); -const nodefer = (fn) => fn(); -const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; -const isArrayBufferLike = (b) => b instanceof ArrayBuffer || - (!!b && - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0); -const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); -/** - * Internal class representing a pipe to a destination stream. - * - * @internal - */ -class Pipe { - src; - dest; - opts; - ondrain; - constructor(src, dest, opts) { - this.src = src; - this.dest = dest; - this.opts = opts; - this.ondrain = () => src[RESUME](); - this.dest.on('drain', this.ondrain); - } - unpipe() { - this.dest.removeListener('drain', this.ondrain); - } - // only here for the prototype - /* c8 ignore start */ - proxyErrors(_er) { } - /* c8 ignore stop */ - end() { - this.unpipe(); - if (this.opts.end) - this.dest.end(); - } -} -/** - * Internal class representing a pipe to a destination stream where - * errors are proxied. 
- * - * @internal - */ -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors); - super.unpipe(); - } - constructor(src, dest, opts) { - super(src, dest, opts); - this.proxyErrors = er => dest.emit('error', er); - src.on('error', this.proxyErrors); - } -} -const isObjectModeOptions = (o) => !!o.objectMode; -const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; -/** - * Main export, the Minipass class - * - * `RType` is the type of data emitted, defaults to Buffer - * - * `WType` is the type of data to be written, if RType is buffer or string, - * then any {@link Minipass.ContiguousData} is allowed. - * - * `Events` is the set of event handler signatures that this object - * will emit, see {@link Minipass.Events} - */ -export class Minipass extends EventEmitter { - [FLOWING] = false; - [PAUSED] = false; - [PIPES] = []; - [BUFFER] = []; - [OBJECTMODE]; - [ENCODING]; - [ASYNC]; - [DECODER]; - [EOF] = false; - [EMITTED_END] = false; - [EMITTING_END] = false; - [CLOSED] = false; - [EMITTED_ERROR] = null; - [BUFFERLENGTH] = 0; - [DESTROYED] = false; - [SIGNAL]; - [ABORTED] = false; - [DATALISTENERS] = 0; - [DISCARDED] = false; - /** - * true if the stream can be written - */ - writable = true; - /** - * true if the stream can be read - */ - readable = true; - /** - * If `RType` is Buffer, then options do not need to be provided. - * Otherwise, an options object must be provided to specify either - * {@link Minipass.SharedOptions.objectMode} or - * {@link Minipass.SharedOptions.encoding}, as appropriate. - */ - constructor(...args) { - const options = (args[0] || - {}); - super(); - if (options.objectMode && typeof options.encoding === 'string') { - throw new TypeError('Encoding and objectMode may not be used together'); - } - if (isObjectModeOptions(options)) { - this[OBJECTMODE] = true; - this[ENCODING] = null; - } - else if (isEncodingOptions(options)) { - this[ENCODING] = options.encoding; - this[OBJECTMODE] = false; - } - else { - this[OBJECTMODE] = false; - this[ENCODING] = null; - } - this[ASYNC] = !!options.async; - this[DECODER] = this[ENCODING] - ? new StringDecoder(this[ENCODING]) - : null; - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); - } - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); - } - const { signal } = options; - if (signal) { - this[SIGNAL] = signal; - if (signal.aborted) { - this[ABORT](); - } - else { - signal.addEventListener('abort', () => this[ABORT]()); - } - } - } - /** - * The amount of data stored in the buffer waiting to be read. - * - * For Buffer strings, this will be the total byte length. - * For string encoding streams, this will be the string character length, - * according to JavaScript's `string.length` logic. - * For objectMode streams, this is a count of the items waiting to be - * emitted. 
- */ - get bufferLength() { - return this[BUFFERLENGTH]; - } - /** - * The `BufferEncoding` currently in use, or `null` - */ - get encoding() { - return this[ENCODING]; - } - /** - * @deprecated - This is a read only property - */ - set encoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * @deprecated - Encoding may only be set at instantiation time - */ - setEncoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * True if this is an objectMode stream - */ - get objectMode() { - return this[OBJECTMODE]; - } - /** - * @deprecated - This is a read-only property - */ - set objectMode(_om) { - throw new Error('objectMode must be set at instantiation time'); - } - /** - * true if this is an async stream - */ - get ['async']() { - return this[ASYNC]; - } - /** - * Set to true to make this stream async. - * - * Once set, it cannot be unset, as this would potentially cause incorrect - * behavior. Ie, a sync stream can be made async, but an async stream - * cannot be safely made sync. - */ - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a; - } - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true; - this.emit('abort', this[SIGNAL]?.reason); - this.destroy(this[SIGNAL]?.reason); - } - /** - * True if the stream has been aborted. - */ - get aborted() { - return this[ABORTED]; - } - /** - * No-op setter. Stream aborted status is set via the AbortSignal provided - * in the constructor options. - */ - set aborted(_) { } - write(chunk, encoding, cb) { - if (this[ABORTED]) - return false; - if (this[EOF]) - throw new Error('write after end'); - if (this[DESTROYED]) { - this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); - return true; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (!encoding) - encoding = 'utf8'; - const fn = this[ASYNC] ? defer : nodefer; - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything is only allowed if in object mode, so throw - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); - } - else if (isArrayBufferLike(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk); - } - else if (typeof chunk !== 'string') { - throw new Error('Non-contiguous data written to non-objectMode stream'); - } - } - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - // maybe impossible? 
- /* c8 ignore start */ - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - /* c8 ignore stop */ - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { - //@ts-ignore - sinful unsafe type change - chunk = Buffer.from(chunk, encoding); - } - if (Buffer.isBuffer(chunk) && this[ENCODING]) { - //@ts-ignore - sinful unsafe type change - chunk = this[DECODER].write(chunk); - } - // Note: flushing CAN potentially switch us into not-flowing mode - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - /** - * Low-level explicit read method. - * - * In objectMode, the argument is ignored, and one item is returned if - * available. - * - * `n` is the number of bytes (or in the case of encoding streams, - * characters) to consume. If `n` is not provided, then the entire buffer - * is returned, or `null` is returned if no data is available. - * - * If `n` is greater that the amount of data in the internal buffer, - * then `null` is returned. - */ - read(n) { - if (this[DESTROYED]) - return null; - this[DISCARDED] = false; - if (this[BUFFERLENGTH] === 0 || - n === 0 || - (n && n > this[BUFFERLENGTH])) { - this[MAYBE_EMIT_END](); - return null; - } - if (this[OBJECTMODE]) - n = null; - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - // not object mode, so if we have an encoding, then RType is string - // otherwise, must be Buffer - this[BUFFER] = [ - (this[ENCODING] - ? this[BUFFER].join('') - : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), - ]; - } - const ret = this[READ](n || null, this[BUFFER][0]); - this[MAYBE_EMIT_END](); - return ret; - } - [READ](n, chunk) { - if (this[OBJECTMODE]) - this[BUFFERSHIFT](); - else { - const c = chunk; - if (n === c.length || n === null) - this[BUFFERSHIFT](); - else if (typeof c === 'string') { - this[BUFFER][0] = c.slice(n); - chunk = c.slice(0, n); - this[BUFFERLENGTH] -= n; - } - else { - this[BUFFER][0] = c.subarray(n); - chunk = c.subarray(0, n); - this[BUFFERLENGTH] -= n; - } - } - this.emit('data', chunk); - if (!this[BUFFER].length && !this[EOF]) - this.emit('drain'); - return chunk; - } - end(chunk, encoding, cb) { - if (typeof chunk === 'function') { - cb = chunk; - chunk = undefined; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (chunk !== undefined) - this.write(chunk, encoding); - if (cb) - this.once('end', cb); - this[EOF] = true; - this.writable = false; - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this[FLOWING] || !this[PAUSED]) - this[MAYBE_EMIT_END](); - return this; - } - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) - return; - if (!this[DATALISTENERS] && !this[PIPES].length) { - this[DISCARDED] = true; - } - this[PAUSED] = false; - this[FLOWING] = true; - this.emit('resume'); - if (this[BUFFER].length) - this[FLUSH](); - else if (this[EOF]) - this[MAYBE_EMIT_END](); - else - this.emit('drain'); - } - /** - * Resume the stream if it is currently in a paused state - * - * If called when there are no pipe destinations or `data` event listeners, - * this will place the stream in a "discarded" state, where all data will - * be thrown away. The discarded state is removed if a pipe destination or - * data handler is added, if pause() is called, or if any synchronous or - * asynchronous iteration is started. - */ - resume() { - return this[RESUME](); - } - /** - * Pause the stream - */ - pause() { - this[FLOWING] = false; - this[PAUSED] = true; - this[DISCARDED] = false; - } - /** - * true if the stream has been forcibly destroyed - */ - get destroyed() { - return this[DESTROYED]; - } - /** - * true if the stream is currently in a flowing state, meaning that - * any writes will be immediately emitted. - */ - get flowing() { - return this[FLOWING]; - } - /** - * true if the stream is currently in a paused state - */ - get paused() { - return this[PAUSED]; - } - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1; - else - this[BUFFERLENGTH] += chunk.length; - this[BUFFER].push(chunk); - } - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1; - else - this[BUFFERLENGTH] -= this[BUFFER][0].length; - return this[BUFFER].shift(); - } - [FLUSH](noDrain = false) { - do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && - this[BUFFER].length); - if (!noDrain && !this[BUFFER].length && !this[EOF]) - this.emit('drain'); - } - [FLUSHCHUNK](chunk) { - this.emit('data', chunk); - return this[FLOWING]; - } - /** - * Pipe all data emitted by this stream into the destination provided. - * - * Triggers the flow of data. - */ - pipe(dest, opts) { - if (this[DESTROYED]) - return dest; - this[DISCARDED] = false; - const ended = this[EMITTED_END]; - opts = opts || {}; - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false; - else - opts.end = opts.end !== false; - opts.proxyErrors = !!opts.proxyErrors; - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end(); - } - else { - // "as" here just ignores the WType, which pipes don't care about, - // since they're only consuming from us, and writing to the dest - this[PIPES].push(!opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)); - if (this[ASYNC]) - defer(() => this[RESUME]()); - else - this[RESUME](); - } - return dest; - } - /** - * Fully unhook a piped destination stream. - * - * If the destination stream was the only consumer of this stream (ie, - * there are no other piped destinations or `'data'` event listeners) - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. 
- */ - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest); - if (p) { - if (this[PIPES].length === 1) { - if (this[FLOWING] && this[DATALISTENERS] === 0) { - this[FLOWING] = false; - } - this[PIPES] = []; - } - else - this[PIPES].splice(this[PIPES].indexOf(p), 1); - p.unpipe(); - } - } - /** - * Alias for {@link Minipass#on} - */ - addListener(ev, handler) { - return this.on(ev, handler); - } - /** - * Mostly identical to `EventEmitter.on`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * - Adding a 'data' event handler will trigger the flow of data - * - * - Adding a 'readable' event handler when there is data waiting to be read - * will cause 'readable' to be emitted immediately. - * - * - Adding an 'endish' event handler ('end', 'finish', etc.) which has - * already passed will cause the event to be emitted immediately and all - * handlers removed. - * - * - Adding an 'error' event handler after an error has been emitted will - * cause the event to be re-emitted immediately with the error previously - * raised. - */ - on(ev, handler) { - const ret = super.on(ev, handler); - if (ev === 'data') { - this[DISCARDED] = false; - this[DATALISTENERS]++; - if (!this[PIPES].length && !this[FLOWING]) { - this[RESUME](); - } - } - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { - super.emit('readable'); - } - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev); - this.removeAllListeners(ev); - } - else if (ev === 'error' && this[EMITTED_ERROR]) { - const h = handler; - if (this[ASYNC]) - defer(() => h.call(this, this[EMITTED_ERROR])); - else - h.call(this, this[EMITTED_ERROR]); - } - return ret; - } - /** - * Alias for {@link Minipass#off} - */ - removeListener(ev, handler) { - return this.off(ev, handler); - } - /** - * Mostly identical to `EventEmitter.off` - * - * If a 'data' event handler is removed, and it was the last consumer - * (ie, there are no pipe destinations or other 'data' event listeners), - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. - */ - off(ev, handler) { - const ret = super.off(ev, handler); - // if we previously had listeners, and now we don't, and we don't - // have any pipes, then stop the flow, unless it's been explicitly - // put in a discarded flowing state via stream.resume(). - if (ev === 'data') { - this[DATALISTENERS] = this.listeners('data').length; - if (this[DATALISTENERS] === 0 && - !this[DISCARDED] && - !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * Mostly identical to `EventEmitter.removeAllListeners` - * - * If all 'data' event handlers are removed, and they were the last consumer - * (ie, there are no pipe destinations), then the flow of data will stop - * until there is another consumer or {@link Minipass#resume} is explicitly - * called. 
- */ - removeAllListeners(ev) { - const ret = super.removeAllListeners(ev); - if (ev === 'data' || ev === undefined) { - this[DATALISTENERS] = 0; - if (!this[DISCARDED] && !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * true if the 'end' event has been emitted - */ - get emittedEnd() { - return this[EMITTED_END]; - } - [MAYBE_EMIT_END]() { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF]) { - this[EMITTING_END] = true; - this.emit('end'); - this.emit('prefinish'); - this.emit('finish'); - if (this[CLOSED]) - this.emit('close'); - this[EMITTING_END] = false; - } - } - /** - * Mostly identical to `EventEmitter.emit`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * If the stream has been destroyed, and the event is something other - * than 'close' or 'error', then `false` is returned and no handlers - * are called. - * - * If the event is 'end', and has already been emitted, then the event - * is ignored. If the stream is in a paused or non-flowing state, then - * the event will be deferred until data flow resumes. If the stream is - * async, then handlers will be called on the next tick rather than - * immediately. - * - * If the event is 'close', and 'end' has not yet been emitted, then - * the event will be deferred until after 'end' is emitted. - * - * If the event is 'error', and an AbortSignal was provided for the stream, - * and there are no listeners, then the event is ignored, matching the - * behavior of node core streams in the presense of an AbortSignal. - * - * If the event is 'finish' or 'prefinish', then all listeners will be - * removed after emitting the event, to prevent double-firing. - */ - emit(ev, ...args) { - const data = args[0]; - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && - ev !== 'close' && - ev !== DESTROYED && - this[DESTROYED]) { - return false; - } - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? (defer(() => this[EMITDATA](data)), true) - : this[EMITDATA](data); - } - else if (ev === 'end') { - return this[EMITEND](); - } - else if (ev === 'close') { - this[CLOSED] = true; - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return false; - const ret = super.emit('close'); - this.removeAllListeners('close'); - return ret; - } - else if (ev === 'error') { - this[EMITTED_ERROR] = data; - super.emit(ERROR, data); - const ret = !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false; - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'resume') { - const ret = super.emit('resume'); - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev); - this.removeAllListeners(ev); - return ret; - } - // Some other unknown event - const ret = super.emit(ev, ...args); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) - this.pause(); - } - const ret = this[DISCARDED] ? false : super.emit('data', data); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITEND]() { - if (this[EMITTED_END]) - return false; - this[EMITTED_END] = true; - this.readable = false; - return this[ASYNC] - ? 
(defer(() => this[EMITEND2]()), true) - : this[EMITEND2](); - } - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end(); - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data); - } - if (!this[DISCARDED]) - super.emit('data', data); - } - } - for (const p of this[PIPES]) { - p.end(); - } - const ret = super.emit('end'); - this.removeAllListeners('end'); - return ret; - } - /** - * Return a Promise that resolves to an array of all emitted data once - * the stream ends. - */ - async collect() { - const buf = Object.assign([], { - dataLength: 0, - }); - if (!this[OBJECTMODE]) - buf.dataLength = 0; - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise(); - this.on('data', c => { - buf.push(c); - if (!this[OBJECTMODE]) - buf.dataLength += c.length; - }); - await p; - return buf; - } - /** - * Return a Promise that resolves to the concatenation of all emitted data - * once the stream ends. - * - * Not allowed on objectMode streams. - */ - async concat() { - if (this[OBJECTMODE]) { - throw new Error('cannot concat in objectMode'); - } - const buf = await this.collect(); - return (this[ENCODING] - ? buf.join('') - : Buffer.concat(buf, buf.dataLength)); - } - /** - * Return a void Promise that resolves once the stream ends. - */ - async promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))); - this.on('error', er => reject(er)); - this.on('end', () => resolve()); - }); - } - /** - * Asynchronous `for await of` iteration. - * - * This will continue emitting all chunks until the stream terminates. - */ - [Symbol.asyncIterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. - this[DISCARDED] = false; - let stopped = false; - const stop = async () => { - this.pause(); - stopped = true; - return { value: undefined, done: true }; - }; - const next = () => { - if (stopped) - return stop(); - const res = this.read(); - if (res !== null) - return Promise.resolve({ done: false, value: res }); - if (this[EOF]) - return stop(); - let resolve; - let reject; - const onerr = (er) => { - this.off('data', ondata); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - stop(); - reject(er); - }; - const ondata = (value) => { - this.off('error', onerr); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - this.pause(); - resolve({ value, done: !!this[EOF] }); - }; - const onend = () => { - this.off('error', onerr); - this.off('data', ondata); - this.off(DESTROYED, ondestroy); - stop(); - resolve({ done: true, value: undefined }); - }; - const ondestroy = () => onerr(new Error('stream destroyed')); - return new Promise((res, rej) => { - reject = rej; - resolve = res; - this.once(DESTROYED, ondestroy); - this.once('error', onerr); - this.once('end', onend); - this.once('data', ondata); - }); - }; - return { - next, - throw: stop, - return: stop, - [Symbol.asyncIterator]() { - return this; - }, - }; - } - /** - * Synchronous `for of` iteration. - * - * The iteration will terminate when the internal buffer runs out, even - * if the stream has not yet terminated. - */ - [Symbol.iterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. 
- this[DISCARDED] = false; - let stopped = false; - const stop = () => { - this.pause(); - this.off(ERROR, stop); - this.off(DESTROYED, stop); - this.off('end', stop); - stopped = true; - return { done: true, value: undefined }; - }; - const next = () => { - if (stopped) - return stop(); - const value = this.read(); - return value === null ? stop() : { done: false, value }; - }; - this.once('end', stop); - this.once(ERROR, stop); - this.once(DESTROYED, stop); - return { - next, - throw: stop, - return: stop, - [Symbol.iterator]() { - return this; - }, - }; - } - /** - * Destroy a stream, preventing it from being used for any further purpose. - * - * If the stream has a `close()` method, then it will be called on - * destruction. - * - * After destruction, any attempt to write data, read data, or emit most - * events will be ignored. - * - * If an error argument is provided, then it will be emitted in an - * 'error' event. - */ - destroy(er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er); - else - this.emit(DESTROYED); - return this; - } - this[DESTROYED] = true; - this[DISCARDED] = true; - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0; - this[BUFFERLENGTH] = 0; - const wc = this; - if (typeof wc.close === 'function' && !this[CLOSED]) - wc.close(); - if (er) - this.emit('error', er); - // if no error to emit, still reject pending promises - else - this.emit(DESTROYED); - return this; - } - /** - * Alias for {@link isStream} - * - * Former export location, maintained for backwards compatibility. - * - * @deprecated - */ - static get isStream() { - return isStream; - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/package.json b/node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/package.json deleted file mode 100644 index 3dbc1ca591c05..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/minipass/dist/mjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git a/node_modules/make-fetch-happen/node_modules/minipass/package.json b/node_modules/make-fetch-happen/node_modules/minipass/package.json deleted file mode 100644 index 6faaa247a5bc6..0000000000000 --- a/node_modules/make-fetch-happen/node_modules/minipass/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "minipass", - "version": "7.0.3", - "description": "minimal implementation of a PassThrough stream", - "main": "./dist/cjs/index.js", - "module": "./dist/mjs/index.js", - "types": "./dist/cjs/index.js", - "exports": { - ".": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" - }, - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.js" - } - }, - "./package.json": "./package.json" - }, - "files": [ - "dist" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", - "format": "prettier --write . 
--loglevel warn", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" - }, - "tap": { - "coverage": false, - "node-arg": [ - "--enable-source-maps", - "--no-warnings", - "--loader", - "ts-node/esm" - ], - "ts": false - }, - "prettier": { - "semi": false, - "printWidth": 75, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "devDependencies": { - "@types/node": "^20.1.2", - "@types/tap": "^15.0.8", - "c8": "^7.13.0", - "prettier": "^2.6.2", - "tap": "^16.3.0", - "ts-node": "^10.9.1", - "typedoc": "^0.24.8", - "typescript": "^5.1.3", - "end-of-stream": "^1.4.0", - "node-abort-controller": "^3.1.1", - "sync-content": "^1.0.2", - "through2": "^2.0.3" - }, - "repository": "https://github.com/isaacs/minipass", - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } -} diff --git a/node_modules/fs-minipass/node_modules/minipass/LICENSE b/node_modules/minipass-fetch/node_modules/minipass/LICENSE similarity index 100% rename from node_modules/fs-minipass/node_modules/minipass/LICENSE rename to node_modules/minipass-fetch/node_modules/minipass/LICENSE diff --git a/node_modules/minipass-fetch/node_modules/minipass/index.js b/node_modules/minipass-fetch/node_modules/minipass/index.js new file mode 100644 index 0000000000000..ed07c17acd97b --- /dev/null +++ b/node_modules/minipass-fetch/node_modules/minipass/index.js @@ -0,0 +1,702 @@ +'use strict' +const proc = + typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + } +const EE = require('events') +const Stream = require('stream') +const stringdecoder = require('string_decoder') +const SD = stringdecoder.StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFER = Symbol('buffer') +const PIPES = Symbol('pipes') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed') +// internal event when stream has an error +const ERROR = Symbol('error') +const EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') +const ABORT = Symbol('abort') +const ABORTED = Symbol('aborted') +const SIGNAL = Symbol('signal') + +const defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = + (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') +const ITERATOR = + (doIter && Symbol.iterator) || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. 
+const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' + +const isArrayBuffer = b => + b instanceof ArrayBuffer || + (typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0) + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor(src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe() { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors() {} + end() { + this.unpipe() + if (this.opts.end) this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor(src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +class Minipass extends Stream { + constructor(options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this[PIPES] = [] + this[BUFFER] = [] + this[OBJECTMODE] = (options && options.objectMode) || false + if (this[OBJECTMODE]) this[ENCODING] = null + else this[ENCODING] = (options && options.encoding) || null + if (this[ENCODING] === 'buffer') this[ENCODING] = null + this[ASYNC] = (options && !!options.async) || false + this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) + } + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) + } + this[SIGNAL] = options && options.signal + this[ABORTED] = false + if (this[SIGNAL]) { + this[SIGNAL].addEventListener('abort', () => this[ABORT]()) + if (this[SIGNAL].aborted) { + this[ABORT]() + } + } + } + + get bufferLength() { + return this[BUFFERLENGTH] + } + + get encoding() { + return this[ENCODING] + } + set encoding(enc) { + if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') + + if ( + this[ENCODING] && + enc !== this[ENCODING] && + ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) + ) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? 
new SD(enc) : null + if (this[BUFFER].length) + this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding(enc) { + this.encoding = enc + } + + get objectMode() { + return this[OBJECTMODE] + } + set objectMode(om) { + this[OBJECTMODE] = this[OBJECTMODE] || !!om + } + + get ['async']() { + return this[ASYNC] + } + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a + } + + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true + this.emit('abort', this[SIGNAL].reason) + this.destroy(this[SIGNAL].reason) + } + + get aborted() { + return this[ABORTED] + } + set aborted(_) {} + + write(chunk, encoding, cb) { + if (this[ABORTED]) return false + if (this[EOF]) throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit( + 'error', + Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + ) + ) + return true + } + + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + + if (!encoding) encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? 
*/ + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + if (cb) fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if ( + typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed) + ) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + read(n) { + if (this[DESTROYED]) return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + + if (this[OBJECTMODE]) n = null + + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] + else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this[BUFFER][0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ](n, chunk) { + if (n === chunk.length || n === null) this[BUFFERSHIFT]() + else { + this[BUFFER][0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this[BUFFER].length && !this[EOF]) this.emit('drain') + + return chunk + } + + end(chunk, encoding, cb) { + if (typeof chunk === 'function') (cb = chunk), (chunk = null) + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + if (chunk) this.write(chunk, encoding) + if (cb) this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this[BUFFER].length) this[FLUSH]() + else if (this[EOF]) this[MAYBE_EMIT_END]() + else this.emit('drain') + } + + resume() { + return this[RESUME]() + } + + pause() { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed() { + return this[DESTROYED] + } + + get flowing() { + return this[FLOWING] + } + + get paused() { + return this[PAUSED] + } + + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 + else this[BUFFERLENGTH] += chunk.length + this[BUFFER].push(chunk) + } + + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 + else this[BUFFERLENGTH] -= this[BUFFER][0].length + return this[BUFFER].shift() + } + + [FLUSH](noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) + + if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') + } + + [FLUSHCHUNK](chunk) { + this.emit('data', chunk) + return this.flowing + } + + pipe(dest, opts) { + if (this[DESTROYED]) return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) opts.end = false + else opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) dest.end() + } else { + this[PIPES].push( + !opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts) + ) + if (this[ASYNC]) defer(() => this[RESUME]()) + else this[RESUME]() + } + + return dest + } + + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest) + if (p) { + this[PIPES].splice(this[PIPES].indexOf(p), 1) + p.unpipe() + } + } + + addListener(ev, fn) { + return this.on(ev, fn) + } + + on(ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) + else fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd() { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END]() { + if ( + !this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF] + ) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) this.emit('close') + this[EMITTING_END] = false + } + } + + emit(ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + super.emit(ERROR, data) + const ret = + !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND]() { + if (this[EMITTED_END]) return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) defer(() => this[EMITEND2]()) + else this[EMITEND2]() + } + + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this[PIPES]) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect() { + const buf = [] + if (!this[OBJECTMODE]) buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat() { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] + ? 
buf.join('') + : Buffer.concat(buf, buf.dataLength) + ) + } + + // stream.promise().then(() => done, er => emitted error) + promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + stopped = true + return Promise.resolve({ done: true }) + } + const next = () => { + if (stopped) return stop() + const res = this.read() + if (res !== null) return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) return stop() + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + stop() + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + this.removeListener(DESTROYED, ondestroy) + stop() + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { + next, + throw: stop, + return: stop, + [ASYNCITERATOR]() { + return this + }, + } + } + + // for (let chunk of stream) + [ITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + this.removeListener(ERROR, stop) + this.removeListener(DESTROYED, stop) + this.removeListener('end', stop) + stopped = true + return { done: true } + } + + const next = () => { + if (stopped) return stop() + const value = this.read() + return value === null ? stop() : { value } + } + this.once('end', stop) + this.once(ERROR, stop) + this.once(DESTROYED, stop) + + return { + next, + throw: stop, + return: stop, + [ITERATOR]() { + return this + }, + } + } + + destroy(er) { + if (this[DESTROYED]) { + if (er) this.emit('error', er) + else this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) this.close() + + if (er) this.emit('error', er) + // if no error to emit, still reject pending promises + else this.emit(DESTROYED) + + return this + } + + static isStream(s) { + return ( + !!s && + (s instanceof Minipass || + s instanceof Stream || + (s instanceof EE && + // readable + (typeof s.pipe === 'function' || + // writable + (typeof s.write === 'function' && typeof s.end === 'function')))) + ) + } +} + +exports.Minipass = Minipass diff --git a/node_modules/minipass-fetch/node_modules/minipass/index.mjs b/node_modules/minipass-fetch/node_modules/minipass/index.mjs new file mode 100644 index 0000000000000..6ef6cd8cf0703 --- /dev/null +++ b/node_modules/minipass-fetch/node_modules/minipass/index.mjs @@ -0,0 +1,702 @@ +'use strict' +const proc = + typeof process === 'object' && process + ? 
process + : { + stdout: null, + stderr: null, + } +import EE from 'events' +import Stream from 'stream' +import stringdecoder from 'string_decoder' +const SD = stringdecoder.StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFER = Symbol('buffer') +const PIPES = Symbol('pipes') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed') +// internal event when stream has an error +const ERROR = Symbol('error') +const EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') +const ABORT = Symbol('abort') +const ABORTED = Symbol('aborted') +const SIGNAL = Symbol('signal') + +const defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = + (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') +const ITERATOR = + (doIter && Symbol.iterator) || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. +const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' + +const isArrayBuffer = b => + b instanceof ArrayBuffer || + (typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0) + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor(src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe() { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors() {} + end() { + this.unpipe() + if (this.opts.end) this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor(src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +export class Minipass extends Stream { + constructor(options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this[PIPES] = [] + this[BUFFER] = [] + this[OBJECTMODE] = (options && options.objectMode) || false + if (this[OBJECTMODE]) this[ENCODING] = null + else this[ENCODING] = (options && options.encoding) || null + if (this[ENCODING] === 'buffer') this[ENCODING] = null + this[ASYNC] = (options && !!options.async) || false + this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) + } + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) + } + this[SIGNAL] = options && options.signal + this[ABORTED] = false + if (this[SIGNAL]) { + this[SIGNAL].addEventListener('abort', () => this[ABORT]()) + if (this[SIGNAL].aborted) { + this[ABORT]() + } + } + } + + get bufferLength() { + return this[BUFFERLENGTH] + } + + get encoding() { + return this[ENCODING] + } + set encoding(enc) { + if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') + + if ( + this[ENCODING] && + enc !== this[ENCODING] && + ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) + ) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? new SD(enc) : null + if (this[BUFFER].length) + this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding(enc) { + this.encoding = enc + } + + get objectMode() { + return this[OBJECTMODE] + } + set objectMode(om) { + this[OBJECTMODE] = this[OBJECTMODE] || !!om + } + + get ['async']() { + return this[ASYNC] + } + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a + } + + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true + this.emit('abort', this[SIGNAL].reason) + this.destroy(this[SIGNAL].reason) + } + + get aborted() { + return this[ABORTED] + } + set aborted(_) {} + + write(chunk, encoding, cb) { + if (this[ABORTED]) return false + if (this[EOF]) throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit( + 'error', + Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + ) + ) + return true + } + + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + + if (!encoding) encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? 
*/ + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + if (cb) fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if ( + typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed) + ) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + read(n) { + if (this[DESTROYED]) return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + + if (this[OBJECTMODE]) n = null + + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] + else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this[BUFFER][0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ](n, chunk) { + if (n === chunk.length || n === null) this[BUFFERSHIFT]() + else { + this[BUFFER][0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this[BUFFER].length && !this[EOF]) this.emit('drain') + + return chunk + } + + end(chunk, encoding, cb) { + if (typeof chunk === 'function') (cb = chunk), (chunk = null) + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + if (chunk) this.write(chunk, encoding) + if (cb) this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this[BUFFER].length) this[FLUSH]() + else if (this[EOF]) this[MAYBE_EMIT_END]() + else this.emit('drain') + } + + resume() { + return this[RESUME]() + } + + pause() { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed() { + return this[DESTROYED] + } + + get flowing() { + return this[FLOWING] + } + + get paused() { + return this[PAUSED] + } + + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 + else this[BUFFERLENGTH] += chunk.length + this[BUFFER].push(chunk) + } + + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 + else this[BUFFERLENGTH] -= this[BUFFER][0].length + return this[BUFFER].shift() + } + + [FLUSH](noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) + + if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') + } + + [FLUSHCHUNK](chunk) { + this.emit('data', chunk) + return this.flowing + } + + pipe(dest, opts) { + if (this[DESTROYED]) return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) opts.end = false + else opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) dest.end() + } else { + this[PIPES].push( + !opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts) + ) + if (this[ASYNC]) defer(() => this[RESUME]()) + else this[RESUME]() + } + + return dest + } + + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest) + if (p) { + this[PIPES].splice(this[PIPES].indexOf(p), 1) + p.unpipe() + } + } + + addListener(ev, fn) { + return this.on(ev, fn) + } + + on(ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) + else fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd() { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END]() { + if ( + !this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF] + ) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) this.emit('close') + this[EMITTING_END] = false + } + } + + emit(ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + super.emit(ERROR, data) + const ret = + !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND]() { + if (this[EMITTED_END]) return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) defer(() => this[EMITEND2]()) + else this[EMITEND2]() + } + + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this[PIPES]) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect() { + const buf = [] + if (!this[OBJECTMODE]) buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat() { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] + ? 
buf.join('') + : Buffer.concat(buf, buf.dataLength) + ) + } + + // stream.promise().then(() => done, er => emitted error) + promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + stopped = true + return Promise.resolve({ done: true }) + } + const next = () => { + if (stopped) return stop() + const res = this.read() + if (res !== null) return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) return stop() + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + stop() + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + this.removeListener(DESTROYED, ondestroy) + stop() + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { + next, + throw: stop, + return: stop, + [ASYNCITERATOR]() { + return this + }, + } + } + + // for (let chunk of stream) + [ITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + this.removeListener(ERROR, stop) + this.removeListener(DESTROYED, stop) + this.removeListener('end', stop) + stopped = true + return { done: true } + } + + const next = () => { + if (stopped) return stop() + const value = this.read() + return value === null ? 
stop() : { value } + } + this.once('end', stop) + this.once(ERROR, stop) + this.once(DESTROYED, stop) + + return { + next, + throw: stop, + return: stop, + [ITERATOR]() { + return this + }, + } + } + + destroy(er) { + if (this[DESTROYED]) { + if (er) this.emit('error', er) + else this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) this.close() + + if (er) this.emit('error', er) + // if no error to emit, still reject pending promises + else this.emit(DESTROYED) + + return this + } + + static isStream(s) { + return ( + !!s && + (s instanceof Minipass || + s instanceof Stream || + (s instanceof EE && + // readable + (typeof s.pipe === 'function' || + // writable + (typeof s.write === 'function' && typeof s.end === 'function')))) + ) + } +} + + diff --git a/node_modules/minipass-fetch/node_modules/minipass/package.json b/node_modules/minipass-fetch/node_modules/minipass/package.json new file mode 100644 index 0000000000000..0e20e988047f2 --- /dev/null +++ b/node_modules/minipass-fetch/node_modules/minipass/package.json @@ -0,0 +1,76 @@ +{ + "name": "minipass", + "version": "5.0.0", + "description": "minimal implementation of a PassThrough stream", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", + "exports": { + ".": { + "import": { + "types": "./index.d.ts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + } + }, + "./package.json": "./package.json" + }, + "devDependencies": { + "@types/node": "^17.0.41", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "prettier": "^2.6.2", + "tap": "^16.2.0", + "through2": "^2.0.3", + "ts-node": "^10.8.1", + "typedoc": "^0.23.24", + "typescript": "^4.7.3" + }, + "scripts": { + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "node ./scripts/transpile-to-esm.js", + "snap": "tap", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags", + "typedoc": "typedoc ./index.d.ts", + "format": "prettier --write . --loglevel warn" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minipass.git" + }, + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "index.d.ts", + "index.js", + "index.mjs" + ], + "tap": { + "check-coverage": true + }, + "engines": { + "node": ">=8" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/node_modules/cacache/node_modules/minipass/dist/cjs/index.js b/node_modules/minipass/dist/cjs/index.js similarity index 100% rename from node_modules/cacache/node_modules/minipass/dist/cjs/index.js rename to node_modules/minipass/dist/cjs/index.js diff --git a/node_modules/cacache/node_modules/minipass/dist/cjs/package.json b/node_modules/minipass/dist/cjs/package.json similarity index 100% rename from node_modules/cacache/node_modules/minipass/dist/cjs/package.json rename to node_modules/minipass/dist/cjs/package.json diff --git a/node_modules/cacache/node_modules/minipass/dist/mjs/index.js b/node_modules/minipass/dist/mjs/index.js similarity index 100% rename from node_modules/cacache/node_modules/minipass/dist/mjs/index.js rename to node_modules/minipass/dist/mjs/index.js diff --git a/node_modules/cacache/node_modules/minipass/dist/mjs/package.json b/node_modules/minipass/dist/mjs/package.json similarity index 100% rename from node_modules/cacache/node_modules/minipass/dist/mjs/package.json rename to node_modules/minipass/dist/mjs/package.json diff --git a/node_modules/minipass/package.json b/node_modules/minipass/package.json index 0e20e988047f2..6faaa247a5bc6 100644 --- a/node_modules/minipass/package.json +++ b/node_modules/minipass/package.json @@ -1,70 +1,52 @@ { "name": "minipass", - "version": "5.0.0", + "version": "7.0.3", "description": "minimal implementation of a PassThrough stream", - "main": "./index.js", - "module": "./index.mjs", - "types": "./index.d.ts", + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/cjs/index.js", "exports": { ".": { "import": { - "types": "./index.d.ts", - "default": "./index.mjs" + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" }, "require": { - "types": "./index.d.ts", - "default": "./index.js" + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" } }, "./package.json": "./package.json" }, - "devDependencies": { - "@types/node": "^17.0.41", - "end-of-stream": "^1.4.0", - "node-abort-controller": "^3.1.1", - "prettier": "^2.6.2", - "tap": "^16.2.0", - "through2": "^2.0.3", - "ts-node": "^10.8.1", - "typedoc": "^0.23.24", - "typescript": "^4.7.3" - }, + "files": [ + "dist" + ], "scripts": { - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "prepare": "node ./scripts/transpile-to-esm.js", - "snap": "tap", - "test": "tap", "preversion": "npm test", "postversion": "npm publish", - "postpublish": "git push origin --follow-tags", - "typedoc": "typedoc ./index.d.ts", - "format": "prettier --write . --loglevel warn" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/isaacs/minipass.git" + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . 
--loglevel warn", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" }, - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "license": "ISC", - "files": [ - "index.d.ts", - "index.js", - "index.mjs" - ], "tap": { - "check-coverage": true - }, - "engines": { - "node": ">=8" + "coverage": false, + "node-arg": [ + "--enable-source-maps", + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false }, "prettier": { "semi": false, - "printWidth": 80, + "printWidth": 75, "tabWidth": 2, "useTabs": false, "singleQuote": true, @@ -72,5 +54,29 @@ "bracketSameLine": true, "arrowParens": "avoid", "endOfLine": "lf" + }, + "devDependencies": { + "@types/node": "^20.1.2", + "@types/tap": "^15.0.8", + "c8": "^7.13.0", + "prettier": "^2.6.2", + "tap": "^16.3.0", + "ts-node": "^10.9.1", + "typedoc": "^0.24.8", + "typescript": "^5.1.3", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "sync-content": "^1.0.2", + "through2": "^2.0.3" + }, + "repository": "https://github.com/isaacs/minipass", + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" } } diff --git a/node_modules/make-fetch-happen/node_modules/minipass/LICENSE b/node_modules/node-gyp/node_modules/minipass/LICENSE similarity index 100% rename from node_modules/make-fetch-happen/node_modules/minipass/LICENSE rename to node_modules/node-gyp/node_modules/minipass/LICENSE diff --git a/node_modules/node-gyp/node_modules/minipass/index.js b/node_modules/node-gyp/node_modules/minipass/index.js new file mode 100644 index 0000000000000..ed07c17acd97b --- /dev/null +++ b/node_modules/node-gyp/node_modules/minipass/index.js @@ -0,0 +1,702 @@ +'use strict' +const proc = + typeof process === 'object' && process + ? 
process + : { + stdout: null, + stderr: null, + } +const EE = require('events') +const Stream = require('stream') +const stringdecoder = require('string_decoder') +const SD = stringdecoder.StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFER = Symbol('buffer') +const PIPES = Symbol('pipes') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed') +// internal event when stream has an error +const ERROR = Symbol('error') +const EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') +const ABORT = Symbol('abort') +const ABORTED = Symbol('aborted') +const SIGNAL = Symbol('signal') + +const defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = + (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') +const ITERATOR = + (doIter && Symbol.iterator) || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. +const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' + +const isArrayBuffer = b => + b instanceof ArrayBuffer || + (typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0) + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor(src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe() { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors() {} + end() { + this.unpipe() + if (this.opts.end) this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor(src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +class Minipass extends Stream { + constructor(options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this[PIPES] = [] + this[BUFFER] = [] + this[OBJECTMODE] = (options && options.objectMode) || false + if (this[OBJECTMODE]) this[ENCODING] = null + else this[ENCODING] = (options && options.encoding) || null + if (this[ENCODING] === 'buffer') this[ENCODING] = null + this[ASYNC] = (options && !!options.async) || false + this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) + } + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) + } + this[SIGNAL] = options && options.signal + this[ABORTED] = false + if (this[SIGNAL]) { + this[SIGNAL].addEventListener('abort', () => this[ABORT]()) + if (this[SIGNAL].aborted) { + this[ABORT]() + } + } + } + + get bufferLength() { + return this[BUFFERLENGTH] + } + + get encoding() { + return this[ENCODING] + } + set encoding(enc) { + if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') + + if ( + this[ENCODING] && + enc !== this[ENCODING] && + ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) + ) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? new SD(enc) : null + if (this[BUFFER].length) + this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding(enc) { + this.encoding = enc + } + + get objectMode() { + return this[OBJECTMODE] + } + set objectMode(om) { + this[OBJECTMODE] = this[OBJECTMODE] || !!om + } + + get ['async']() { + return this[ASYNC] + } + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a + } + + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true + this.emit('abort', this[SIGNAL].reason) + this.destroy(this[SIGNAL].reason) + } + + get aborted() { + return this[ABORTED] + } + set aborted(_) {} + + write(chunk, encoding, cb) { + if (this[ABORTED]) return false + if (this[EOF]) throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit( + 'error', + Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + ) + ) + return true + } + + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + + if (!encoding) encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? 
*/ + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + if (cb) fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if ( + typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed) + ) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + read(n) { + if (this[DESTROYED]) return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + + if (this[OBJECTMODE]) n = null + + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] + else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this[BUFFER][0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ](n, chunk) { + if (n === chunk.length || n === null) this[BUFFERSHIFT]() + else { + this[BUFFER][0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this[BUFFER].length && !this[EOF]) this.emit('drain') + + return chunk + } + + end(chunk, encoding, cb) { + if (typeof chunk === 'function') (cb = chunk), (chunk = null) + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + if (chunk) this.write(chunk, encoding) + if (cb) this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this[BUFFER].length) this[FLUSH]() + else if (this[EOF]) this[MAYBE_EMIT_END]() + else this.emit('drain') + } + + resume() { + return this[RESUME]() + } + + pause() { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed() { + return this[DESTROYED] + } + + get flowing() { + return this[FLOWING] + } + + get paused() { + return this[PAUSED] + } + + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 + else this[BUFFERLENGTH] += chunk.length + this[BUFFER].push(chunk) + } + + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 + else this[BUFFERLENGTH] -= this[BUFFER][0].length + return this[BUFFER].shift() + } + + [FLUSH](noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) + + if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') + } + + [FLUSHCHUNK](chunk) { + this.emit('data', chunk) + return this.flowing + } + + pipe(dest, opts) { + if (this[DESTROYED]) return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) opts.end = false + else opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) dest.end() + } else { + this[PIPES].push( + !opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts) + ) + if (this[ASYNC]) defer(() => this[RESUME]()) + else this[RESUME]() + } + + return dest + } + + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest) + if (p) { + this[PIPES].splice(this[PIPES].indexOf(p), 1) + p.unpipe() + } + } + + addListener(ev, fn) { + return this.on(ev, fn) + } + + on(ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) + else fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd() { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END]() { + if ( + !this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF] + ) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) this.emit('close') + this[EMITTING_END] = false + } + } + + emit(ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + super.emit(ERROR, data) + const ret = + !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND]() { + if (this[EMITTED_END]) return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) defer(() => this[EMITEND2]()) + else this[EMITEND2]() + } + + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this[PIPES]) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect() { + const buf = [] + if (!this[OBJECTMODE]) buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat() { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] + ? 
buf.join('') + : Buffer.concat(buf, buf.dataLength) + ) + } + + // stream.promise().then(() => done, er => emitted error) + promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + stopped = true + return Promise.resolve({ done: true }) + } + const next = () => { + if (stopped) return stop() + const res = this.read() + if (res !== null) return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) return stop() + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + stop() + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + this.removeListener(DESTROYED, ondestroy) + stop() + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { + next, + throw: stop, + return: stop, + [ASYNCITERATOR]() { + return this + }, + } + } + + // for (let chunk of stream) + [ITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + this.removeListener(ERROR, stop) + this.removeListener(DESTROYED, stop) + this.removeListener('end', stop) + stopped = true + return { done: true } + } + + const next = () => { + if (stopped) return stop() + const value = this.read() + return value === null ? stop() : { value } + } + this.once('end', stop) + this.once(ERROR, stop) + this.once(DESTROYED, stop) + + return { + next, + throw: stop, + return: stop, + [ITERATOR]() { + return this + }, + } + } + + destroy(er) { + if (this[DESTROYED]) { + if (er) this.emit('error', er) + else this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) this.close() + + if (er) this.emit('error', er) + // if no error to emit, still reject pending promises + else this.emit(DESTROYED) + + return this + } + + static isStream(s) { + return ( + !!s && + (s instanceof Minipass || + s instanceof Stream || + (s instanceof EE && + // readable + (typeof s.pipe === 'function' || + // writable + (typeof s.write === 'function' && typeof s.end === 'function')))) + ) + } +} + +exports.Minipass = Minipass diff --git a/node_modules/node-gyp/node_modules/minipass/index.mjs b/node_modules/node-gyp/node_modules/minipass/index.mjs new file mode 100644 index 0000000000000..6ef6cd8cf0703 --- /dev/null +++ b/node_modules/node-gyp/node_modules/minipass/index.mjs @@ -0,0 +1,702 @@ +'use strict' +const proc = + typeof process === 'object' && process + ? 
process + : { + stdout: null, + stderr: null, + } +import EE from 'events' +import Stream from 'stream' +import stringdecoder from 'string_decoder' +const SD = stringdecoder.StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFER = Symbol('buffer') +const PIPES = Symbol('pipes') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed') +// internal event when stream has an error +const ERROR = Symbol('error') +const EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') +const ABORT = Symbol('abort') +const ABORTED = Symbol('aborted') +const SIGNAL = Symbol('signal') + +const defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = + (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') +const ITERATOR = + (doIter && Symbol.iterator) || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. +const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' + +const isArrayBuffer = b => + b instanceof ArrayBuffer || + (typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0) + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor(src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe() { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors() {} + end() { + this.unpipe() + if (this.opts.end) this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor(src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +export class Minipass extends Stream { + constructor(options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this[PIPES] = [] + this[BUFFER] = [] + this[OBJECTMODE] = (options && options.objectMode) || false + if (this[OBJECTMODE]) this[ENCODING] = null + else this[ENCODING] = (options && options.encoding) || null + if (this[ENCODING] === 'buffer') this[ENCODING] = null + this[ASYNC] = (options && !!options.async) || false + this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) + } + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) + } + this[SIGNAL] = options && options.signal + this[ABORTED] = false + if (this[SIGNAL]) { + this[SIGNAL].addEventListener('abort', () => this[ABORT]()) + if (this[SIGNAL].aborted) { + this[ABORT]() + } + } + } + + get bufferLength() { + return this[BUFFERLENGTH] + } + + get encoding() { + return this[ENCODING] + } + set encoding(enc) { + if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') + + if ( + this[ENCODING] && + enc !== this[ENCODING] && + ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) + ) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? new SD(enc) : null + if (this[BUFFER].length) + this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding(enc) { + this.encoding = enc + } + + get objectMode() { + return this[OBJECTMODE] + } + set objectMode(om) { + this[OBJECTMODE] = this[OBJECTMODE] || !!om + } + + get ['async']() { + return this[ASYNC] + } + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a + } + + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true + this.emit('abort', this[SIGNAL].reason) + this.destroy(this[SIGNAL].reason) + } + + get aborted() { + return this[ABORTED] + } + set aborted(_) {} + + write(chunk, encoding, cb) { + if (this[ABORTED]) return false + if (this[EOF]) throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit( + 'error', + Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + ) + ) + return true + } + + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + + if (!encoding) encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? 
*/ + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + if (cb) fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if ( + typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed) + ) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + read(n) { + if (this[DESTROYED]) return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + + if (this[OBJECTMODE]) n = null + + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] + else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this[BUFFER][0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ](n, chunk) { + if (n === chunk.length || n === null) this[BUFFERSHIFT]() + else { + this[BUFFER][0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this[BUFFER].length && !this[EOF]) this.emit('drain') + + return chunk + } + + end(chunk, encoding, cb) { + if (typeof chunk === 'function') (cb = chunk), (chunk = null) + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + if (chunk) this.write(chunk, encoding) + if (cb) this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
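+    // A minimal write-only sketch (hypothetical usage, not part of the
+    // original source; assumes the collect() behavior documented below):
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.end('hello')            // stream ends before any reader attaches
+    //   mp.collect().then(([s]) => console.log(s)) // logs 'hello'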
+ if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this[BUFFER].length) this[FLUSH]() + else if (this[EOF]) this[MAYBE_EMIT_END]() + else this.emit('drain') + } + + resume() { + return this[RESUME]() + } + + pause() { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed() { + return this[DESTROYED] + } + + get flowing() { + return this[FLOWING] + } + + get paused() { + return this[PAUSED] + } + + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 + else this[BUFFERLENGTH] += chunk.length + this[BUFFER].push(chunk) + } + + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 + else this[BUFFERLENGTH] -= this[BUFFER][0].length + return this[BUFFER].shift() + } + + [FLUSH](noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) + + if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') + } + + [FLUSHCHUNK](chunk) { + this.emit('data', chunk) + return this.flowing + } + + pipe(dest, opts) { + if (this[DESTROYED]) return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) opts.end = false + else opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) dest.end() + } else { + this[PIPES].push( + !opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts) + ) + if (this[ASYNC]) defer(() => this[RESUME]()) + else this[RESUME]() + } + + return dest + } + + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest) + if (p) { + this[PIPES].splice(this[PIPES].indexOf(p), 1) + p.unpipe() + } + } + + addListener(ev, fn) { + return this.on(ev, fn) + } + + on(ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) + else fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd() { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END]() { + if ( + !this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF] + ) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) this.emit('close') + this[EMITTING_END] = false + } + } + + emit(ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + super.emit(ERROR, data) + const ret = + !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND]() { + if (this[EMITTED_END]) return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) defer(() => this[EMITEND2]()) + else this[EMITEND2]() + } + + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this[PIPES]) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect() { + const buf = [] + if (!this[OBJECTMODE]) buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat() { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] + ? 
buf.join('') + : Buffer.concat(buf, buf.dataLength) + ) + } + + // stream.promise().then(() => done, er => emitted error) + promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + stopped = true + return Promise.resolve({ done: true }) + } + const next = () => { + if (stopped) return stop() + const res = this.read() + if (res !== null) return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) return stop() + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + stop() + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + this.removeListener(DESTROYED, ondestroy) + stop() + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { + next, + throw: stop, + return: stop, + [ASYNCITERATOR]() { + return this + }, + } + } + + // for (let chunk of stream) + [ITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + this.removeListener(ERROR, stop) + this.removeListener(DESTROYED, stop) + this.removeListener('end', stop) + stopped = true + return { done: true } + } + + const next = () => { + if (stopped) return stop() + const value = this.read() + return value === null ? 
stop() : { value } + } + this.once('end', stop) + this.once(ERROR, stop) + this.once(DESTROYED, stop) + + return { + next, + throw: stop, + return: stop, + [ITERATOR]() { + return this + }, + } + } + + destroy(er) { + if (this[DESTROYED]) { + if (er) this.emit('error', er) + else this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) this.close() + + if (er) this.emit('error', er) + // if no error to emit, still reject pending promises + else this.emit(DESTROYED) + + return this + } + + static isStream(s) { + return ( + !!s && + (s instanceof Minipass || + s instanceof Stream || + (s instanceof EE && + // readable + (typeof s.pipe === 'function' || + // writable + (typeof s.write === 'function' && typeof s.end === 'function')))) + ) + } +} + + diff --git a/node_modules/node-gyp/node_modules/minipass/package.json b/node_modules/node-gyp/node_modules/minipass/package.json new file mode 100644 index 0000000000000..0e20e988047f2 --- /dev/null +++ b/node_modules/node-gyp/node_modules/minipass/package.json @@ -0,0 +1,76 @@ +{ + "name": "minipass", + "version": "5.0.0", + "description": "minimal implementation of a PassThrough stream", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", + "exports": { + ".": { + "import": { + "types": "./index.d.ts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + } + }, + "./package.json": "./package.json" + }, + "devDependencies": { + "@types/node": "^17.0.41", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "prettier": "^2.6.2", + "tap": "^16.2.0", + "through2": "^2.0.3", + "ts-node": "^10.8.1", + "typedoc": "^0.23.24", + "typescript": "^4.7.3" + }, + "scripts": { + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "node ./scripts/transpile-to-esm.js", + "snap": "tap", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags", + "typedoc": "typedoc ./index.d.ts", + "format": "prettier --write . --loglevel warn" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minipass.git" + }, + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "index.d.ts", + "index.js", + "index.mjs" + ], + "tap": { + "check-coverage": true + }, + "engines": { + "node": ">=8" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/index.js b/node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/index.js deleted file mode 100644 index 068c095b69793..0000000000000 --- a/node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/index.js +++ /dev/null @@ -1,1028 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0; -const proc = typeof process === 'object' && process - ? 
process - : { - stdout: null, - stderr: null, - }; -const node_events_1 = require("node:events"); -const node_stream_1 = __importDefault(require("node:stream")); -const node_string_decoder_1 = require("node:string_decoder"); -/** - * Return true if the argument is a Minipass stream, Node stream, or something - * else that Minipass can interact with. - */ -const isStream = (s) => !!s && - typeof s === 'object' && - (s instanceof Minipass || - s instanceof node_stream_1.default || - (0, exports.isReadable)(s) || - (0, exports.isWritable)(s)); -exports.isStream = isStream; -/** - * Return true if the argument is a valid {@link Minipass.Readable} - */ -const isReadable = (s) => !!s && - typeof s === 'object' && - s instanceof node_events_1.EventEmitter && - typeof s.pipe === 'function' && - // node core Writable streams have a pipe() method, but it throws - s.pipe !== node_stream_1.default.Writable.prototype.pipe; -exports.isReadable = isReadable; -/** - * Return true if the argument is a valid {@link Minipass.Writable} - */ -const isWritable = (s) => !!s && - typeof s === 'object' && - s instanceof node_events_1.EventEmitter && - typeof s.write === 'function' && - typeof s.end === 'function'; -exports.isWritable = isWritable; -const EOF = Symbol('EOF'); -const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); -const EMITTED_END = Symbol('emittedEnd'); -const EMITTING_END = Symbol('emittingEnd'); -const EMITTED_ERROR = Symbol('emittedError'); -const CLOSED = Symbol('closed'); -const READ = Symbol('read'); -const FLUSH = Symbol('flush'); -const FLUSHCHUNK = Symbol('flushChunk'); -const ENCODING = Symbol('encoding'); -const DECODER = Symbol('decoder'); -const FLOWING = Symbol('flowing'); -const PAUSED = Symbol('paused'); -const RESUME = Symbol('resume'); -const BUFFER = Symbol('buffer'); -const PIPES = Symbol('pipes'); -const BUFFERLENGTH = Symbol('bufferLength'); -const BUFFERPUSH = Symbol('bufferPush'); -const BUFFERSHIFT = Symbol('bufferShift'); -const OBJECTMODE = Symbol('objectMode'); -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed'); -// internal event when stream has an error -const ERROR = Symbol('error'); -const EMITDATA = Symbol('emitData'); -const EMITEND = Symbol('emitEnd'); -const EMITEND2 = Symbol('emitEnd2'); -const ASYNC = Symbol('async'); -const ABORT = Symbol('abort'); -const ABORTED = Symbol('aborted'); -const SIGNAL = Symbol('signal'); -const DATALISTENERS = Symbol('dataListeners'); -const DISCARDED = Symbol('discarded'); -const defer = (fn) => Promise.resolve().then(fn); -const nodefer = (fn) => fn(); -const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; -const isArrayBufferLike = (b) => b instanceof ArrayBuffer || - (!!b && - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0); -const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); -/** - * Internal class representing a pipe to a destination stream. 
- * - * @internal - */ -class Pipe { - src; - dest; - opts; - ondrain; - constructor(src, dest, opts) { - this.src = src; - this.dest = dest; - this.opts = opts; - this.ondrain = () => src[RESUME](); - this.dest.on('drain', this.ondrain); - } - unpipe() { - this.dest.removeListener('drain', this.ondrain); - } - // only here for the prototype - /* c8 ignore start */ - proxyErrors(_er) { } - /* c8 ignore stop */ - end() { - this.unpipe(); - if (this.opts.end) - this.dest.end(); - } -} -/** - * Internal class representing a pipe to a destination stream where - * errors are proxied. - * - * @internal - */ -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors); - super.unpipe(); - } - constructor(src, dest, opts) { - super(src, dest, opts); - this.proxyErrors = er => dest.emit('error', er); - src.on('error', this.proxyErrors); - } -} -const isObjectModeOptions = (o) => !!o.objectMode; -const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; -/** - * Main export, the Minipass class - * - * `RType` is the type of data emitted, defaults to Buffer - * - * `WType` is the type of data to be written, if RType is buffer or string, - * then any {@link Minipass.ContiguousData} is allowed. - * - * `Events` is the set of event handler signatures that this object - * will emit, see {@link Minipass.Events} - */ -class Minipass extends node_events_1.EventEmitter { - [FLOWING] = false; - [PAUSED] = false; - [PIPES] = []; - [BUFFER] = []; - [OBJECTMODE]; - [ENCODING]; - [ASYNC]; - [DECODER]; - [EOF] = false; - [EMITTED_END] = false; - [EMITTING_END] = false; - [CLOSED] = false; - [EMITTED_ERROR] = null; - [BUFFERLENGTH] = 0; - [DESTROYED] = false; - [SIGNAL]; - [ABORTED] = false; - [DATALISTENERS] = 0; - [DISCARDED] = false; - /** - * true if the stream can be written - */ - writable = true; - /** - * true if the stream can be read - */ - readable = true; - /** - * If `RType` is Buffer, then options do not need to be provided. - * Otherwise, an options object must be provided to specify either - * {@link Minipass.SharedOptions.objectMode} or - * {@link Minipass.SharedOptions.encoding}, as appropriate. - */ - constructor(...args) { - const options = (args[0] || - {}); - super(); - if (options.objectMode && typeof options.encoding === 'string') { - throw new TypeError('Encoding and objectMode may not be used together'); - } - if (isObjectModeOptions(options)) { - this[OBJECTMODE] = true; - this[ENCODING] = null; - } - else if (isEncodingOptions(options)) { - this[ENCODING] = options.encoding; - this[OBJECTMODE] = false; - } - else { - this[OBJECTMODE] = false; - this[ENCODING] = null; - } - this[ASYNC] = !!options.async; - this[DECODER] = this[ENCODING] - ? new node_string_decoder_1.StringDecoder(this[ENCODING]) - : null; - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); - } - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); - } - const { signal } = options; - if (signal) { - this[SIGNAL] = signal; - if (signal.aborted) { - this[ABORT](); - } - else { - signal.addEventListener('abort', () => this[ABORT]()); - } - } - } - /** - * The amount of data stored in the buffer waiting to be read. - * - * For Buffer strings, this will be the total byte length. 
- * For string encoding streams, this will be the string character length, - * according to JavaScript's `string.length` logic. - * For objectMode streams, this is a count of the items waiting to be - * emitted. - */ - get bufferLength() { - return this[BUFFERLENGTH]; - } - /** - * The `BufferEncoding` currently in use, or `null` - */ - get encoding() { - return this[ENCODING]; - } - /** - * @deprecated - This is a read only property - */ - set encoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * @deprecated - Encoding may only be set at instantiation time - */ - setEncoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * True if this is an objectMode stream - */ - get objectMode() { - return this[OBJECTMODE]; - } - /** - * @deprecated - This is a read-only property - */ - set objectMode(_om) { - throw new Error('objectMode must be set at instantiation time'); - } - /** - * true if this is an async stream - */ - get ['async']() { - return this[ASYNC]; - } - /** - * Set to true to make this stream async. - * - * Once set, it cannot be unset, as this would potentially cause incorrect - * behavior. Ie, a sync stream can be made async, but an async stream - * cannot be safely made sync. - */ - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a; - } - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true; - this.emit('abort', this[SIGNAL]?.reason); - this.destroy(this[SIGNAL]?.reason); - } - /** - * True if the stream has been aborted. - */ - get aborted() { - return this[ABORTED]; - } - /** - * No-op setter. Stream aborted status is set via the AbortSignal provided - * in the constructor options. - */ - set aborted(_) { } - write(chunk, encoding, cb) { - if (this[ABORTED]) - return false; - if (this[EOF]) - throw new Error('write after end'); - if (this[DESTROYED]) { - this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); - return true; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (!encoding) - encoding = 'utf8'; - const fn = this[ASYNC] ? defer : nodefer; - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything is only allowed if in object mode, so throw - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); - } - else if (isArrayBufferLike(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk); - } - else if (typeof chunk !== 'string') { - throw new Error('Non-contiguous data written to non-objectMode stream'); - } - } - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - // maybe impossible? 
- /* c8 ignore start */ - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - /* c8 ignore stop */ - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { - //@ts-ignore - sinful unsafe type change - chunk = Buffer.from(chunk, encoding); - } - if (Buffer.isBuffer(chunk) && this[ENCODING]) { - //@ts-ignore - sinful unsafe type change - chunk = this[DECODER].write(chunk); - } - // Note: flushing CAN potentially switch us into not-flowing mode - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - /** - * Low-level explicit read method. - * - * In objectMode, the argument is ignored, and one item is returned if - * available. - * - * `n` is the number of bytes (or in the case of encoding streams, - * characters) to consume. If `n` is not provided, then the entire buffer - * is returned, or `null` is returned if no data is available. - * - * If `n` is greater that the amount of data in the internal buffer, - * then `null` is returned. - */ - read(n) { - if (this[DESTROYED]) - return null; - this[DISCARDED] = false; - if (this[BUFFERLENGTH] === 0 || - n === 0 || - (n && n > this[BUFFERLENGTH])) { - this[MAYBE_EMIT_END](); - return null; - } - if (this[OBJECTMODE]) - n = null; - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - // not object mode, so if we have an encoding, then RType is string - // otherwise, must be Buffer - this[BUFFER] = [ - (this[ENCODING] - ? this[BUFFER].join('') - : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), - ]; - } - const ret = this[READ](n || null, this[BUFFER][0]); - this[MAYBE_EMIT_END](); - return ret; - } - [READ](n, chunk) { - if (this[OBJECTMODE]) - this[BUFFERSHIFT](); - else { - const c = chunk; - if (n === c.length || n === null) - this[BUFFERSHIFT](); - else if (typeof c === 'string') { - this[BUFFER][0] = c.slice(n); - chunk = c.slice(0, n); - this[BUFFERLENGTH] -= n; - } - else { - this[BUFFER][0] = c.subarray(n); - chunk = c.subarray(0, n); - this[BUFFERLENGTH] -= n; - } - } - this.emit('data', chunk); - if (!this[BUFFER].length && !this[EOF]) - this.emit('drain'); - return chunk; - } - end(chunk, encoding, cb) { - if (typeof chunk === 'function') { - cb = chunk; - chunk = undefined; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (chunk !== undefined) - this.write(chunk, encoding); - if (cb) - this.once('end', cb); - this[EOF] = true; - this.writable = false; - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this[FLOWING] || !this[PAUSED]) - this[MAYBE_EMIT_END](); - return this; - } - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) - return; - if (!this[DATALISTENERS] && !this[PIPES].length) { - this[DISCARDED] = true; - } - this[PAUSED] = false; - this[FLOWING] = true; - this.emit('resume'); - if (this[BUFFER].length) - this[FLUSH](); - else if (this[EOF]) - this[MAYBE_EMIT_END](); - else - this.emit('drain'); - } - /** - * Resume the stream if it is currently in a paused state - * - * If called when there are no pipe destinations or `data` event listeners, - * this will place the stream in a "discarded" state, where all data will - * be thrown away. The discarded state is removed if a pipe destination or - * data handler is added, if pause() is called, or if any synchronous or - * asynchronous iteration is started. - */ - resume() { - return this[RESUME](); - } - /** - * Pause the stream - */ - pause() { - this[FLOWING] = false; - this[PAUSED] = true; - this[DISCARDED] = false; - } - /** - * true if the stream has been forcibly destroyed - */ - get destroyed() { - return this[DESTROYED]; - } - /** - * true if the stream is currently in a flowing state, meaning that - * any writes will be immediately emitted. - */ - get flowing() { - return this[FLOWING]; - } - /** - * true if the stream is currently in a paused state - */ - get paused() { - return this[PAUSED]; - } - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1; - else - this[BUFFERLENGTH] += chunk.length; - this[BUFFER].push(chunk); - } - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1; - else - this[BUFFERLENGTH] -= this[BUFFER][0].length; - return this[BUFFER].shift(); - } - [FLUSH](noDrain = false) { - do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && - this[BUFFER].length); - if (!noDrain && !this[BUFFER].length && !this[EOF]) - this.emit('drain'); - } - [FLUSHCHUNK](chunk) { - this.emit('data', chunk); - return this[FLOWING]; - } - /** - * Pipe all data emitted by this stream into the destination provided. - * - * Triggers the flow of data. - */ - pipe(dest, opts) { - if (this[DESTROYED]) - return dest; - this[DISCARDED] = false; - const ended = this[EMITTED_END]; - opts = opts || {}; - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false; - else - opts.end = opts.end !== false; - opts.proxyErrors = !!opts.proxyErrors; - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end(); - } - else { - // "as" here just ignores the WType, which pipes don't care about, - // since they're only consuming from us, and writing to the dest - this[PIPES].push(!opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)); - if (this[ASYNC]) - defer(() => this[RESUME]()); - else - this[RESUME](); - } - return dest; - } - /** - * Fully unhook a piped destination stream. - * - * If the destination stream was the only consumer of this stream (ie, - * there are no other piped destinations or `'data'` event listeners) - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. 
- */ - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest); - if (p) { - if (this[PIPES].length === 1) { - if (this[FLOWING] && this[DATALISTENERS] === 0) { - this[FLOWING] = false; - } - this[PIPES] = []; - } - else - this[PIPES].splice(this[PIPES].indexOf(p), 1); - p.unpipe(); - } - } - /** - * Alias for {@link Minipass#on} - */ - addListener(ev, handler) { - return this.on(ev, handler); - } - /** - * Mostly identical to `EventEmitter.on`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * - Adding a 'data' event handler will trigger the flow of data - * - * - Adding a 'readable' event handler when there is data waiting to be read - * will cause 'readable' to be emitted immediately. - * - * - Adding an 'endish' event handler ('end', 'finish', etc.) which has - * already passed will cause the event to be emitted immediately and all - * handlers removed. - * - * - Adding an 'error' event handler after an error has been emitted will - * cause the event to be re-emitted immediately with the error previously - * raised. - */ - on(ev, handler) { - const ret = super.on(ev, handler); - if (ev === 'data') { - this[DISCARDED] = false; - this[DATALISTENERS]++; - if (!this[PIPES].length && !this[FLOWING]) { - this[RESUME](); - } - } - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { - super.emit('readable'); - } - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev); - this.removeAllListeners(ev); - } - else if (ev === 'error' && this[EMITTED_ERROR]) { - const h = handler; - if (this[ASYNC]) - defer(() => h.call(this, this[EMITTED_ERROR])); - else - h.call(this, this[EMITTED_ERROR]); - } - return ret; - } - /** - * Alias for {@link Minipass#off} - */ - removeListener(ev, handler) { - return this.off(ev, handler); - } - /** - * Mostly identical to `EventEmitter.off` - * - * If a 'data' event handler is removed, and it was the last consumer - * (ie, there are no pipe destinations or other 'data' event listeners), - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. - */ - off(ev, handler) { - const ret = super.off(ev, handler); - // if we previously had listeners, and now we don't, and we don't - // have any pipes, then stop the flow, unless it's been explicitly - // put in a discarded flowing state via stream.resume(). - if (ev === 'data') { - this[DATALISTENERS] = this.listeners('data').length; - if (this[DATALISTENERS] === 0 && - !this[DISCARDED] && - !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * Mostly identical to `EventEmitter.removeAllListeners` - * - * If all 'data' event handlers are removed, and they were the last consumer - * (ie, there are no pipe destinations), then the flow of data will stop - * until there is another consumer or {@link Minipass#resume} is explicitly - * called. 
- */ - removeAllListeners(ev) { - const ret = super.removeAllListeners(ev); - if (ev === 'data' || ev === undefined) { - this[DATALISTENERS] = 0; - if (!this[DISCARDED] && !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * true if the 'end' event has been emitted - */ - get emittedEnd() { - return this[EMITTED_END]; - } - [MAYBE_EMIT_END]() { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF]) { - this[EMITTING_END] = true; - this.emit('end'); - this.emit('prefinish'); - this.emit('finish'); - if (this[CLOSED]) - this.emit('close'); - this[EMITTING_END] = false; - } - } - /** - * Mostly identical to `EventEmitter.emit`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * If the stream has been destroyed, and the event is something other - * than 'close' or 'error', then `false` is returned and no handlers - * are called. - * - * If the event is 'end', and has already been emitted, then the event - * is ignored. If the stream is in a paused or non-flowing state, then - * the event will be deferred until data flow resumes. If the stream is - * async, then handlers will be called on the next tick rather than - * immediately. - * - * If the event is 'close', and 'end' has not yet been emitted, then - * the event will be deferred until after 'end' is emitted. - * - * If the event is 'error', and an AbortSignal was provided for the stream, - * and there are no listeners, then the event is ignored, matching the - * behavior of node core streams in the presense of an AbortSignal. - * - * If the event is 'finish' or 'prefinish', then all listeners will be - * removed after emitting the event, to prevent double-firing. - */ - emit(ev, ...args) { - const data = args[0]; - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && - ev !== 'close' && - ev !== DESTROYED && - this[DESTROYED]) { - return false; - } - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? (defer(() => this[EMITDATA](data)), true) - : this[EMITDATA](data); - } - else if (ev === 'end') { - return this[EMITEND](); - } - else if (ev === 'close') { - this[CLOSED] = true; - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return false; - const ret = super.emit('close'); - this.removeAllListeners('close'); - return ret; - } - else if (ev === 'error') { - this[EMITTED_ERROR] = data; - super.emit(ERROR, data); - const ret = !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false; - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'resume') { - const ret = super.emit('resume'); - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev); - this.removeAllListeners(ev); - return ret; - } - // Some other unknown event - const ret = super.emit(ev, ...args); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) - this.pause(); - } - const ret = this[DISCARDED] ? false : super.emit('data', data); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITEND]() { - if (this[EMITTED_END]) - return false; - this[EMITTED_END] = true; - this.readable = false; - return this[ASYNC] - ? 
(defer(() => this[EMITEND2]()), true) - : this[EMITEND2](); - } - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end(); - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data); - } - if (!this[DISCARDED]) - super.emit('data', data); - } - } - for (const p of this[PIPES]) { - p.end(); - } - const ret = super.emit('end'); - this.removeAllListeners('end'); - return ret; - } - /** - * Return a Promise that resolves to an array of all emitted data once - * the stream ends. - */ - async collect() { - const buf = Object.assign([], { - dataLength: 0, - }); - if (!this[OBJECTMODE]) - buf.dataLength = 0; - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise(); - this.on('data', c => { - buf.push(c); - if (!this[OBJECTMODE]) - buf.dataLength += c.length; - }); - await p; - return buf; - } - /** - * Return a Promise that resolves to the concatenation of all emitted data - * once the stream ends. - * - * Not allowed on objectMode streams. - */ - async concat() { - if (this[OBJECTMODE]) { - throw new Error('cannot concat in objectMode'); - } - const buf = await this.collect(); - return (this[ENCODING] - ? buf.join('') - : Buffer.concat(buf, buf.dataLength)); - } - /** - * Return a void Promise that resolves once the stream ends. - */ - async promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))); - this.on('error', er => reject(er)); - this.on('end', () => resolve()); - }); - } - /** - * Asynchronous `for await of` iteration. - * - * This will continue emitting all chunks until the stream terminates. - */ - [Symbol.asyncIterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. - this[DISCARDED] = false; - let stopped = false; - const stop = async () => { - this.pause(); - stopped = true; - return { value: undefined, done: true }; - }; - const next = () => { - if (stopped) - return stop(); - const res = this.read(); - if (res !== null) - return Promise.resolve({ done: false, value: res }); - if (this[EOF]) - return stop(); - let resolve; - let reject; - const onerr = (er) => { - this.off('data', ondata); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - stop(); - reject(er); - }; - const ondata = (value) => { - this.off('error', onerr); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - this.pause(); - resolve({ value, done: !!this[EOF] }); - }; - const onend = () => { - this.off('error', onerr); - this.off('data', ondata); - this.off(DESTROYED, ondestroy); - stop(); - resolve({ done: true, value: undefined }); - }; - const ondestroy = () => onerr(new Error('stream destroyed')); - return new Promise((res, rej) => { - reject = rej; - resolve = res; - this.once(DESTROYED, ondestroy); - this.once('error', onerr); - this.once('end', onend); - this.once('data', ondata); - }); - }; - return { - next, - throw: stop, - return: stop, - [Symbol.asyncIterator]() { - return this; - }, - }; - } - /** - * Synchronous `for of` iteration. - * - * The iteration will terminate when the internal buffer runs out, even - * if the stream has not yet terminated. - */ - [Symbol.iterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. 
- this[DISCARDED] = false; - let stopped = false; - const stop = () => { - this.pause(); - this.off(ERROR, stop); - this.off(DESTROYED, stop); - this.off('end', stop); - stopped = true; - return { done: true, value: undefined }; - }; - const next = () => { - if (stopped) - return stop(); - const value = this.read(); - return value === null ? stop() : { done: false, value }; - }; - this.once('end', stop); - this.once(ERROR, stop); - this.once(DESTROYED, stop); - return { - next, - throw: stop, - return: stop, - [Symbol.iterator]() { - return this; - }, - }; - } - /** - * Destroy a stream, preventing it from being used for any further purpose. - * - * If the stream has a `close()` method, then it will be called on - * destruction. - * - * After destruction, any attempt to write data, read data, or emit most - * events will be ignored. - * - * If an error argument is provided, then it will be emitted in an - * 'error' event. - */ - destroy(er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er); - else - this.emit(DESTROYED); - return this; - } - this[DESTROYED] = true; - this[DISCARDED] = true; - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0; - this[BUFFERLENGTH] = 0; - const wc = this; - if (typeof wc.close === 'function' && !this[CLOSED]) - wc.close(); - if (er) - this.emit('error', er); - // if no error to emit, still reject pending promises - else - this.emit(DESTROYED); - return this; - } - /** - * Alias for {@link isStream} - * - * Former export location, maintained for backwards compatibility. - * - * @deprecated - */ - static get isStream() { - return exports.isStream; - } -} -exports.Minipass = Minipass; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/package.json b/node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/package.json deleted file mode 100644 index 5bbefffbabee3..0000000000000 --- a/node_modules/npm-registry-fetch/node_modules/minipass/dist/cjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/index.js b/node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/index.js deleted file mode 100644 index b5fa4513c9083..0000000000000 --- a/node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/index.js +++ /dev/null @@ -1,1018 +0,0 @@ -const proc = typeof process === 'object' && process - ? process - : { - stdout: null, - stderr: null, - }; -import { EventEmitter } from 'node:events'; -import Stream from 'node:stream'; -import { StringDecoder } from 'node:string_decoder'; -/** - * Return true if the argument is a Minipass stream, Node stream, or something - * else that Minipass can interact with. 
- */ -export const isStream = (s) => !!s && - typeof s === 'object' && - (s instanceof Minipass || - s instanceof Stream || - isReadable(s) || - isWritable(s)); -/** - * Return true if the argument is a valid {@link Minipass.Readable} - */ -export const isReadable = (s) => !!s && - typeof s === 'object' && - s instanceof EventEmitter && - typeof s.pipe === 'function' && - // node core Writable streams have a pipe() method, but it throws - s.pipe !== Stream.Writable.prototype.pipe; -/** - * Return true if the argument is a valid {@link Minipass.Writable} - */ -export const isWritable = (s) => !!s && - typeof s === 'object' && - s instanceof EventEmitter && - typeof s.write === 'function' && - typeof s.end === 'function'; -const EOF = Symbol('EOF'); -const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); -const EMITTED_END = Symbol('emittedEnd'); -const EMITTING_END = Symbol('emittingEnd'); -const EMITTED_ERROR = Symbol('emittedError'); -const CLOSED = Symbol('closed'); -const READ = Symbol('read'); -const FLUSH = Symbol('flush'); -const FLUSHCHUNK = Symbol('flushChunk'); -const ENCODING = Symbol('encoding'); -const DECODER = Symbol('decoder'); -const FLOWING = Symbol('flowing'); -const PAUSED = Symbol('paused'); -const RESUME = Symbol('resume'); -const BUFFER = Symbol('buffer'); -const PIPES = Symbol('pipes'); -const BUFFERLENGTH = Symbol('bufferLength'); -const BUFFERPUSH = Symbol('bufferPush'); -const BUFFERSHIFT = Symbol('bufferShift'); -const OBJECTMODE = Symbol('objectMode'); -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed'); -// internal event when stream has an error -const ERROR = Symbol('error'); -const EMITDATA = Symbol('emitData'); -const EMITEND = Symbol('emitEnd'); -const EMITEND2 = Symbol('emitEnd2'); -const ASYNC = Symbol('async'); -const ABORT = Symbol('abort'); -const ABORTED = Symbol('aborted'); -const SIGNAL = Symbol('signal'); -const DATALISTENERS = Symbol('dataListeners'); -const DISCARDED = Symbol('discarded'); -const defer = (fn) => Promise.resolve().then(fn); -const nodefer = (fn) => fn(); -const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; -const isArrayBufferLike = (b) => b instanceof ArrayBuffer || - (!!b && - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0); -const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); -/** - * Internal class representing a pipe to a destination stream. - * - * @internal - */ -class Pipe { - src; - dest; - opts; - ondrain; - constructor(src, dest, opts) { - this.src = src; - this.dest = dest; - this.opts = opts; - this.ondrain = () => src[RESUME](); - this.dest.on('drain', this.ondrain); - } - unpipe() { - this.dest.removeListener('drain', this.ondrain); - } - // only here for the prototype - /* c8 ignore start */ - proxyErrors(_er) { } - /* c8 ignore stop */ - end() { - this.unpipe(); - if (this.opts.end) - this.dest.end(); - } -} -/** - * Internal class representing a pipe to a destination stream where - * errors are proxied. 
- * - * @internal - */ -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors); - super.unpipe(); - } - constructor(src, dest, opts) { - super(src, dest, opts); - this.proxyErrors = er => dest.emit('error', er); - src.on('error', this.proxyErrors); - } -} -const isObjectModeOptions = (o) => !!o.objectMode; -const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; -/** - * Main export, the Minipass class - * - * `RType` is the type of data emitted, defaults to Buffer - * - * `WType` is the type of data to be written, if RType is buffer or string, - * then any {@link Minipass.ContiguousData} is allowed. - * - * `Events` is the set of event handler signatures that this object - * will emit, see {@link Minipass.Events} - */ -export class Minipass extends EventEmitter { - [FLOWING] = false; - [PAUSED] = false; - [PIPES] = []; - [BUFFER] = []; - [OBJECTMODE]; - [ENCODING]; - [ASYNC]; - [DECODER]; - [EOF] = false; - [EMITTED_END] = false; - [EMITTING_END] = false; - [CLOSED] = false; - [EMITTED_ERROR] = null; - [BUFFERLENGTH] = 0; - [DESTROYED] = false; - [SIGNAL]; - [ABORTED] = false; - [DATALISTENERS] = 0; - [DISCARDED] = false; - /** - * true if the stream can be written - */ - writable = true; - /** - * true if the stream can be read - */ - readable = true; - /** - * If `RType` is Buffer, then options do not need to be provided. - * Otherwise, an options object must be provided to specify either - * {@link Minipass.SharedOptions.objectMode} or - * {@link Minipass.SharedOptions.encoding}, as appropriate. - */ - constructor(...args) { - const options = (args[0] || - {}); - super(); - if (options.objectMode && typeof options.encoding === 'string') { - throw new TypeError('Encoding and objectMode may not be used together'); - } - if (isObjectModeOptions(options)) { - this[OBJECTMODE] = true; - this[ENCODING] = null; - } - else if (isEncodingOptions(options)) { - this[ENCODING] = options.encoding; - this[OBJECTMODE] = false; - } - else { - this[OBJECTMODE] = false; - this[ENCODING] = null; - } - this[ASYNC] = !!options.async; - this[DECODER] = this[ENCODING] - ? new StringDecoder(this[ENCODING]) - : null; - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); - } - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); - } - const { signal } = options; - if (signal) { - this[SIGNAL] = signal; - if (signal.aborted) { - this[ABORT](); - } - else { - signal.addEventListener('abort', () => this[ABORT]()); - } - } - } - /** - * The amount of data stored in the buffer waiting to be read. - * - * For Buffer strings, this will be the total byte length. - * For string encoding streams, this will be the string character length, - * according to JavaScript's `string.length` logic. - * For objectMode streams, this is a count of the items waiting to be - * emitted. 
- */ - get bufferLength() { - return this[BUFFERLENGTH]; - } - /** - * The `BufferEncoding` currently in use, or `null` - */ - get encoding() { - return this[ENCODING]; - } - /** - * @deprecated - This is a read only property - */ - set encoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * @deprecated - Encoding may only be set at instantiation time - */ - setEncoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * True if this is an objectMode stream - */ - get objectMode() { - return this[OBJECTMODE]; - } - /** - * @deprecated - This is a read-only property - */ - set objectMode(_om) { - throw new Error('objectMode must be set at instantiation time'); - } - /** - * true if this is an async stream - */ - get ['async']() { - return this[ASYNC]; - } - /** - * Set to true to make this stream async. - * - * Once set, it cannot be unset, as this would potentially cause incorrect - * behavior. Ie, a sync stream can be made async, but an async stream - * cannot be safely made sync. - */ - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a; - } - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true; - this.emit('abort', this[SIGNAL]?.reason); - this.destroy(this[SIGNAL]?.reason); - } - /** - * True if the stream has been aborted. - */ - get aborted() { - return this[ABORTED]; - } - /** - * No-op setter. Stream aborted status is set via the AbortSignal provided - * in the constructor options. - */ - set aborted(_) { } - write(chunk, encoding, cb) { - if (this[ABORTED]) - return false; - if (this[EOF]) - throw new Error('write after end'); - if (this[DESTROYED]) { - this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); - return true; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (!encoding) - encoding = 'utf8'; - const fn = this[ASYNC] ? defer : nodefer; - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything is only allowed if in object mode, so throw - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); - } - else if (isArrayBufferLike(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk); - } - else if (typeof chunk !== 'string') { - throw new Error('Non-contiguous data written to non-objectMode stream'); - } - } - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - // maybe impossible? 
- /* c8 ignore start */ - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - /* c8 ignore stop */ - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { - //@ts-ignore - sinful unsafe type change - chunk = Buffer.from(chunk, encoding); - } - if (Buffer.isBuffer(chunk) && this[ENCODING]) { - //@ts-ignore - sinful unsafe type change - chunk = this[DECODER].write(chunk); - } - // Note: flushing CAN potentially switch us into not-flowing mode - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - /** - * Low-level explicit read method. - * - * In objectMode, the argument is ignored, and one item is returned if - * available. - * - * `n` is the number of bytes (or in the case of encoding streams, - * characters) to consume. If `n` is not provided, then the entire buffer - * is returned, or `null` is returned if no data is available. - * - * If `n` is greater that the amount of data in the internal buffer, - * then `null` is returned. - */ - read(n) { - if (this[DESTROYED]) - return null; - this[DISCARDED] = false; - if (this[BUFFERLENGTH] === 0 || - n === 0 || - (n && n > this[BUFFERLENGTH])) { - this[MAYBE_EMIT_END](); - return null; - } - if (this[OBJECTMODE]) - n = null; - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - // not object mode, so if we have an encoding, then RType is string - // otherwise, must be Buffer - this[BUFFER] = [ - (this[ENCODING] - ? this[BUFFER].join('') - : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), - ]; - } - const ret = this[READ](n || null, this[BUFFER][0]); - this[MAYBE_EMIT_END](); - return ret; - } - [READ](n, chunk) { - if (this[OBJECTMODE]) - this[BUFFERSHIFT](); - else { - const c = chunk; - if (n === c.length || n === null) - this[BUFFERSHIFT](); - else if (typeof c === 'string') { - this[BUFFER][0] = c.slice(n); - chunk = c.slice(0, n); - this[BUFFERLENGTH] -= n; - } - else { - this[BUFFER][0] = c.subarray(n); - chunk = c.subarray(0, n); - this[BUFFERLENGTH] -= n; - } - } - this.emit('data', chunk); - if (!this[BUFFER].length && !this[EOF]) - this.emit('drain'); - return chunk; - } - end(chunk, encoding, cb) { - if (typeof chunk === 'function') { - cb = chunk; - chunk = undefined; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (chunk !== undefined) - this.write(chunk, encoding); - if (cb) - this.once('end', cb); - this[EOF] = true; - this.writable = false; - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this[FLOWING] || !this[PAUSED]) - this[MAYBE_EMIT_END](); - return this; - } - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) - return; - if (!this[DATALISTENERS] && !this[PIPES].length) { - this[DISCARDED] = true; - } - this[PAUSED] = false; - this[FLOWING] = true; - this.emit('resume'); - if (this[BUFFER].length) - this[FLUSH](); - else if (this[EOF]) - this[MAYBE_EMIT_END](); - else - this.emit('drain'); - } - /** - * Resume the stream if it is currently in a paused state - * - * If called when there are no pipe destinations or `data` event listeners, - * this will place the stream in a "discarded" state, where all data will - * be thrown away. The discarded state is removed if a pipe destination or - * data handler is added, if pause() is called, or if any synchronous or - * asynchronous iteration is started. - */ - resume() { - return this[RESUME](); - } - /** - * Pause the stream - */ - pause() { - this[FLOWING] = false; - this[PAUSED] = true; - this[DISCARDED] = false; - } - /** - * true if the stream has been forcibly destroyed - */ - get destroyed() { - return this[DESTROYED]; - } - /** - * true if the stream is currently in a flowing state, meaning that - * any writes will be immediately emitted. - */ - get flowing() { - return this[FLOWING]; - } - /** - * true if the stream is currently in a paused state - */ - get paused() { - return this[PAUSED]; - } - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1; - else - this[BUFFERLENGTH] += chunk.length; - this[BUFFER].push(chunk); - } - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1; - else - this[BUFFERLENGTH] -= this[BUFFER][0].length; - return this[BUFFER].shift(); - } - [FLUSH](noDrain = false) { - do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && - this[BUFFER].length); - if (!noDrain && !this[BUFFER].length && !this[EOF]) - this.emit('drain'); - } - [FLUSHCHUNK](chunk) { - this.emit('data', chunk); - return this[FLOWING]; - } - /** - * Pipe all data emitted by this stream into the destination provided. - * - * Triggers the flow of data. - */ - pipe(dest, opts) { - if (this[DESTROYED]) - return dest; - this[DISCARDED] = false; - const ended = this[EMITTED_END]; - opts = opts || {}; - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false; - else - opts.end = opts.end !== false; - opts.proxyErrors = !!opts.proxyErrors; - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end(); - } - else { - // "as" here just ignores the WType, which pipes don't care about, - // since they're only consuming from us, and writing to the dest - this[PIPES].push(!opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)); - if (this[ASYNC]) - defer(() => this[RESUME]()); - else - this[RESUME](); - } - return dest; - } - /** - * Fully unhook a piped destination stream. - * - * If the destination stream was the only consumer of this stream (ie, - * there are no other piped destinations or `'data'` event listeners) - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. 
- */ - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest); - if (p) { - if (this[PIPES].length === 1) { - if (this[FLOWING] && this[DATALISTENERS] === 0) { - this[FLOWING] = false; - } - this[PIPES] = []; - } - else - this[PIPES].splice(this[PIPES].indexOf(p), 1); - p.unpipe(); - } - } - /** - * Alias for {@link Minipass#on} - */ - addListener(ev, handler) { - return this.on(ev, handler); - } - /** - * Mostly identical to `EventEmitter.on`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * - Adding a 'data' event handler will trigger the flow of data - * - * - Adding a 'readable' event handler when there is data waiting to be read - * will cause 'readable' to be emitted immediately. - * - * - Adding an 'endish' event handler ('end', 'finish', etc.) which has - * already passed will cause the event to be emitted immediately and all - * handlers removed. - * - * - Adding an 'error' event handler after an error has been emitted will - * cause the event to be re-emitted immediately with the error previously - * raised. - */ - on(ev, handler) { - const ret = super.on(ev, handler); - if (ev === 'data') { - this[DISCARDED] = false; - this[DATALISTENERS]++; - if (!this[PIPES].length && !this[FLOWING]) { - this[RESUME](); - } - } - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { - super.emit('readable'); - } - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev); - this.removeAllListeners(ev); - } - else if (ev === 'error' && this[EMITTED_ERROR]) { - const h = handler; - if (this[ASYNC]) - defer(() => h.call(this, this[EMITTED_ERROR])); - else - h.call(this, this[EMITTED_ERROR]); - } - return ret; - } - /** - * Alias for {@link Minipass#off} - */ - removeListener(ev, handler) { - return this.off(ev, handler); - } - /** - * Mostly identical to `EventEmitter.off` - * - * If a 'data' event handler is removed, and it was the last consumer - * (ie, there are no pipe destinations or other 'data' event listeners), - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. - */ - off(ev, handler) { - const ret = super.off(ev, handler); - // if we previously had listeners, and now we don't, and we don't - // have any pipes, then stop the flow, unless it's been explicitly - // put in a discarded flowing state via stream.resume(). - if (ev === 'data') { - this[DATALISTENERS] = this.listeners('data').length; - if (this[DATALISTENERS] === 0 && - !this[DISCARDED] && - !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * Mostly identical to `EventEmitter.removeAllListeners` - * - * If all 'data' event handlers are removed, and they were the last consumer - * (ie, there are no pipe destinations), then the flow of data will stop - * until there is another consumer or {@link Minipass#resume} is explicitly - * called. 
- */ - removeAllListeners(ev) { - const ret = super.removeAllListeners(ev); - if (ev === 'data' || ev === undefined) { - this[DATALISTENERS] = 0; - if (!this[DISCARDED] && !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * true if the 'end' event has been emitted - */ - get emittedEnd() { - return this[EMITTED_END]; - } - [MAYBE_EMIT_END]() { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF]) { - this[EMITTING_END] = true; - this.emit('end'); - this.emit('prefinish'); - this.emit('finish'); - if (this[CLOSED]) - this.emit('close'); - this[EMITTING_END] = false; - } - } - /** - * Mostly identical to `EventEmitter.emit`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * If the stream has been destroyed, and the event is something other - * than 'close' or 'error', then `false` is returned and no handlers - * are called. - * - * If the event is 'end', and has already been emitted, then the event - * is ignored. If the stream is in a paused or non-flowing state, then - * the event will be deferred until data flow resumes. If the stream is - * async, then handlers will be called on the next tick rather than - * immediately. - * - * If the event is 'close', and 'end' has not yet been emitted, then - * the event will be deferred until after 'end' is emitted. - * - * If the event is 'error', and an AbortSignal was provided for the stream, - * and there are no listeners, then the event is ignored, matching the - * behavior of node core streams in the presense of an AbortSignal. - * - * If the event is 'finish' or 'prefinish', then all listeners will be - * removed after emitting the event, to prevent double-firing. - */ - emit(ev, ...args) { - const data = args[0]; - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && - ev !== 'close' && - ev !== DESTROYED && - this[DESTROYED]) { - return false; - } - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? (defer(() => this[EMITDATA](data)), true) - : this[EMITDATA](data); - } - else if (ev === 'end') { - return this[EMITEND](); - } - else if (ev === 'close') { - this[CLOSED] = true; - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return false; - const ret = super.emit('close'); - this.removeAllListeners('close'); - return ret; - } - else if (ev === 'error') { - this[EMITTED_ERROR] = data; - super.emit(ERROR, data); - const ret = !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false; - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'resume') { - const ret = super.emit('resume'); - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev); - this.removeAllListeners(ev); - return ret; - } - // Some other unknown event - const ret = super.emit(ev, ...args); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) - this.pause(); - } - const ret = this[DISCARDED] ? false : super.emit('data', data); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITEND]() { - if (this[EMITTED_END]) - return false; - this[EMITTED_END] = true; - this.readable = false; - return this[ASYNC] - ? 
(defer(() => this[EMITEND2]()), true) - : this[EMITEND2](); - } - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end(); - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data); - } - if (!this[DISCARDED]) - super.emit('data', data); - } - } - for (const p of this[PIPES]) { - p.end(); - } - const ret = super.emit('end'); - this.removeAllListeners('end'); - return ret; - } - /** - * Return a Promise that resolves to an array of all emitted data once - * the stream ends. - */ - async collect() { - const buf = Object.assign([], { - dataLength: 0, - }); - if (!this[OBJECTMODE]) - buf.dataLength = 0; - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise(); - this.on('data', c => { - buf.push(c); - if (!this[OBJECTMODE]) - buf.dataLength += c.length; - }); - await p; - return buf; - } - /** - * Return a Promise that resolves to the concatenation of all emitted data - * once the stream ends. - * - * Not allowed on objectMode streams. - */ - async concat() { - if (this[OBJECTMODE]) { - throw new Error('cannot concat in objectMode'); - } - const buf = await this.collect(); - return (this[ENCODING] - ? buf.join('') - : Buffer.concat(buf, buf.dataLength)); - } - /** - * Return a void Promise that resolves once the stream ends. - */ - async promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))); - this.on('error', er => reject(er)); - this.on('end', () => resolve()); - }); - } - /** - * Asynchronous `for await of` iteration. - * - * This will continue emitting all chunks until the stream terminates. - */ - [Symbol.asyncIterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. - this[DISCARDED] = false; - let stopped = false; - const stop = async () => { - this.pause(); - stopped = true; - return { value: undefined, done: true }; - }; - const next = () => { - if (stopped) - return stop(); - const res = this.read(); - if (res !== null) - return Promise.resolve({ done: false, value: res }); - if (this[EOF]) - return stop(); - let resolve; - let reject; - const onerr = (er) => { - this.off('data', ondata); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - stop(); - reject(er); - }; - const ondata = (value) => { - this.off('error', onerr); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - this.pause(); - resolve({ value, done: !!this[EOF] }); - }; - const onend = () => { - this.off('error', onerr); - this.off('data', ondata); - this.off(DESTROYED, ondestroy); - stop(); - resolve({ done: true, value: undefined }); - }; - const ondestroy = () => onerr(new Error('stream destroyed')); - return new Promise((res, rej) => { - reject = rej; - resolve = res; - this.once(DESTROYED, ondestroy); - this.once('error', onerr); - this.once('end', onend); - this.once('data', ondata); - }); - }; - return { - next, - throw: stop, - return: stop, - [Symbol.asyncIterator]() { - return this; - }, - }; - } - /** - * Synchronous `for of` iteration. - * - * The iteration will terminate when the internal buffer runs out, even - * if the stream has not yet terminated. - */ - [Symbol.iterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. 
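// A sketch of the `for await` consumption described above, assuming the same
// ESM build: the async iterator pauses the stream between chunks and only
// completes once EOF is reached, so chunks written later are still observed.
import { Minipass } from 'minipass'
const src = new Minipass({ encoding: 'utf8' })
const consume = async () => {
  for await (const chunk of src) console.log('chunk:', chunk)
}
const done = consume() // chunk: a / chunk: b
src.write('a')
src.write('b')
src.end()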
- this[DISCARDED] = false; - let stopped = false; - const stop = () => { - this.pause(); - this.off(ERROR, stop); - this.off(DESTROYED, stop); - this.off('end', stop); - stopped = true; - return { done: true, value: undefined }; - }; - const next = () => { - if (stopped) - return stop(); - const value = this.read(); - return value === null ? stop() : { done: false, value }; - }; - this.once('end', stop); - this.once(ERROR, stop); - this.once(DESTROYED, stop); - return { - next, - throw: stop, - return: stop, - [Symbol.iterator]() { - return this; - }, - }; - } - /** - * Destroy a stream, preventing it from being used for any further purpose. - * - * If the stream has a `close()` method, then it will be called on - * destruction. - * - * After destruction, any attempt to write data, read data, or emit most - * events will be ignored. - * - * If an error argument is provided, then it will be emitted in an - * 'error' event. - */ - destroy(er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er); - else - this.emit(DESTROYED); - return this; - } - this[DESTROYED] = true; - this[DISCARDED] = true; - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0; - this[BUFFERLENGTH] = 0; - const wc = this; - if (typeof wc.close === 'function' && !this[CLOSED]) - wc.close(); - if (er) - this.emit('error', er); - // if no error to emit, still reject pending promises - else - this.emit(DESTROYED); - return this; - } - /** - * Alias for {@link isStream} - * - * Former export location, maintained for backwards compatibility. - * - * @deprecated - */ - static get isStream() { - return isStream; - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/package.json b/node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/package.json deleted file mode 100644 index 3dbc1ca591c05..0000000000000 --- a/node_modules/npm-registry-fetch/node_modules/minipass/dist/mjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git a/node_modules/npm-registry-fetch/node_modules/minipass/package.json b/node_modules/npm-registry-fetch/node_modules/minipass/package.json deleted file mode 100644 index 355501c0a10c1..0000000000000 --- a/node_modules/npm-registry-fetch/node_modules/minipass/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "minipass", - "version": "7.0.2", - "description": "minimal implementation of a PassThrough stream", - "main": "./dist/cjs/index.js", - "module": "./dist/mjs/index.js", - "types": "./dist/cjs/index.js", - "exports": { - ".": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" - }, - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.js" - } - }, - "./package.json": "./package.json" - }, - "files": [ - "dist" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", - "format": "prettier --write . 
--loglevel warn", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" - }, - "tap": { - "coverage": false, - "node-arg": [ - "--enable-source-maps", - "--no-warnings", - "--loader", - "ts-node/esm" - ], - "ts": false - }, - "prettier": { - "semi": false, - "printWidth": 75, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "devDependencies": { - "@types/node": "^20.1.2", - "@types/tap": "^15.0.8", - "c8": "^7.13.0", - "prettier": "^2.6.2", - "tap": "^16.3.0", - "ts-node": "^10.9.1", - "typedoc": "^0.24.8", - "typescript": "^5.1.3", - "end-of-stream": "^1.4.0", - "node-abort-controller": "^3.1.1", - "sync-content": "^1.0.2", - "through2": "^2.0.3" - }, - "repository": "https://github.com/isaacs/minipass", - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } -} diff --git a/node_modules/pacote/node_modules/minipass/dist/cjs/index.js b/node_modules/pacote/node_modules/minipass/dist/cjs/index.js deleted file mode 100644 index 068c095b69793..0000000000000 --- a/node_modules/pacote/node_modules/minipass/dist/cjs/index.js +++ /dev/null @@ -1,1028 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0; -const proc = typeof process === 'object' && process - ? process - : { - stdout: null, - stderr: null, - }; -const node_events_1 = require("node:events"); -const node_stream_1 = __importDefault(require("node:stream")); -const node_string_decoder_1 = require("node:string_decoder"); -/** - * Return true if the argument is a Minipass stream, Node stream, or something - * else that Minipass can interact with. 
- */ -const isStream = (s) => !!s && - typeof s === 'object' && - (s instanceof Minipass || - s instanceof node_stream_1.default || - (0, exports.isReadable)(s) || - (0, exports.isWritable)(s)); -exports.isStream = isStream; -/** - * Return true if the argument is a valid {@link Minipass.Readable} - */ -const isReadable = (s) => !!s && - typeof s === 'object' && - s instanceof node_events_1.EventEmitter && - typeof s.pipe === 'function' && - // node core Writable streams have a pipe() method, but it throws - s.pipe !== node_stream_1.default.Writable.prototype.pipe; -exports.isReadable = isReadable; -/** - * Return true if the argument is a valid {@link Minipass.Writable} - */ -const isWritable = (s) => !!s && - typeof s === 'object' && - s instanceof node_events_1.EventEmitter && - typeof s.write === 'function' && - typeof s.end === 'function'; -exports.isWritable = isWritable; -const EOF = Symbol('EOF'); -const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); -const EMITTED_END = Symbol('emittedEnd'); -const EMITTING_END = Symbol('emittingEnd'); -const EMITTED_ERROR = Symbol('emittedError'); -const CLOSED = Symbol('closed'); -const READ = Symbol('read'); -const FLUSH = Symbol('flush'); -const FLUSHCHUNK = Symbol('flushChunk'); -const ENCODING = Symbol('encoding'); -const DECODER = Symbol('decoder'); -const FLOWING = Symbol('flowing'); -const PAUSED = Symbol('paused'); -const RESUME = Symbol('resume'); -const BUFFER = Symbol('buffer'); -const PIPES = Symbol('pipes'); -const BUFFERLENGTH = Symbol('bufferLength'); -const BUFFERPUSH = Symbol('bufferPush'); -const BUFFERSHIFT = Symbol('bufferShift'); -const OBJECTMODE = Symbol('objectMode'); -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed'); -// internal event when stream has an error -const ERROR = Symbol('error'); -const EMITDATA = Symbol('emitData'); -const EMITEND = Symbol('emitEnd'); -const EMITEND2 = Symbol('emitEnd2'); -const ASYNC = Symbol('async'); -const ABORT = Symbol('abort'); -const ABORTED = Symbol('aborted'); -const SIGNAL = Symbol('signal'); -const DATALISTENERS = Symbol('dataListeners'); -const DISCARDED = Symbol('discarded'); -const defer = (fn) => Promise.resolve().then(fn); -const nodefer = (fn) => fn(); -const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; -const isArrayBufferLike = (b) => b instanceof ArrayBuffer || - (!!b && - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0); -const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); -/** - * Internal class representing a pipe to a destination stream. - * - * @internal - */ -class Pipe { - src; - dest; - opts; - ondrain; - constructor(src, dest, opts) { - this.src = src; - this.dest = dest; - this.opts = opts; - this.ondrain = () => src[RESUME](); - this.dest.on('drain', this.ondrain); - } - unpipe() { - this.dest.removeListener('drain', this.ondrain); - } - // only here for the prototype - /* c8 ignore start */ - proxyErrors(_er) { } - /* c8 ignore stop */ - end() { - this.unpipe(); - if (this.opts.end) - this.dest.end(); - } -} -/** - * Internal class representing a pipe to a destination stream where - * errors are proxied. 
- * - * @internal - */ -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors); - super.unpipe(); - } - constructor(src, dest, opts) { - super(src, dest, opts); - this.proxyErrors = er => dest.emit('error', er); - src.on('error', this.proxyErrors); - } -} -const isObjectModeOptions = (o) => !!o.objectMode; -const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; -/** - * Main export, the Minipass class - * - * `RType` is the type of data emitted, defaults to Buffer - * - * `WType` is the type of data to be written, if RType is buffer or string, - * then any {@link Minipass.ContiguousData} is allowed. - * - * `Events` is the set of event handler signatures that this object - * will emit, see {@link Minipass.Events} - */ -class Minipass extends node_events_1.EventEmitter { - [FLOWING] = false; - [PAUSED] = false; - [PIPES] = []; - [BUFFER] = []; - [OBJECTMODE]; - [ENCODING]; - [ASYNC]; - [DECODER]; - [EOF] = false; - [EMITTED_END] = false; - [EMITTING_END] = false; - [CLOSED] = false; - [EMITTED_ERROR] = null; - [BUFFERLENGTH] = 0; - [DESTROYED] = false; - [SIGNAL]; - [ABORTED] = false; - [DATALISTENERS] = 0; - [DISCARDED] = false; - /** - * true if the stream can be written - */ - writable = true; - /** - * true if the stream can be read - */ - readable = true; - /** - * If `RType` is Buffer, then options do not need to be provided. - * Otherwise, an options object must be provided to specify either - * {@link Minipass.SharedOptions.objectMode} or - * {@link Minipass.SharedOptions.encoding}, as appropriate. - */ - constructor(...args) { - const options = (args[0] || - {}); - super(); - if (options.objectMode && typeof options.encoding === 'string') { - throw new TypeError('Encoding and objectMode may not be used together'); - } - if (isObjectModeOptions(options)) { - this[OBJECTMODE] = true; - this[ENCODING] = null; - } - else if (isEncodingOptions(options)) { - this[ENCODING] = options.encoding; - this[OBJECTMODE] = false; - } - else { - this[OBJECTMODE] = false; - this[ENCODING] = null; - } - this[ASYNC] = !!options.async; - this[DECODER] = this[ENCODING] - ? new node_string_decoder_1.StringDecoder(this[ENCODING]) - : null; - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); - } - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); - } - const { signal } = options; - if (signal) { - this[SIGNAL] = signal; - if (signal.aborted) { - this[ABORT](); - } - else { - signal.addEventListener('abort', () => this[ABORT]()); - } - } - } - /** - * The amount of data stored in the buffer waiting to be read. - * - * For Buffer strings, this will be the total byte length. - * For string encoding streams, this will be the string character length, - * according to JavaScript's `string.length` logic. - * For objectMode streams, this is a count of the items waiting to be - * emitted. 
- */ - get bufferLength() { - return this[BUFFERLENGTH]; - } - /** - * The `BufferEncoding` currently in use, or `null` - */ - get encoding() { - return this[ENCODING]; - } - /** - * @deprecated - This is a read only property - */ - set encoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * @deprecated - Encoding may only be set at instantiation time - */ - setEncoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * True if this is an objectMode stream - */ - get objectMode() { - return this[OBJECTMODE]; - } - /** - * @deprecated - This is a read-only property - */ - set objectMode(_om) { - throw new Error('objectMode must be set at instantiation time'); - } - /** - * true if this is an async stream - */ - get ['async']() { - return this[ASYNC]; - } - /** - * Set to true to make this stream async. - * - * Once set, it cannot be unset, as this would potentially cause incorrect - * behavior. Ie, a sync stream can be made async, but an async stream - * cannot be safely made sync. - */ - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a; - } - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true; - this.emit('abort', this[SIGNAL]?.reason); - this.destroy(this[SIGNAL]?.reason); - } - /** - * True if the stream has been aborted. - */ - get aborted() { - return this[ABORTED]; - } - /** - * No-op setter. Stream aborted status is set via the AbortSignal provided - * in the constructor options. - */ - set aborted(_) { } - write(chunk, encoding, cb) { - if (this[ABORTED]) - return false; - if (this[EOF]) - throw new Error('write after end'); - if (this[DESTROYED]) { - this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); - return true; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (!encoding) - encoding = 'utf8'; - const fn = this[ASYNC] ? defer : nodefer; - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything is only allowed if in object mode, so throw - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); - } - else if (isArrayBufferLike(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk); - } - else if (typeof chunk !== 'string') { - throw new Error('Non-contiguous data written to non-objectMode stream'); - } - } - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - // maybe impossible? 
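// A sketch of the objectMode path handled just below, assuming the CommonJS
// build of this same file: objectMode and encoding are mutually exclusive
// (the constructor throws otherwise), and arbitrary values pass through
// without Buffer coercion.
const { Minipass } = require('minipass')
const objs = new Minipass({ objectMode: true })
objs.on('data', obj => console.log(obj.id)) // 1, then 2
objs.write({ id: 1 })
objs.write({ id: 2 })
objs.end()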
- /* c8 ignore start */ - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - /* c8 ignore stop */ - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { - //@ts-ignore - sinful unsafe type change - chunk = Buffer.from(chunk, encoding); - } - if (Buffer.isBuffer(chunk) && this[ENCODING]) { - //@ts-ignore - sinful unsafe type change - chunk = this[DECODER].write(chunk); - } - // Note: flushing CAN potentially switch us into not-flowing mode - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - /** - * Low-level explicit read method. - * - * In objectMode, the argument is ignored, and one item is returned if - * available. - * - * `n` is the number of bytes (or in the case of encoding streams, - * characters) to consume. If `n` is not provided, then the entire buffer - * is returned, or `null` is returned if no data is available. - * - * If `n` is greater that the amount of data in the internal buffer, - * then `null` is returned. - */ - read(n) { - if (this[DESTROYED]) - return null; - this[DISCARDED] = false; - if (this[BUFFERLENGTH] === 0 || - n === 0 || - (n && n > this[BUFFERLENGTH])) { - this[MAYBE_EMIT_END](); - return null; - } - if (this[OBJECTMODE]) - n = null; - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - // not object mode, so if we have an encoding, then RType is string - // otherwise, must be Buffer - this[BUFFER] = [ - (this[ENCODING] - ? this[BUFFER].join('') - : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), - ]; - } - const ret = this[READ](n || null, this[BUFFER][0]); - this[MAYBE_EMIT_END](); - return ret; - } - [READ](n, chunk) { - if (this[OBJECTMODE]) - this[BUFFERSHIFT](); - else { - const c = chunk; - if (n === c.length || n === null) - this[BUFFERSHIFT](); - else if (typeof c === 'string') { - this[BUFFER][0] = c.slice(n); - chunk = c.slice(0, n); - this[BUFFERLENGTH] -= n; - } - else { - this[BUFFER][0] = c.subarray(n); - chunk = c.subarray(0, n); - this[BUFFERLENGTH] -= n; - } - } - this.emit('data', chunk); - if (!this[BUFFER].length && !this[EOF]) - this.emit('drain'); - return chunk; - } - end(chunk, encoding, cb) { - if (typeof chunk === 'function') { - cb = chunk; - chunk = undefined; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (chunk !== undefined) - this.write(chunk, encoding); - if (cb) - this.once('end', cb); - this[EOF] = true; - this.writable = false; - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
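// A sketch of the pipe() behavior defined further below, assuming a writable
// destination: process.stdout and process.stderr are never ended by the
// pipe, while other destinations are ended unless { end: false } is passed.
const { Minipass } = require('minipass')
const mp = new Minipass({ encoding: 'utf8' })
mp.pipe(process.stdout) // stdout stays open after 'end'
mp.end('hello\n')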
- if (this[FLOWING] || !this[PAUSED]) - this[MAYBE_EMIT_END](); - return this; - } - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) - return; - if (!this[DATALISTENERS] && !this[PIPES].length) { - this[DISCARDED] = true; - } - this[PAUSED] = false; - this[FLOWING] = true; - this.emit('resume'); - if (this[BUFFER].length) - this[FLUSH](); - else if (this[EOF]) - this[MAYBE_EMIT_END](); - else - this.emit('drain'); - } - /** - * Resume the stream if it is currently in a paused state - * - * If called when there are no pipe destinations or `data` event listeners, - * this will place the stream in a "discarded" state, where all data will - * be thrown away. The discarded state is removed if a pipe destination or - * data handler is added, if pause() is called, or if any synchronous or - * asynchronous iteration is started. - */ - resume() { - return this[RESUME](); - } - /** - * Pause the stream - */ - pause() { - this[FLOWING] = false; - this[PAUSED] = true; - this[DISCARDED] = false; - } - /** - * true if the stream has been forcibly destroyed - */ - get destroyed() { - return this[DESTROYED]; - } - /** - * true if the stream is currently in a flowing state, meaning that - * any writes will be immediately emitted. - */ - get flowing() { - return this[FLOWING]; - } - /** - * true if the stream is currently in a paused state - */ - get paused() { - return this[PAUSED]; - } - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1; - else - this[BUFFERLENGTH] += chunk.length; - this[BUFFER].push(chunk); - } - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1; - else - this[BUFFERLENGTH] -= this[BUFFER][0].length; - return this[BUFFER].shift(); - } - [FLUSH](noDrain = false) { - do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && - this[BUFFER].length); - if (!noDrain && !this[BUFFER].length && !this[EOF]) - this.emit('drain'); - } - [FLUSHCHUNK](chunk) { - this.emit('data', chunk); - return this[FLOWING]; - } - /** - * Pipe all data emitted by this stream into the destination provided. - * - * Triggers the flow of data. - */ - pipe(dest, opts) { - if (this[DESTROYED]) - return dest; - this[DISCARDED] = false; - const ended = this[EMITTED_END]; - opts = opts || {}; - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false; - else - opts.end = opts.end !== false; - opts.proxyErrors = !!opts.proxyErrors; - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end(); - } - else { - // "as" here just ignores the WType, which pipes don't care about, - // since they're only consuming from us, and writing to the dest - this[PIPES].push(!opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)); - if (this[ASYNC]) - defer(() => this[RESUME]()); - else - this[RESUME](); - } - return dest; - } - /** - * Fully unhook a piped destination stream. - * - * If the destination stream was the only consumer of this stream (ie, - * there are no other piped destinations or `'data'` event listeners) - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. 
- */ - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest); - if (p) { - if (this[PIPES].length === 1) { - if (this[FLOWING] && this[DATALISTENERS] === 0) { - this[FLOWING] = false; - } - this[PIPES] = []; - } - else - this[PIPES].splice(this[PIPES].indexOf(p), 1); - p.unpipe(); - } - } - /** - * Alias for {@link Minipass#on} - */ - addListener(ev, handler) { - return this.on(ev, handler); - } - /** - * Mostly identical to `EventEmitter.on`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * - Adding a 'data' event handler will trigger the flow of data - * - * - Adding a 'readable' event handler when there is data waiting to be read - * will cause 'readable' to be emitted immediately. - * - * - Adding an 'endish' event handler ('end', 'finish', etc.) which has - * already passed will cause the event to be emitted immediately and all - * handlers removed. - * - * - Adding an 'error' event handler after an error has been emitted will - * cause the event to be re-emitted immediately with the error previously - * raised. - */ - on(ev, handler) { - const ret = super.on(ev, handler); - if (ev === 'data') { - this[DISCARDED] = false; - this[DATALISTENERS]++; - if (!this[PIPES].length && !this[FLOWING]) { - this[RESUME](); - } - } - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { - super.emit('readable'); - } - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev); - this.removeAllListeners(ev); - } - else if (ev === 'error' && this[EMITTED_ERROR]) { - const h = handler; - if (this[ASYNC]) - defer(() => h.call(this, this[EMITTED_ERROR])); - else - h.call(this, this[EMITTED_ERROR]); - } - return ret; - } - /** - * Alias for {@link Minipass#off} - */ - removeListener(ev, handler) { - return this.off(ev, handler); - } - /** - * Mostly identical to `EventEmitter.off` - * - * If a 'data' event handler is removed, and it was the last consumer - * (ie, there are no pipe destinations or other 'data' event listeners), - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. - */ - off(ev, handler) { - const ret = super.off(ev, handler); - // if we previously had listeners, and now we don't, and we don't - // have any pipes, then stop the flow, unless it's been explicitly - // put in a discarded flowing state via stream.resume(). - if (ev === 'data') { - this[DATALISTENERS] = this.listeners('data').length; - if (this[DATALISTENERS] === 0 && - !this[DISCARDED] && - !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * Mostly identical to `EventEmitter.removeAllListeners` - * - * If all 'data' event handlers are removed, and they were the last consumer - * (ie, there are no pipe destinations), then the flow of data will stop - * until there is another consumer or {@link Minipass#resume} is explicitly - * called. 
- */ - removeAllListeners(ev) { - const ret = super.removeAllListeners(ev); - if (ev === 'data' || ev === undefined) { - this[DATALISTENERS] = 0; - if (!this[DISCARDED] && !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * true if the 'end' event has been emitted - */ - get emittedEnd() { - return this[EMITTED_END]; - } - [MAYBE_EMIT_END]() { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF]) { - this[EMITTING_END] = true; - this.emit('end'); - this.emit('prefinish'); - this.emit('finish'); - if (this[CLOSED]) - this.emit('close'); - this[EMITTING_END] = false; - } - } - /** - * Mostly identical to `EventEmitter.emit`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * If the stream has been destroyed, and the event is something other - * than 'close' or 'error', then `false` is returned and no handlers - * are called. - * - * If the event is 'end', and has already been emitted, then the event - * is ignored. If the stream is in a paused or non-flowing state, then - * the event will be deferred until data flow resumes. If the stream is - * async, then handlers will be called on the next tick rather than - * immediately. - * - * If the event is 'close', and 'end' has not yet been emitted, then - * the event will be deferred until after 'end' is emitted. - * - * If the event is 'error', and an AbortSignal was provided for the stream, - * and there are no listeners, then the event is ignored, matching the - * behavior of node core streams in the presense of an AbortSignal. - * - * If the event is 'finish' or 'prefinish', then all listeners will be - * removed after emitting the event, to prevent double-firing. - */ - emit(ev, ...args) { - const data = args[0]; - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && - ev !== 'close' && - ev !== DESTROYED && - this[DESTROYED]) { - return false; - } - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? (defer(() => this[EMITDATA](data)), true) - : this[EMITDATA](data); - } - else if (ev === 'end') { - return this[EMITEND](); - } - else if (ev === 'close') { - this[CLOSED] = true; - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return false; - const ret = super.emit('close'); - this.removeAllListeners('close'); - return ret; - } - else if (ev === 'error') { - this[EMITTED_ERROR] = data; - super.emit(ERROR, data); - const ret = !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false; - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'resume') { - const ret = super.emit('resume'); - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev); - this.removeAllListeners(ev); - return ret; - } - // Some other unknown event - const ret = super.emit(ev, ...args); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) - this.pause(); - } - const ret = this[DISCARDED] ? false : super.emit('data', data); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITEND]() { - if (this[EMITTED_END]) - return false; - this[EMITTED_END] = true; - this.readable = false; - return this[ASYNC] - ? 
(defer(() => this[EMITEND2]()), true) - : this[EMITEND2](); - } - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end(); - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data); - } - if (!this[DISCARDED]) - super.emit('data', data); - } - } - for (const p of this[PIPES]) { - p.end(); - } - const ret = super.emit('end'); - this.removeAllListeners('end'); - return ret; - } - /** - * Return a Promise that resolves to an array of all emitted data once - * the stream ends. - */ - async collect() { - const buf = Object.assign([], { - dataLength: 0, - }); - if (!this[OBJECTMODE]) - buf.dataLength = 0; - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise(); - this.on('data', c => { - buf.push(c); - if (!this[OBJECTMODE]) - buf.dataLength += c.length; - }); - await p; - return buf; - } - /** - * Return a Promise that resolves to the concatenation of all emitted data - * once the stream ends. - * - * Not allowed on objectMode streams. - */ - async concat() { - if (this[OBJECTMODE]) { - throw new Error('cannot concat in objectMode'); - } - const buf = await this.collect(); - return (this[ENCODING] - ? buf.join('') - : Buffer.concat(buf, buf.dataLength)); - } - /** - * Return a void Promise that resolves once the stream ends. - */ - async promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))); - this.on('error', er => reject(er)); - this.on('end', () => resolve()); - }); - } - /** - * Asynchronous `for await of` iteration. - * - * This will continue emitting all chunks until the stream terminates. - */ - [Symbol.asyncIterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. - this[DISCARDED] = false; - let stopped = false; - const stop = async () => { - this.pause(); - stopped = true; - return { value: undefined, done: true }; - }; - const next = () => { - if (stopped) - return stop(); - const res = this.read(); - if (res !== null) - return Promise.resolve({ done: false, value: res }); - if (this[EOF]) - return stop(); - let resolve; - let reject; - const onerr = (er) => { - this.off('data', ondata); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - stop(); - reject(er); - }; - const ondata = (value) => { - this.off('error', onerr); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - this.pause(); - resolve({ value, done: !!this[EOF] }); - }; - const onend = () => { - this.off('error', onerr); - this.off('data', ondata); - this.off(DESTROYED, ondestroy); - stop(); - resolve({ done: true, value: undefined }); - }; - const ondestroy = () => onerr(new Error('stream destroyed')); - return new Promise((res, rej) => { - reject = rej; - resolve = res; - this.once(DESTROYED, ondestroy); - this.once('error', onerr); - this.once('end', onend); - this.once('data', ondata); - }); - }; - return { - next, - throw: stop, - return: stop, - [Symbol.asyncIterator]() { - return this; - }, - }; - } - /** - * Synchronous `for of` iteration. - * - * The iteration will terminate when the internal buffer runs out, even - * if the stream has not yet terminated. - */ - [Symbol.iterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. 
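// A sketch of the synchronous iteration described above, assuming the
// CommonJS build: `for of` only drains what is already buffered, so the
// loop stops at the first null read even though the stream has not ended.
const { Minipass } = require('minipass')
const mp = new Minipass({ encoding: 'utf8' })
mp.write('x')
mp.write('y')
for (const chunk of mp) console.log(chunk) // 'xy' (buffer is concatenated)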
- this[DISCARDED] = false; - let stopped = false; - const stop = () => { - this.pause(); - this.off(ERROR, stop); - this.off(DESTROYED, stop); - this.off('end', stop); - stopped = true; - return { done: true, value: undefined }; - }; - const next = () => { - if (stopped) - return stop(); - const value = this.read(); - return value === null ? stop() : { done: false, value }; - }; - this.once('end', stop); - this.once(ERROR, stop); - this.once(DESTROYED, stop); - return { - next, - throw: stop, - return: stop, - [Symbol.iterator]() { - return this; - }, - }; - } - /** - * Destroy a stream, preventing it from being used for any further purpose. - * - * If the stream has a `close()` method, then it will be called on - * destruction. - * - * After destruction, any attempt to write data, read data, or emit most - * events will be ignored. - * - * If an error argument is provided, then it will be emitted in an - * 'error' event. - */ - destroy(er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er); - else - this.emit(DESTROYED); - return this; - } - this[DESTROYED] = true; - this[DISCARDED] = true; - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0; - this[BUFFERLENGTH] = 0; - const wc = this; - if (typeof wc.close === 'function' && !this[CLOSED]) - wc.close(); - if (er) - this.emit('error', er); - // if no error to emit, still reject pending promises - else - this.emit(DESTROYED); - return this; - } - /** - * Alias for {@link isStream} - * - * Former export location, maintained for backwards compatibility. - * - * @deprecated - */ - static get isStream() { - return exports.isStream; - } -} -exports.Minipass = Minipass; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/pacote/node_modules/minipass/dist/cjs/package.json b/node_modules/pacote/node_modules/minipass/dist/cjs/package.json deleted file mode 100644 index 5bbefffbabee3..0000000000000 --- a/node_modules/pacote/node_modules/minipass/dist/cjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/pacote/node_modules/minipass/dist/mjs/index.js b/node_modules/pacote/node_modules/minipass/dist/mjs/index.js deleted file mode 100644 index b5fa4513c9083..0000000000000 --- a/node_modules/pacote/node_modules/minipass/dist/mjs/index.js +++ /dev/null @@ -1,1018 +0,0 @@ -const proc = typeof process === 'object' && process - ? process - : { - stdout: null, - stderr: null, - }; -import { EventEmitter } from 'node:events'; -import Stream from 'node:stream'; -import { StringDecoder } from 'node:string_decoder'; -/** - * Return true if the argument is a Minipass stream, Node stream, or something - * else that Minipass can interact with. 
- */ -export const isStream = (s) => !!s && - typeof s === 'object' && - (s instanceof Minipass || - s instanceof Stream || - isReadable(s) || - isWritable(s)); -/** - * Return true if the argument is a valid {@link Minipass.Readable} - */ -export const isReadable = (s) => !!s && - typeof s === 'object' && - s instanceof EventEmitter && - typeof s.pipe === 'function' && - // node core Writable streams have a pipe() method, but it throws - s.pipe !== Stream.Writable.prototype.pipe; -/** - * Return true if the argument is a valid {@link Minipass.Writable} - */ -export const isWritable = (s) => !!s && - typeof s === 'object' && - s instanceof EventEmitter && - typeof s.write === 'function' && - typeof s.end === 'function'; -const EOF = Symbol('EOF'); -const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); -const EMITTED_END = Symbol('emittedEnd'); -const EMITTING_END = Symbol('emittingEnd'); -const EMITTED_ERROR = Symbol('emittedError'); -const CLOSED = Symbol('closed'); -const READ = Symbol('read'); -const FLUSH = Symbol('flush'); -const FLUSHCHUNK = Symbol('flushChunk'); -const ENCODING = Symbol('encoding'); -const DECODER = Symbol('decoder'); -const FLOWING = Symbol('flowing'); -const PAUSED = Symbol('paused'); -const RESUME = Symbol('resume'); -const BUFFER = Symbol('buffer'); -const PIPES = Symbol('pipes'); -const BUFFERLENGTH = Symbol('bufferLength'); -const BUFFERPUSH = Symbol('bufferPush'); -const BUFFERSHIFT = Symbol('bufferShift'); -const OBJECTMODE = Symbol('objectMode'); -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed'); -// internal event when stream has an error -const ERROR = Symbol('error'); -const EMITDATA = Symbol('emitData'); -const EMITEND = Symbol('emitEnd'); -const EMITEND2 = Symbol('emitEnd2'); -const ASYNC = Symbol('async'); -const ABORT = Symbol('abort'); -const ABORTED = Symbol('aborted'); -const SIGNAL = Symbol('signal'); -const DATALISTENERS = Symbol('dataListeners'); -const DISCARDED = Symbol('discarded'); -const defer = (fn) => Promise.resolve().then(fn); -const nodefer = (fn) => fn(); -const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; -const isArrayBufferLike = (b) => b instanceof ArrayBuffer || - (!!b && - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0); -const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); -/** - * Internal class representing a pipe to a destination stream. - * - * @internal - */ -class Pipe { - src; - dest; - opts; - ondrain; - constructor(src, dest, opts) { - this.src = src; - this.dest = dest; - this.opts = opts; - this.ondrain = () => src[RESUME](); - this.dest.on('drain', this.ondrain); - } - unpipe() { - this.dest.removeListener('drain', this.ondrain); - } - // only here for the prototype - /* c8 ignore start */ - proxyErrors(_er) { } - /* c8 ignore stop */ - end() { - this.unpipe(); - if (this.opts.end) - this.dest.end(); - } -} -/** - * Internal class representing a pipe to a destination stream where - * errors are proxied. 
- * - * @internal - */ -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors); - super.unpipe(); - } - constructor(src, dest, opts) { - super(src, dest, opts); - this.proxyErrors = er => dest.emit('error', er); - src.on('error', this.proxyErrors); - } -} -const isObjectModeOptions = (o) => !!o.objectMode; -const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; -/** - * Main export, the Minipass class - * - * `RType` is the type of data emitted, defaults to Buffer - * - * `WType` is the type of data to be written, if RType is buffer or string, - * then any {@link Minipass.ContiguousData} is allowed. - * - * `Events` is the set of event handler signatures that this object - * will emit, see {@link Minipass.Events} - */ -export class Minipass extends EventEmitter { - [FLOWING] = false; - [PAUSED] = false; - [PIPES] = []; - [BUFFER] = []; - [OBJECTMODE]; - [ENCODING]; - [ASYNC]; - [DECODER]; - [EOF] = false; - [EMITTED_END] = false; - [EMITTING_END] = false; - [CLOSED] = false; - [EMITTED_ERROR] = null; - [BUFFERLENGTH] = 0; - [DESTROYED] = false; - [SIGNAL]; - [ABORTED] = false; - [DATALISTENERS] = 0; - [DISCARDED] = false; - /** - * true if the stream can be written - */ - writable = true; - /** - * true if the stream can be read - */ - readable = true; - /** - * If `RType` is Buffer, then options do not need to be provided. - * Otherwise, an options object must be provided to specify either - * {@link Minipass.SharedOptions.objectMode} or - * {@link Minipass.SharedOptions.encoding}, as appropriate. - */ - constructor(...args) { - const options = (args[0] || - {}); - super(); - if (options.objectMode && typeof options.encoding === 'string') { - throw new TypeError('Encoding and objectMode may not be used together'); - } - if (isObjectModeOptions(options)) { - this[OBJECTMODE] = true; - this[ENCODING] = null; - } - else if (isEncodingOptions(options)) { - this[ENCODING] = options.encoding; - this[OBJECTMODE] = false; - } - else { - this[OBJECTMODE] = false; - this[ENCODING] = null; - } - this[ASYNC] = !!options.async; - this[DECODER] = this[ENCODING] - ? new StringDecoder(this[ENCODING]) - : null; - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); - } - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); - } - const { signal } = options; - if (signal) { - this[SIGNAL] = signal; - if (signal.aborted) { - this[ABORT](); - } - else { - signal.addEventListener('abort', () => this[ABORT]()); - } - } - } - /** - * The amount of data stored in the buffer waiting to be read. - * - * For Buffer strings, this will be the total byte length. - * For string encoding streams, this will be the string character length, - * according to JavaScript's `string.length` logic. - * For objectMode streams, this is a count of the items waiting to be - * emitted. 
- */ - get bufferLength() { - return this[BUFFERLENGTH]; - } - /** - * The `BufferEncoding` currently in use, or `null` - */ - get encoding() { - return this[ENCODING]; - } - /** - * @deprecated - This is a read only property - */ - set encoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * @deprecated - Encoding may only be set at instantiation time - */ - setEncoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * True if this is an objectMode stream - */ - get objectMode() { - return this[OBJECTMODE]; - } - /** - * @deprecated - This is a read-only property - */ - set objectMode(_om) { - throw new Error('objectMode must be set at instantiation time'); - } - /** - * true if this is an async stream - */ - get ['async']() { - return this[ASYNC]; - } - /** - * Set to true to make this stream async. - * - * Once set, it cannot be unset, as this would potentially cause incorrect - * behavior. Ie, a sync stream can be made async, but an async stream - * cannot be safely made sync. - */ - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a; - } - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true; - this.emit('abort', this[SIGNAL]?.reason); - this.destroy(this[SIGNAL]?.reason); - } - /** - * True if the stream has been aborted. - */ - get aborted() { - return this[ABORTED]; - } - /** - * No-op setter. Stream aborted status is set via the AbortSignal provided - * in the constructor options. - */ - set aborted(_) { } - write(chunk, encoding, cb) { - if (this[ABORTED]) - return false; - if (this[EOF]) - throw new Error('write after end'); - if (this[DESTROYED]) { - this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); - return true; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (!encoding) - encoding = 'utf8'; - const fn = this[ASYNC] ? defer : nodefer; - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything is only allowed if in object mode, so throw - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); - } - else if (isArrayBufferLike(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk); - } - else if (typeof chunk !== 'string') { - throw new Error('Non-contiguous data written to non-objectMode stream'); - } - } - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - // maybe impossible? 
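// A sketch of the AbortSignal wiring handled above, assuming a signal was
// passed at construction: aborting destroys the stream, so promise() rejects
// (with the abort reason, or 'stream destroyed' when no reason is set).
import { Minipass } from 'minipass'
const ac = new AbortController()
const aborted = new Minipass({ signal: ac.signal })
aborted.promise().catch(er => console.error('rejected:', er.message))
ac.abort()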
- /* c8 ignore start */ - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - /* c8 ignore stop */ - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { - //@ts-ignore - sinful unsafe type change - chunk = Buffer.from(chunk, encoding); - } - if (Buffer.isBuffer(chunk) && this[ENCODING]) { - //@ts-ignore - sinful unsafe type change - chunk = this[DECODER].write(chunk); - } - // Note: flushing CAN potentially switch us into not-flowing mode - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - /** - * Low-level explicit read method. - * - * In objectMode, the argument is ignored, and one item is returned if - * available. - * - * `n` is the number of bytes (or in the case of encoding streams, - * characters) to consume. If `n` is not provided, then the entire buffer - * is returned, or `null` is returned if no data is available. - * - * If `n` is greater that the amount of data in the internal buffer, - * then `null` is returned. - */ - read(n) { - if (this[DESTROYED]) - return null; - this[DISCARDED] = false; - if (this[BUFFERLENGTH] === 0 || - n === 0 || - (n && n > this[BUFFERLENGTH])) { - this[MAYBE_EMIT_END](); - return null; - } - if (this[OBJECTMODE]) - n = null; - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - // not object mode, so if we have an encoding, then RType is string - // otherwise, must be Buffer - this[BUFFER] = [ - (this[ENCODING] - ? this[BUFFER].join('') - : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), - ]; - } - const ret = this[READ](n || null, this[BUFFER][0]); - this[MAYBE_EMIT_END](); - return ret; - } - [READ](n, chunk) { - if (this[OBJECTMODE]) - this[BUFFERSHIFT](); - else { - const c = chunk; - if (n === c.length || n === null) - this[BUFFERSHIFT](); - else if (typeof c === 'string') { - this[BUFFER][0] = c.slice(n); - chunk = c.slice(0, n); - this[BUFFERLENGTH] -= n; - } - else { - this[BUFFER][0] = c.subarray(n); - chunk = c.subarray(0, n); - this[BUFFERLENGTH] -= n; - } - } - this.emit('data', chunk); - if (!this[BUFFER].length && !this[EOF]) - this.emit('drain'); - return chunk; - } - end(chunk, encoding, cb) { - if (typeof chunk === 'function') { - cb = chunk; - chunk = undefined; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (chunk !== undefined) - this.write(chunk, encoding); - if (cb) - this.once('end', cb); - this[EOF] = true; - this.writable = false; - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
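// A sketch of the collect()/concat() helpers this class provides, assuming
// top-level await in an ES module: concat() resolves to the joined string
// once 'end' has been emitted, and throws on objectMode streams.
import { Minipass } from 'minipass'
const mp2 = new Minipass({ encoding: 'utf8' })
const all = mp2.concat()
mp2.write('foo')
mp2.end('bar')
console.log(await all) // 'foobar'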
- if (this[FLOWING] || !this[PAUSED]) - this[MAYBE_EMIT_END](); - return this; - } - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) - return; - if (!this[DATALISTENERS] && !this[PIPES].length) { - this[DISCARDED] = true; - } - this[PAUSED] = false; - this[FLOWING] = true; - this.emit('resume'); - if (this[BUFFER].length) - this[FLUSH](); - else if (this[EOF]) - this[MAYBE_EMIT_END](); - else - this.emit('drain'); - } - /** - * Resume the stream if it is currently in a paused state - * - * If called when there are no pipe destinations or `data` event listeners, - * this will place the stream in a "discarded" state, where all data will - * be thrown away. The discarded state is removed if a pipe destination or - * data handler is added, if pause() is called, or if any synchronous or - * asynchronous iteration is started. - */ - resume() { - return this[RESUME](); - } - /** - * Pause the stream - */ - pause() { - this[FLOWING] = false; - this[PAUSED] = true; - this[DISCARDED] = false; - } - /** - * true if the stream has been forcibly destroyed - */ - get destroyed() { - return this[DESTROYED]; - } - /** - * true if the stream is currently in a flowing state, meaning that - * any writes will be immediately emitted. - */ - get flowing() { - return this[FLOWING]; - } - /** - * true if the stream is currently in a paused state - */ - get paused() { - return this[PAUSED]; - } - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1; - else - this[BUFFERLENGTH] += chunk.length; - this[BUFFER].push(chunk); - } - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1; - else - this[BUFFERLENGTH] -= this[BUFFER][0].length; - return this[BUFFER].shift(); - } - [FLUSH](noDrain = false) { - do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && - this[BUFFER].length); - if (!noDrain && !this[BUFFER].length && !this[EOF]) - this.emit('drain'); - } - [FLUSHCHUNK](chunk) { - this.emit('data', chunk); - return this[FLOWING]; - } - /** - * Pipe all data emitted by this stream into the destination provided. - * - * Triggers the flow of data. - */ - pipe(dest, opts) { - if (this[DESTROYED]) - return dest; - this[DISCARDED] = false; - const ended = this[EMITTED_END]; - opts = opts || {}; - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false; - else - opts.end = opts.end !== false; - opts.proxyErrors = !!opts.proxyErrors; - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end(); - } - else { - // "as" here just ignores the WType, which pipes don't care about, - // since they're only consuming from us, and writing to the dest - this[PIPES].push(!opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)); - if (this[ASYNC]) - defer(() => this[RESUME]()); - else - this[RESUME](); - } - return dest; - } - /** - * Fully unhook a piped destination stream. - * - * If the destination stream was the only consumer of this stream (ie, - * there are no other piped destinations or `'data'` event listeners) - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. 
- */ - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest); - if (p) { - if (this[PIPES].length === 1) { - if (this[FLOWING] && this[DATALISTENERS] === 0) { - this[FLOWING] = false; - } - this[PIPES] = []; - } - else - this[PIPES].splice(this[PIPES].indexOf(p), 1); - p.unpipe(); - } - } - /** - * Alias for {@link Minipass#on} - */ - addListener(ev, handler) { - return this.on(ev, handler); - } - /** - * Mostly identical to `EventEmitter.on`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * - Adding a 'data' event handler will trigger the flow of data - * - * - Adding a 'readable' event handler when there is data waiting to be read - * will cause 'readable' to be emitted immediately. - * - * - Adding an 'endish' event handler ('end', 'finish', etc.) which has - * already passed will cause the event to be emitted immediately and all - * handlers removed. - * - * - Adding an 'error' event handler after an error has been emitted will - * cause the event to be re-emitted immediately with the error previously - * raised. - */ - on(ev, handler) { - const ret = super.on(ev, handler); - if (ev === 'data') { - this[DISCARDED] = false; - this[DATALISTENERS]++; - if (!this[PIPES].length && !this[FLOWING]) { - this[RESUME](); - } - } - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { - super.emit('readable'); - } - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev); - this.removeAllListeners(ev); - } - else if (ev === 'error' && this[EMITTED_ERROR]) { - const h = handler; - if (this[ASYNC]) - defer(() => h.call(this, this[EMITTED_ERROR])); - else - h.call(this, this[EMITTED_ERROR]); - } - return ret; - } - /** - * Alias for {@link Minipass#off} - */ - removeListener(ev, handler) { - return this.off(ev, handler); - } - /** - * Mostly identical to `EventEmitter.off` - * - * If a 'data' event handler is removed, and it was the last consumer - * (ie, there are no pipe destinations or other 'data' event listeners), - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. - */ - off(ev, handler) { - const ret = super.off(ev, handler); - // if we previously had listeners, and now we don't, and we don't - // have any pipes, then stop the flow, unless it's been explicitly - // put in a discarded flowing state via stream.resume(). - if (ev === 'data') { - this[DATALISTENERS] = this.listeners('data').length; - if (this[DATALISTENERS] === 0 && - !this[DISCARDED] && - !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * Mostly identical to `EventEmitter.removeAllListeners` - * - * If all 'data' event handlers are removed, and they were the last consumer - * (ie, there are no pipe destinations), then the flow of data will stop - * until there is another consumer or {@link Minipass#resume} is explicitly - * called. 
- */ - removeAllListeners(ev) { - const ret = super.removeAllListeners(ev); - if (ev === 'data' || ev === undefined) { - this[DATALISTENERS] = 0; - if (!this[DISCARDED] && !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * true if the 'end' event has been emitted - */ - get emittedEnd() { - return this[EMITTED_END]; - } - [MAYBE_EMIT_END]() { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF]) { - this[EMITTING_END] = true; - this.emit('end'); - this.emit('prefinish'); - this.emit('finish'); - if (this[CLOSED]) - this.emit('close'); - this[EMITTING_END] = false; - } - } - /** - * Mostly identical to `EventEmitter.emit`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * If the stream has been destroyed, and the event is something other - * than 'close' or 'error', then `false` is returned and no handlers - * are called. - * - * If the event is 'end', and has already been emitted, then the event - * is ignored. If the stream is in a paused or non-flowing state, then - * the event will be deferred until data flow resumes. If the stream is - * async, then handlers will be called on the next tick rather than - * immediately. - * - * If the event is 'close', and 'end' has not yet been emitted, then - * the event will be deferred until after 'end' is emitted. - * - * If the event is 'error', and an AbortSignal was provided for the stream, - * and there are no listeners, then the event is ignored, matching the - * behavior of node core streams in the presense of an AbortSignal. - * - * If the event is 'finish' or 'prefinish', then all listeners will be - * removed after emitting the event, to prevent double-firing. - */ - emit(ev, ...args) { - const data = args[0]; - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && - ev !== 'close' && - ev !== DESTROYED && - this[DESTROYED]) { - return false; - } - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? (defer(() => this[EMITDATA](data)), true) - : this[EMITDATA](data); - } - else if (ev === 'end') { - return this[EMITEND](); - } - else if (ev === 'close') { - this[CLOSED] = true; - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return false; - const ret = super.emit('close'); - this.removeAllListeners('close'); - return ret; - } - else if (ev === 'error') { - this[EMITTED_ERROR] = data; - super.emit(ERROR, data); - const ret = !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false; - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'resume') { - const ret = super.emit('resume'); - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev); - this.removeAllListeners(ev); - return ret; - } - // Some other unknown event - const ret = super.emit(ev, ...args); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) - this.pause(); - } - const ret = this[DISCARDED] ? false : super.emit('data', data); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITEND]() { - if (this[EMITTED_END]) - return false; - this[EMITTED_END] = true; - this.readable = false; - return this[ASYNC] - ? 
(defer(() => this[EMITEND2]()), true) - : this[EMITEND2](); - } - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end(); - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data); - } - if (!this[DISCARDED]) - super.emit('data', data); - } - } - for (const p of this[PIPES]) { - p.end(); - } - const ret = super.emit('end'); - this.removeAllListeners('end'); - return ret; - } - /** - * Return a Promise that resolves to an array of all emitted data once - * the stream ends. - */ - async collect() { - const buf = Object.assign([], { - dataLength: 0, - }); - if (!this[OBJECTMODE]) - buf.dataLength = 0; - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise(); - this.on('data', c => { - buf.push(c); - if (!this[OBJECTMODE]) - buf.dataLength += c.length; - }); - await p; - return buf; - } - /** - * Return a Promise that resolves to the concatenation of all emitted data - * once the stream ends. - * - * Not allowed on objectMode streams. - */ - async concat() { - if (this[OBJECTMODE]) { - throw new Error('cannot concat in objectMode'); - } - const buf = await this.collect(); - return (this[ENCODING] - ? buf.join('') - : Buffer.concat(buf, buf.dataLength)); - } - /** - * Return a void Promise that resolves once the stream ends. - */ - async promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))); - this.on('error', er => reject(er)); - this.on('end', () => resolve()); - }); - } - /** - * Asynchronous `for await of` iteration. - * - * This will continue emitting all chunks until the stream terminates. - */ - [Symbol.asyncIterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. - this[DISCARDED] = false; - let stopped = false; - const stop = async () => { - this.pause(); - stopped = true; - return { value: undefined, done: true }; - }; - const next = () => { - if (stopped) - return stop(); - const res = this.read(); - if (res !== null) - return Promise.resolve({ done: false, value: res }); - if (this[EOF]) - return stop(); - let resolve; - let reject; - const onerr = (er) => { - this.off('data', ondata); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - stop(); - reject(er); - }; - const ondata = (value) => { - this.off('error', onerr); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - this.pause(); - resolve({ value, done: !!this[EOF] }); - }; - const onend = () => { - this.off('error', onerr); - this.off('data', ondata); - this.off(DESTROYED, ondestroy); - stop(); - resolve({ done: true, value: undefined }); - }; - const ondestroy = () => onerr(new Error('stream destroyed')); - return new Promise((res, rej) => { - reject = rej; - resolve = res; - this.once(DESTROYED, ondestroy); - this.once('error', onerr); - this.once('end', onend); - this.once('data', ondata); - }); - }; - return { - next, - throw: stop, - return: stop, - [Symbol.asyncIterator]() { - return this; - }, - }; - } - /** - * Synchronous `for of` iteration. - * - * The iteration will terminate when the internal buffer runs out, even - * if the stream has not yet terminated. - */ - [Symbol.iterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. 
- this[DISCARDED] = false; - let stopped = false; - const stop = () => { - this.pause(); - this.off(ERROR, stop); - this.off(DESTROYED, stop); - this.off('end', stop); - stopped = true; - return { done: true, value: undefined }; - }; - const next = () => { - if (stopped) - return stop(); - const value = this.read(); - return value === null ? stop() : { done: false, value }; - }; - this.once('end', stop); - this.once(ERROR, stop); - this.once(DESTROYED, stop); - return { - next, - throw: stop, - return: stop, - [Symbol.iterator]() { - return this; - }, - }; - } - /** - * Destroy a stream, preventing it from being used for any further purpose. - * - * If the stream has a `close()` method, then it will be called on - * destruction. - * - * After destruction, any attempt to write data, read data, or emit most - * events will be ignored. - * - * If an error argument is provided, then it will be emitted in an - * 'error' event. - */ - destroy(er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er); - else - this.emit(DESTROYED); - return this; - } - this[DESTROYED] = true; - this[DISCARDED] = true; - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0; - this[BUFFERLENGTH] = 0; - const wc = this; - if (typeof wc.close === 'function' && !this[CLOSED]) - wc.close(); - if (er) - this.emit('error', er); - // if no error to emit, still reject pending promises - else - this.emit(DESTROYED); - return this; - } - /** - * Alias for {@link isStream} - * - * Former export location, maintained for backwards compatibility. - * - * @deprecated - */ - static get isStream() { - return isStream; - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/pacote/node_modules/minipass/dist/mjs/package.json b/node_modules/pacote/node_modules/minipass/dist/mjs/package.json deleted file mode 100644 index 3dbc1ca591c05..0000000000000 --- a/node_modules/pacote/node_modules/minipass/dist/mjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git a/node_modules/pacote/node_modules/minipass/package.json b/node_modules/pacote/node_modules/minipass/package.json deleted file mode 100644 index 355501c0a10c1..0000000000000 --- a/node_modules/pacote/node_modules/minipass/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "minipass", - "version": "7.0.2", - "description": "minimal implementation of a PassThrough stream", - "main": "./dist/cjs/index.js", - "module": "./dist/mjs/index.js", - "types": "./dist/cjs/index.js", - "exports": { - ".": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" - }, - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.js" - } - }, - "./package.json": "./package.json" - }, - "files": [ - "dist" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", - "format": "prettier --write . 
--loglevel warn", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" - }, - "tap": { - "coverage": false, - "node-arg": [ - "--enable-source-maps", - "--no-warnings", - "--loader", - "ts-node/esm" - ], - "ts": false - }, - "prettier": { - "semi": false, - "printWidth": 75, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "devDependencies": { - "@types/node": "^20.1.2", - "@types/tap": "^15.0.8", - "c8": "^7.13.0", - "prettier": "^2.6.2", - "tap": "^16.3.0", - "ts-node": "^10.9.1", - "typedoc": "^0.24.8", - "typescript": "^5.1.3", - "end-of-stream": "^1.4.0", - "node-abort-controller": "^3.1.1", - "sync-content": "^1.0.2", - "through2": "^2.0.3" - }, - "repository": "https://github.com/isaacs/minipass", - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } -} diff --git a/node_modules/npm-registry-fetch/node_modules/minipass/LICENSE b/node_modules/sigstore/node_modules/minipass/LICENSE similarity index 100% rename from node_modules/npm-registry-fetch/node_modules/minipass/LICENSE rename to node_modules/sigstore/node_modules/minipass/LICENSE diff --git a/node_modules/sigstore/node_modules/minipass/index.js b/node_modules/sigstore/node_modules/minipass/index.js new file mode 100644 index 0000000000000..ed07c17acd97b --- /dev/null +++ b/node_modules/sigstore/node_modules/minipass/index.js @@ -0,0 +1,702 @@ +'use strict' +const proc = + typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + } +const EE = require('events') +const Stream = require('stream') +const stringdecoder = require('string_decoder') +const SD = stringdecoder.StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFER = Symbol('buffer') +const PIPES = Symbol('pipes') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed') +// internal event when stream has an error +const ERROR = Symbol('error') +const EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') +const ABORT = Symbol('abort') +const ABORTED = Symbol('aborted') +const SIGNAL = Symbol('signal') + +const defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = + (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') +const ITERATOR = + (doIter && Symbol.iterator) || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. 
+const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' + +const isArrayBuffer = b => + b instanceof ArrayBuffer || + (typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0) + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor(src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe() { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors() {} + end() { + this.unpipe() + if (this.opts.end) this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor(src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +class Minipass extends Stream { + constructor(options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this[PIPES] = [] + this[BUFFER] = [] + this[OBJECTMODE] = (options && options.objectMode) || false + if (this[OBJECTMODE]) this[ENCODING] = null + else this[ENCODING] = (options && options.encoding) || null + if (this[ENCODING] === 'buffer') this[ENCODING] = null + this[ASYNC] = (options && !!options.async) || false + this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) + } + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) + } + this[SIGNAL] = options && options.signal + this[ABORTED] = false + if (this[SIGNAL]) { + this[SIGNAL].addEventListener('abort', () => this[ABORT]()) + if (this[SIGNAL].aborted) { + this[ABORT]() + } + } + } + + get bufferLength() { + return this[BUFFERLENGTH] + } + + get encoding() { + return this[ENCODING] + } + set encoding(enc) { + if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') + + if ( + this[ENCODING] && + enc !== this[ENCODING] && + ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) + ) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? 
new SD(enc) : null + if (this[BUFFER].length) + this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding(enc) { + this.encoding = enc + } + + get objectMode() { + return this[OBJECTMODE] + } + set objectMode(om) { + this[OBJECTMODE] = this[OBJECTMODE] || !!om + } + + get ['async']() { + return this[ASYNC] + } + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a + } + + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true + this.emit('abort', this[SIGNAL].reason) + this.destroy(this[SIGNAL].reason) + } + + get aborted() { + return this[ABORTED] + } + set aborted(_) {} + + write(chunk, encoding, cb) { + if (this[ABORTED]) return false + if (this[EOF]) throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit( + 'error', + Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + ) + ) + return true + } + + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + + if (!encoding) encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? 
*/ + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + if (cb) fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if ( + typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed) + ) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + read(n) { + if (this[DESTROYED]) return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + + if (this[OBJECTMODE]) n = null + + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] + else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this[BUFFER][0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ](n, chunk) { + if (n === chunk.length || n === null) this[BUFFERSHIFT]() + else { + this[BUFFER][0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this[BUFFER].length && !this[EOF]) this.emit('drain') + + return chunk + } + + end(chunk, encoding, cb) { + if (typeof chunk === 'function') (cb = chunk), (chunk = null) + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + if (chunk) this.write(chunk, encoding) + if (cb) this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this[BUFFER].length) this[FLUSH]() + else if (this[EOF]) this[MAYBE_EMIT_END]() + else this.emit('drain') + } + + resume() { + return this[RESUME]() + } + + pause() { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed() { + return this[DESTROYED] + } + + get flowing() { + return this[FLOWING] + } + + get paused() { + return this[PAUSED] + } + + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 + else this[BUFFERLENGTH] += chunk.length + this[BUFFER].push(chunk) + } + + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 + else this[BUFFERLENGTH] -= this[BUFFER][0].length + return this[BUFFER].shift() + } + + [FLUSH](noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) + + if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') + } + + [FLUSHCHUNK](chunk) { + this.emit('data', chunk) + return this.flowing + } + + pipe(dest, opts) { + if (this[DESTROYED]) return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) opts.end = false + else opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) dest.end() + } else { + this[PIPES].push( + !opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts) + ) + if (this[ASYNC]) defer(() => this[RESUME]()) + else this[RESUME]() + } + + return dest + } + + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest) + if (p) { + this[PIPES].splice(this[PIPES].indexOf(p), 1) + p.unpipe() + } + } + + addListener(ev, fn) { + return this.on(ev, fn) + } + + on(ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) + else fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd() { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END]() { + if ( + !this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF] + ) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) this.emit('close') + this[EMITTING_END] = false + } + } + + emit(ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + super.emit(ERROR, data) + const ret = + !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND]() { + if (this[EMITTED_END]) return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) defer(() => this[EMITEND2]()) + else this[EMITEND2]() + } + + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this[PIPES]) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect() { + const buf = [] + if (!this[OBJECTMODE]) buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat() { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] + ? 
buf.join('') + : Buffer.concat(buf, buf.dataLength) + ) + } + + // stream.promise().then(() => done, er => emitted error) + promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + stopped = true + return Promise.resolve({ done: true }) + } + const next = () => { + if (stopped) return stop() + const res = this.read() + if (res !== null) return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) return stop() + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + stop() + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + this.removeListener(DESTROYED, ondestroy) + stop() + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { + next, + throw: stop, + return: stop, + [ASYNCITERATOR]() { + return this + }, + } + } + + // for (let chunk of stream) + [ITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + this.removeListener(ERROR, stop) + this.removeListener(DESTROYED, stop) + this.removeListener('end', stop) + stopped = true + return { done: true } + } + + const next = () => { + if (stopped) return stop() + const value = this.read() + return value === null ? stop() : { value } + } + this.once('end', stop) + this.once(ERROR, stop) + this.once(DESTROYED, stop) + + return { + next, + throw: stop, + return: stop, + [ITERATOR]() { + return this + }, + } + } + + destroy(er) { + if (this[DESTROYED]) { + if (er) this.emit('error', er) + else this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) this.close() + + if (er) this.emit('error', er) + // if no error to emit, still reject pending promises + else this.emit(DESTROYED) + + return this + } + + static isStream(s) { + return ( + !!s && + (s instanceof Minipass || + s instanceof Stream || + (s instanceof EE && + // readable + (typeof s.pipe === 'function' || + // writable + (typeof s.write === 'function' && typeof s.end === 'function')))) + ) + } +} + +exports.Minipass = Minipass diff --git a/node_modules/sigstore/node_modules/minipass/index.mjs b/node_modules/sigstore/node_modules/minipass/index.mjs new file mode 100644 index 0000000000000..6ef6cd8cf0703 --- /dev/null +++ b/node_modules/sigstore/node_modules/minipass/index.mjs @@ -0,0 +1,702 @@ +'use strict' +const proc = + typeof process === 'object' && process + ? 
process + : { + stdout: null, + stderr: null, + } +import EE from 'events' +import Stream from 'stream' +import stringdecoder from 'string_decoder' +const SD = stringdecoder.StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFER = Symbol('buffer') +const PIPES = Symbol('pipes') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed') +// internal event when stream has an error +const ERROR = Symbol('error') +const EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') +const ABORT = Symbol('abort') +const ABORTED = Symbol('aborted') +const SIGNAL = Symbol('signal') + +const defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = + (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') +const ITERATOR = + (doIter && Symbol.iterator) || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. +const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' + +const isArrayBuffer = b => + b instanceof ArrayBuffer || + (typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0) + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor(src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe() { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors() {} + end() { + this.unpipe() + if (this.opts.end) this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor(src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +export class Minipass extends Stream { + constructor(options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this[PIPES] = [] + this[BUFFER] = [] + this[OBJECTMODE] = (options && options.objectMode) || false + if (this[OBJECTMODE]) this[ENCODING] = null + else this[ENCODING] = (options && options.encoding) || null + if (this[ENCODING] === 'buffer') this[ENCODING] = null + this[ASYNC] = (options && !!options.async) || false + this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) + } + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) + } + this[SIGNAL] = options && options.signal + this[ABORTED] = false + if (this[SIGNAL]) { + this[SIGNAL].addEventListener('abort', () => this[ABORT]()) + if (this[SIGNAL].aborted) { + this[ABORT]() + } + } + } + + get bufferLength() { + return this[BUFFERLENGTH] + } + + get encoding() { + return this[ENCODING] + } + set encoding(enc) { + if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') + + if ( + this[ENCODING] && + enc !== this[ENCODING] && + ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) + ) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? new SD(enc) : null + if (this[BUFFER].length) + this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding(enc) { + this.encoding = enc + } + + get objectMode() { + return this[OBJECTMODE] + } + set objectMode(om) { + this[OBJECTMODE] = this[OBJECTMODE] || !!om + } + + get ['async']() { + return this[ASYNC] + } + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a + } + + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true + this.emit('abort', this[SIGNAL].reason) + this.destroy(this[SIGNAL].reason) + } + + get aborted() { + return this[ABORTED] + } + set aborted(_) {} + + write(chunk, encoding, cb) { + if (this[ABORTED]) return false + if (this[EOF]) throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit( + 'error', + Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + ) + ) + return true + } + + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + + if (!encoding) encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? 
*/ + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + if (cb) fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if ( + typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed) + ) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + read(n) { + if (this[DESTROYED]) return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + + if (this[OBJECTMODE]) n = null + + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] + else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this[BUFFER][0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ](n, chunk) { + if (n === chunk.length || n === null) this[BUFFERSHIFT]() + else { + this[BUFFER][0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this[BUFFER].length && !this[EOF]) this.emit('drain') + + return chunk + } + + end(chunk, encoding, cb) { + if (typeof chunk === 'function') (cb = chunk), (chunk = null) + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + if (chunk) this.write(chunk, encoding) + if (cb) this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this[BUFFER].length) this[FLUSH]() + else if (this[EOF]) this[MAYBE_EMIT_END]() + else this.emit('drain') + } + + resume() { + return this[RESUME]() + } + + pause() { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed() { + return this[DESTROYED] + } + + get flowing() { + return this[FLOWING] + } + + get paused() { + return this[PAUSED] + } + + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 + else this[BUFFERLENGTH] += chunk.length + this[BUFFER].push(chunk) + } + + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 + else this[BUFFERLENGTH] -= this[BUFFER][0].length + return this[BUFFER].shift() + } + + [FLUSH](noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) + + if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') + } + + [FLUSHCHUNK](chunk) { + this.emit('data', chunk) + return this.flowing + } + + pipe(dest, opts) { + if (this[DESTROYED]) return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) opts.end = false + else opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) dest.end() + } else { + this[PIPES].push( + !opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts) + ) + if (this[ASYNC]) defer(() => this[RESUME]()) + else this[RESUME]() + } + + return dest + } + + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest) + if (p) { + this[PIPES].splice(this[PIPES].indexOf(p), 1) + p.unpipe() + } + } + + addListener(ev, fn) { + return this.on(ev, fn) + } + + on(ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) + else fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd() { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END]() { + if ( + !this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF] + ) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) this.emit('close') + this[EMITTING_END] = false + } + } + + emit(ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + super.emit(ERROR, data) + const ret = + !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND]() { + if (this[EMITTED_END]) return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) defer(() => this[EMITEND2]()) + else this[EMITEND2]() + } + + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this[PIPES]) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect() { + const buf = [] + if (!this[OBJECTMODE]) buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat() { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] + ? 
buf.join('') + : Buffer.concat(buf, buf.dataLength) + ) + } + + // stream.promise().then(() => done, er => emitted error) + promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + stopped = true + return Promise.resolve({ done: true }) + } + const next = () => { + if (stopped) return stop() + const res = this.read() + if (res !== null) return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) return stop() + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + stop() + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + this.removeListener(DESTROYED, ondestroy) + stop() + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { + next, + throw: stop, + return: stop, + [ASYNCITERATOR]() { + return this + }, + } + } + + // for (let chunk of stream) + [ITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + this.removeListener(ERROR, stop) + this.removeListener(DESTROYED, stop) + this.removeListener('end', stop) + stopped = true + return { done: true } + } + + const next = () => { + if (stopped) return stop() + const value = this.read() + return value === null ? 
stop() : { value } + } + this.once('end', stop) + this.once(ERROR, stop) + this.once(DESTROYED, stop) + + return { + next, + throw: stop, + return: stop, + [ITERATOR]() { + return this + }, + } + } + + destroy(er) { + if (this[DESTROYED]) { + if (er) this.emit('error', er) + else this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) this.close() + + if (er) this.emit('error', er) + // if no error to emit, still reject pending promises + else this.emit(DESTROYED) + + return this + } + + static isStream(s) { + return ( + !!s && + (s instanceof Minipass || + s instanceof Stream || + (s instanceof EE && + // readable + (typeof s.pipe === 'function' || + // writable + (typeof s.write === 'function' && typeof s.end === 'function')))) + ) + } +} + + diff --git a/node_modules/sigstore/node_modules/minipass/package.json b/node_modules/sigstore/node_modules/minipass/package.json new file mode 100644 index 0000000000000..0e20e988047f2 --- /dev/null +++ b/node_modules/sigstore/node_modules/minipass/package.json @@ -0,0 +1,76 @@ +{ + "name": "minipass", + "version": "5.0.0", + "description": "minimal implementation of a PassThrough stream", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", + "exports": { + ".": { + "import": { + "types": "./index.d.ts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + } + }, + "./package.json": "./package.json" + }, + "devDependencies": { + "@types/node": "^17.0.41", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "prettier": "^2.6.2", + "tap": "^16.2.0", + "through2": "^2.0.3", + "ts-node": "^10.8.1", + "typedoc": "^0.23.24", + "typescript": "^4.7.3" + }, + "scripts": { + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "node ./scripts/transpile-to-esm.js", + "snap": "tap", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags", + "typedoc": "typedoc ./index.d.ts", + "format": "prettier --write . --loglevel warn" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minipass.git" + }, + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "index.d.ts", + "index.js", + "index.mjs" + ], + "tap": { + "check-coverage": true + }, + "engines": { + "node": ">=8" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/node_modules/ssri/node_modules/minipass/dist/cjs/index.js b/node_modules/ssri/node_modules/minipass/dist/cjs/index.js deleted file mode 100644 index b6cdae8eb514b..0000000000000 --- a/node_modules/ssri/node_modules/minipass/dist/cjs/index.js +++ /dev/null @@ -1,1028 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0; -const proc = typeof process === 'object' && process - ? 
process - : { - stdout: null, - stderr: null, - }; -const events_1 = require("events"); -const stream_1 = __importDefault(require("stream")); -const string_decoder_1 = require("string_decoder"); -/** - * Return true if the argument is a Minipass stream, Node stream, or something - * else that Minipass can interact with. - */ -const isStream = (s) => !!s && - typeof s === 'object' && - (s instanceof Minipass || - s instanceof stream_1.default || - (0, exports.isReadable)(s) || - (0, exports.isWritable)(s)); -exports.isStream = isStream; -/** - * Return true if the argument is a valid {@link Minipass.Readable} - */ -const isReadable = (s) => !!s && - typeof s === 'object' && - s instanceof events_1.EventEmitter && - typeof s.pipe === 'function' && - // node core Writable streams have a pipe() method, but it throws - s.pipe !== stream_1.default.Writable.prototype.pipe; -exports.isReadable = isReadable; -/** - * Return true if the argument is a valid {@link Minipass.Writable} - */ -const isWritable = (s) => !!s && - typeof s === 'object' && - s instanceof events_1.EventEmitter && - typeof s.write === 'function' && - typeof s.end === 'function'; -exports.isWritable = isWritable; -const EOF = Symbol('EOF'); -const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); -const EMITTED_END = Symbol('emittedEnd'); -const EMITTING_END = Symbol('emittingEnd'); -const EMITTED_ERROR = Symbol('emittedError'); -const CLOSED = Symbol('closed'); -const READ = Symbol('read'); -const FLUSH = Symbol('flush'); -const FLUSHCHUNK = Symbol('flushChunk'); -const ENCODING = Symbol('encoding'); -const DECODER = Symbol('decoder'); -const FLOWING = Symbol('flowing'); -const PAUSED = Symbol('paused'); -const RESUME = Symbol('resume'); -const BUFFER = Symbol('buffer'); -const PIPES = Symbol('pipes'); -const BUFFERLENGTH = Symbol('bufferLength'); -const BUFFERPUSH = Symbol('bufferPush'); -const BUFFERSHIFT = Symbol('bufferShift'); -const OBJECTMODE = Symbol('objectMode'); -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed'); -// internal event when stream has an error -const ERROR = Symbol('error'); -const EMITDATA = Symbol('emitData'); -const EMITEND = Symbol('emitEnd'); -const EMITEND2 = Symbol('emitEnd2'); -const ASYNC = Symbol('async'); -const ABORT = Symbol('abort'); -const ABORTED = Symbol('aborted'); -const SIGNAL = Symbol('signal'); -const DATALISTENERS = Symbol('dataListeners'); -const DISCARDED = Symbol('discarded'); -const defer = (fn) => Promise.resolve().then(fn); -const nodefer = (fn) => fn(); -const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; -const isArrayBufferLike = (b) => b instanceof ArrayBuffer || - (!!b && - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0); -const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); -/** - * Internal class representing a pipe to a destination stream. - * - * @internal - */ -class Pipe { - src; - dest; - opts; - ondrain; - constructor(src, dest, opts) { - this.src = src; - this.dest = dest; - this.opts = opts; - this.ondrain = () => src[RESUME](); - this.dest.on('drain', this.ondrain); - } - unpipe() { - this.dest.removeListener('drain', this.ondrain); - } - // only here for the prototype - /* c8 ignore start */ - proxyErrors(_er) { } - /* c8 ignore stop */ - end() { - this.unpipe(); - if (this.opts.end) - this.dest.end(); - } -} -/** - * Internal class representing a pipe to a destination stream where - * errors are proxied. 
- * - * @internal - */ -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors); - super.unpipe(); - } - constructor(src, dest, opts) { - super(src, dest, opts); - this.proxyErrors = er => dest.emit('error', er); - src.on('error', this.proxyErrors); - } -} -const isObjectModeOptions = (o) => !!o.objectMode; -const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; -/** - * Main export, the Minipass class - * - * `RType` is the type of data emitted, defaults to Buffer - * - * `WType` is the type of data to be written, if RType is buffer or string, - * then any {@link Minipass.ContiguousData} is allowed. - * - * `Events` is the set of event handler signatures that this object - * will emit, see {@link Minipass.Events} - */ -class Minipass extends events_1.EventEmitter { - [FLOWING] = false; - [PAUSED] = false; - [PIPES] = []; - [BUFFER] = []; - [OBJECTMODE]; - [ENCODING]; - [ASYNC]; - [DECODER]; - [EOF] = false; - [EMITTED_END] = false; - [EMITTING_END] = false; - [CLOSED] = false; - [EMITTED_ERROR] = null; - [BUFFERLENGTH] = 0; - [DESTROYED] = false; - [SIGNAL]; - [ABORTED] = false; - [DATALISTENERS] = 0; - [DISCARDED] = false; - /** - * true if the stream can be written - */ - writable = true; - /** - * true if the stream can be read - */ - readable = true; - /** - * If `RType` is Buffer, then options do not need to be provided. - * Otherwise, an options object must be provided to specify either - * {@link Minipass.SharedOptions.objectMode} or - * {@link Minipass.SharedOptions.encoding}, as appropriate. - */ - constructor(...args) { - const options = (args[0] || - {}); - super(); - if (options.objectMode && typeof options.encoding === 'string') { - throw new TypeError('Encoding and objectMode may not be used together'); - } - if (isObjectModeOptions(options)) { - this[OBJECTMODE] = true; - this[ENCODING] = null; - } - else if (isEncodingOptions(options)) { - this[ENCODING] = options.encoding; - this[OBJECTMODE] = false; - } - else { - this[OBJECTMODE] = false; - this[ENCODING] = null; - } - this[ASYNC] = !!options.async; - this[DECODER] = this[ENCODING] - ? new string_decoder_1.StringDecoder(this[ENCODING]) - : null; - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); - } - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); - } - const { signal } = options; - if (signal) { - this[SIGNAL] = signal; - if (signal.aborted) { - this[ABORT](); - } - else { - signal.addEventListener('abort', () => this[ABORT]()); - } - } - } - /** - * The amount of data stored in the buffer waiting to be read. - * - * For Buffer strings, this will be the total byte length. - * For string encoding streams, this will be the string character length, - * according to JavaScript's `string.length` logic. - * For objectMode streams, this is a count of the items waiting to be - * emitted. 
- */ - get bufferLength() { - return this[BUFFERLENGTH]; - } - /** - * The `BufferEncoding` currently in use, or `null` - */ - get encoding() { - return this[ENCODING]; - } - /** - * @deprecated - This is a read only property - */ - set encoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * @deprecated - Encoding may only be set at instantiation time - */ - setEncoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * True if this is an objectMode stream - */ - get objectMode() { - return this[OBJECTMODE]; - } - /** - * @deprecated - This is a read-only property - */ - set objectMode(_om) { - throw new Error('objectMode must be set at instantiation time'); - } - /** - * true if this is an async stream - */ - get ['async']() { - return this[ASYNC]; - } - /** - * Set to true to make this stream async. - * - * Once set, it cannot be unset, as this would potentially cause incorrect - * behavior. Ie, a sync stream can be made async, but an async stream - * cannot be safely made sync. - */ - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a; - } - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true; - this.emit('abort', this[SIGNAL]?.reason); - this.destroy(this[SIGNAL]?.reason); - } - /** - * True if the stream has been aborted. - */ - get aborted() { - return this[ABORTED]; - } - /** - * No-op setter. Stream aborted status is set via the AbortSignal provided - * in the constructor options. - */ - set aborted(_) { } - write(chunk, encoding, cb) { - if (this[ABORTED]) - return false; - if (this[EOF]) - throw new Error('write after end'); - if (this[DESTROYED]) { - this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); - return true; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (!encoding) - encoding = 'utf8'; - const fn = this[ASYNC] ? defer : nodefer; - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything is only allowed if in object mode, so throw - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); - } - else if (isArrayBufferLike(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk); - } - else if (typeof chunk !== 'string') { - throw new Error('Non-contiguous data written to non-objectMode stream'); - } - } - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - // maybe impossible? 
- /* c8 ignore start */ - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - /* c8 ignore stop */ - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { - //@ts-ignore - sinful unsafe type change - chunk = Buffer.from(chunk, encoding); - } - if (Buffer.isBuffer(chunk) && this[ENCODING]) { - //@ts-ignore - sinful unsafe type change - chunk = this[DECODER].write(chunk); - } - // Note: flushing CAN potentially switch us into not-flowing mode - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - /** - * Low-level explicit read method. - * - * In objectMode, the argument is ignored, and one item is returned if - * available. - * - * `n` is the number of bytes (or in the case of encoding streams, - * characters) to consume. If `n` is not provided, then the entire buffer - * is returned, or `null` is returned if no data is available. - * - * If `n` is greater that the amount of data in the internal buffer, - * then `null` is returned. - */ - read(n) { - if (this[DESTROYED]) - return null; - this[DISCARDED] = false; - if (this[BUFFERLENGTH] === 0 || - n === 0 || - (n && n > this[BUFFERLENGTH])) { - this[MAYBE_EMIT_END](); - return null; - } - if (this[OBJECTMODE]) - n = null; - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - // not object mode, so if we have an encoding, then RType is string - // otherwise, must be Buffer - this[BUFFER] = [ - (this[ENCODING] - ? this[BUFFER].join('') - : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), - ]; - } - const ret = this[READ](n || null, this[BUFFER][0]); - this[MAYBE_EMIT_END](); - return ret; - } - [READ](n, chunk) { - if (this[OBJECTMODE]) - this[BUFFERSHIFT](); - else { - const c = chunk; - if (n === c.length || n === null) - this[BUFFERSHIFT](); - else if (typeof c === 'string') { - this[BUFFER][0] = c.slice(n); - chunk = c.slice(0, n); - this[BUFFERLENGTH] -= n; - } - else { - this[BUFFER][0] = c.subarray(n); - chunk = c.subarray(0, n); - this[BUFFERLENGTH] -= n; - } - } - this.emit('data', chunk); - if (!this[BUFFER].length && !this[EOF]) - this.emit('drain'); - return chunk; - } - end(chunk, encoding, cb) { - if (typeof chunk === 'function') { - cb = chunk; - chunk = undefined; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (chunk !== undefined) - this.write(chunk, encoding); - if (cb) - this.once('end', cb); - this[EOF] = true; - this.writable = false; - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this[FLOWING] || !this[PAUSED]) - this[MAYBE_EMIT_END](); - return this; - } - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) - return; - if (!this[DATALISTENERS] && !this[PIPES].length) { - this[DISCARDED] = true; - } - this[PAUSED] = false; - this[FLOWING] = true; - this.emit('resume'); - if (this[BUFFER].length) - this[FLUSH](); - else if (this[EOF]) - this[MAYBE_EMIT_END](); - else - this.emit('drain'); - } - /** - * Resume the stream if it is currently in a paused state - * - * If called when there are no pipe destinations or `data` event listeners, - * this will place the stream in a "discarded" state, where all data will - * be thrown away. The discarded state is removed if a pipe destination or - * data handler is added, if pause() is called, or if any synchronous or - * asynchronous iteration is started. - */ - resume() { - return this[RESUME](); - } - /** - * Pause the stream - */ - pause() { - this[FLOWING] = false; - this[PAUSED] = true; - this[DISCARDED] = false; - } - /** - * true if the stream has been forcibly destroyed - */ - get destroyed() { - return this[DESTROYED]; - } - /** - * true if the stream is currently in a flowing state, meaning that - * any writes will be immediately emitted. - */ - get flowing() { - return this[FLOWING]; - } - /** - * true if the stream is currently in a paused state - */ - get paused() { - return this[PAUSED]; - } - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1; - else - this[BUFFERLENGTH] += chunk.length; - this[BUFFER].push(chunk); - } - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1; - else - this[BUFFERLENGTH] -= this[BUFFER][0].length; - return this[BUFFER].shift(); - } - [FLUSH](noDrain = false) { - do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && - this[BUFFER].length); - if (!noDrain && !this[BUFFER].length && !this[EOF]) - this.emit('drain'); - } - [FLUSHCHUNK](chunk) { - this.emit('data', chunk); - return this[FLOWING]; - } - /** - * Pipe all data emitted by this stream into the destination provided. - * - * Triggers the flow of data. - */ - pipe(dest, opts) { - if (this[DESTROYED]) - return dest; - this[DISCARDED] = false; - const ended = this[EMITTED_END]; - opts = opts || {}; - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false; - else - opts.end = opts.end !== false; - opts.proxyErrors = !!opts.proxyErrors; - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end(); - } - else { - // "as" here just ignores the WType, which pipes don't care about, - // since they're only consuming from us, and writing to the dest - this[PIPES].push(!opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)); - if (this[ASYNC]) - defer(() => this[RESUME]()); - else - this[RESUME](); - } - return dest; - } - /** - * Fully unhook a piped destination stream. - * - * If the destination stream was the only consumer of this stream (ie, - * there are no other piped destinations or `'data'` event listeners) - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. 
- */ - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest); - if (p) { - if (this[PIPES].length === 1) { - if (this[FLOWING] && this[DATALISTENERS] === 0) { - this[FLOWING] = false; - } - this[PIPES] = []; - } - else - this[PIPES].splice(this[PIPES].indexOf(p), 1); - p.unpipe(); - } - } - /** - * Alias for {@link Minipass#on} - */ - addListener(ev, handler) { - return this.on(ev, handler); - } - /** - * Mostly identical to `EventEmitter.on`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * - Adding a 'data' event handler will trigger the flow of data - * - * - Adding a 'readable' event handler when there is data waiting to be read - * will cause 'readable' to be emitted immediately. - * - * - Adding an 'endish' event handler ('end', 'finish', etc.) which has - * already passed will cause the event to be emitted immediately and all - * handlers removed. - * - * - Adding an 'error' event handler after an error has been emitted will - * cause the event to be re-emitted immediately with the error previously - * raised. - */ - on(ev, handler) { - const ret = super.on(ev, handler); - if (ev === 'data') { - this[DISCARDED] = false; - this[DATALISTENERS]++; - if (!this[PIPES].length && !this[FLOWING]) { - this[RESUME](); - } - } - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { - super.emit('readable'); - } - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev); - this.removeAllListeners(ev); - } - else if (ev === 'error' && this[EMITTED_ERROR]) { - const h = handler; - if (this[ASYNC]) - defer(() => h.call(this, this[EMITTED_ERROR])); - else - h.call(this, this[EMITTED_ERROR]); - } - return ret; - } - /** - * Alias for {@link Minipass#off} - */ - removeListener(ev, handler) { - return this.off(ev, handler); - } - /** - * Mostly identical to `EventEmitter.off` - * - * If a 'data' event handler is removed, and it was the last consumer - * (ie, there are no pipe destinations or other 'data' event listeners), - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. - */ - off(ev, handler) { - const ret = super.off(ev, handler); - // if we previously had listeners, and now we don't, and we don't - // have any pipes, then stop the flow, unless it's been explicitly - // put in a discarded flowing state via stream.resume(). - if (ev === 'data') { - this[DATALISTENERS] = this.listeners('data').length; - if (this[DATALISTENERS] === 0 && - !this[DISCARDED] && - !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * Mostly identical to `EventEmitter.removeAllListeners` - * - * If all 'data' event handlers are removed, and they were the last consumer - * (ie, there are no pipe destinations), then the flow of data will stop - * until there is another consumer or {@link Minipass#resume} is explicitly - * called. 
- */ - removeAllListeners(ev) { - const ret = super.removeAllListeners(ev); - if (ev === 'data' || ev === undefined) { - this[DATALISTENERS] = 0; - if (!this[DISCARDED] && !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * true if the 'end' event has been emitted - */ - get emittedEnd() { - return this[EMITTED_END]; - } - [MAYBE_EMIT_END]() { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF]) { - this[EMITTING_END] = true; - this.emit('end'); - this.emit('prefinish'); - this.emit('finish'); - if (this[CLOSED]) - this.emit('close'); - this[EMITTING_END] = false; - } - } - /** - * Mostly identical to `EventEmitter.emit`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * If the stream has been destroyed, and the event is something other - * than 'close' or 'error', then `false` is returned and no handlers - * are called. - * - * If the event is 'end', and has already been emitted, then the event - * is ignored. If the stream is in a paused or non-flowing state, then - * the event will be deferred until data flow resumes. If the stream is - * async, then handlers will be called on the next tick rather than - * immediately. - * - * If the event is 'close', and 'end' has not yet been emitted, then - * the event will be deferred until after 'end' is emitted. - * - * If the event is 'error', and an AbortSignal was provided for the stream, - * and there are no listeners, then the event is ignored, matching the - * behavior of node core streams in the presense of an AbortSignal. - * - * If the event is 'finish' or 'prefinish', then all listeners will be - * removed after emitting the event, to prevent double-firing. - */ - emit(ev, ...args) { - const data = args[0]; - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && - ev !== 'close' && - ev !== DESTROYED && - this[DESTROYED]) { - return false; - } - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? (defer(() => this[EMITDATA](data)), true) - : this[EMITDATA](data); - } - else if (ev === 'end') { - return this[EMITEND](); - } - else if (ev === 'close') { - this[CLOSED] = true; - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return false; - const ret = super.emit('close'); - this.removeAllListeners('close'); - return ret; - } - else if (ev === 'error') { - this[EMITTED_ERROR] = data; - super.emit(ERROR, data); - const ret = !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false; - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'resume') { - const ret = super.emit('resume'); - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev); - this.removeAllListeners(ev); - return ret; - } - // Some other unknown event - const ret = super.emit(ev, ...args); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) - this.pause(); - } - const ret = this[DISCARDED] ? false : super.emit('data', data); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITEND]() { - if (this[EMITTED_END]) - return false; - this[EMITTED_END] = true; - this.readable = false; - return this[ASYNC] - ? 
(defer(() => this[EMITEND2]()), true) - : this[EMITEND2](); - } - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end(); - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data); - } - if (!this[DISCARDED]) - super.emit('data', data); - } - } - for (const p of this[PIPES]) { - p.end(); - } - const ret = super.emit('end'); - this.removeAllListeners('end'); - return ret; - } - /** - * Return a Promise that resolves to an array of all emitted data once - * the stream ends. - */ - async collect() { - const buf = Object.assign([], { - dataLength: 0, - }); - if (!this[OBJECTMODE]) - buf.dataLength = 0; - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise(); - this.on('data', c => { - buf.push(c); - if (!this[OBJECTMODE]) - buf.dataLength += c.length; - }); - await p; - return buf; - } - /** - * Return a Promise that resolves to the concatenation of all emitted data - * once the stream ends. - * - * Not allowed on objectMode streams. - */ - async concat() { - if (this[OBJECTMODE]) { - throw new Error('cannot concat in objectMode'); - } - const buf = await this.collect(); - return (this[ENCODING] - ? buf.join('') - : Buffer.concat(buf, buf.dataLength)); - } - /** - * Return a void Promise that resolves once the stream ends. - */ - async promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))); - this.on('error', er => reject(er)); - this.on('end', () => resolve()); - }); - } - /** - * Asynchronous `for await of` iteration. - * - * This will continue emitting all chunks until the stream terminates. - */ - [Symbol.asyncIterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. - this[DISCARDED] = false; - let stopped = false; - const stop = async () => { - this.pause(); - stopped = true; - return { value: undefined, done: true }; - }; - const next = () => { - if (stopped) - return stop(); - const res = this.read(); - if (res !== null) - return Promise.resolve({ done: false, value: res }); - if (this[EOF]) - return stop(); - let resolve; - let reject; - const onerr = (er) => { - this.off('data', ondata); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - stop(); - reject(er); - }; - const ondata = (value) => { - this.off('error', onerr); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - this.pause(); - resolve({ value, done: !!this[EOF] }); - }; - const onend = () => { - this.off('error', onerr); - this.off('data', ondata); - this.off(DESTROYED, ondestroy); - stop(); - resolve({ done: true, value: undefined }); - }; - const ondestroy = () => onerr(new Error('stream destroyed')); - return new Promise((res, rej) => { - reject = rej; - resolve = res; - this.once(DESTROYED, ondestroy); - this.once('error', onerr); - this.once('end', onend); - this.once('data', ondata); - }); - }; - return { - next, - throw: stop, - return: stop, - [Symbol.asyncIterator]() { - return this; - }, - }; - } - /** - * Synchronous `for of` iteration. - * - * The iteration will terminate when the internal buffer runs out, even - * if the stream has not yet terminated. - */ - [Symbol.iterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. 
- this[DISCARDED] = false; - let stopped = false; - const stop = () => { - this.pause(); - this.off(ERROR, stop); - this.off(DESTROYED, stop); - this.off('end', stop); - stopped = true; - return { done: true, value: undefined }; - }; - const next = () => { - if (stopped) - return stop(); - const value = this.read(); - return value === null ? stop() : { done: false, value }; - }; - this.once('end', stop); - this.once(ERROR, stop); - this.once(DESTROYED, stop); - return { - next, - throw: stop, - return: stop, - [Symbol.iterator]() { - return this; - }, - }; - } - /** - * Destroy a stream, preventing it from being used for any further purpose. - * - * If the stream has a `close()` method, then it will be called on - * destruction. - * - * After destruction, any attempt to write data, read data, or emit most - * events will be ignored. - * - * If an error argument is provided, then it will be emitted in an - * 'error' event. - */ - destroy(er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er); - else - this.emit(DESTROYED); - return this; - } - this[DESTROYED] = true; - this[DISCARDED] = true; - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0; - this[BUFFERLENGTH] = 0; - const wc = this; - if (typeof wc.close === 'function' && !this[CLOSED]) - wc.close(); - if (er) - this.emit('error', er); - // if no error to emit, still reject pending promises - else - this.emit(DESTROYED); - return this; - } - /** - * Alias for {@link isStream} - * - * Former export location, maintained for backwards compatibility. - * - * @deprecated - */ - static get isStream() { - return exports.isStream; - } -} -exports.Minipass = Minipass; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/ssri/node_modules/minipass/dist/cjs/package.json b/node_modules/ssri/node_modules/minipass/dist/cjs/package.json deleted file mode 100644 index 5bbefffbabee3..0000000000000 --- a/node_modules/ssri/node_modules/minipass/dist/cjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/ssri/node_modules/minipass/dist/mjs/index.js b/node_modules/ssri/node_modules/minipass/dist/mjs/index.js deleted file mode 100644 index b65fafbae43a4..0000000000000 --- a/node_modules/ssri/node_modules/minipass/dist/mjs/index.js +++ /dev/null @@ -1,1018 +0,0 @@ -const proc = typeof process === 'object' && process - ? process - : { - stdout: null, - stderr: null, - }; -import { EventEmitter } from 'events'; -import Stream from 'stream'; -import { StringDecoder } from 'string_decoder'; -/** - * Return true if the argument is a Minipass stream, Node stream, or something - * else that Minipass can interact with. 
- */ -export const isStream = (s) => !!s && - typeof s === 'object' && - (s instanceof Minipass || - s instanceof Stream || - isReadable(s) || - isWritable(s)); -/** - * Return true if the argument is a valid {@link Minipass.Readable} - */ -export const isReadable = (s) => !!s && - typeof s === 'object' && - s instanceof EventEmitter && - typeof s.pipe === 'function' && - // node core Writable streams have a pipe() method, but it throws - s.pipe !== Stream.Writable.prototype.pipe; -/** - * Return true if the argument is a valid {@link Minipass.Writable} - */ -export const isWritable = (s) => !!s && - typeof s === 'object' && - s instanceof EventEmitter && - typeof s.write === 'function' && - typeof s.end === 'function'; -const EOF = Symbol('EOF'); -const MAYBE_EMIT_END = Symbol('maybeEmitEnd'); -const EMITTED_END = Symbol('emittedEnd'); -const EMITTING_END = Symbol('emittingEnd'); -const EMITTED_ERROR = Symbol('emittedError'); -const CLOSED = Symbol('closed'); -const READ = Symbol('read'); -const FLUSH = Symbol('flush'); -const FLUSHCHUNK = Symbol('flushChunk'); -const ENCODING = Symbol('encoding'); -const DECODER = Symbol('decoder'); -const FLOWING = Symbol('flowing'); -const PAUSED = Symbol('paused'); -const RESUME = Symbol('resume'); -const BUFFER = Symbol('buffer'); -const PIPES = Symbol('pipes'); -const BUFFERLENGTH = Symbol('bufferLength'); -const BUFFERPUSH = Symbol('bufferPush'); -const BUFFERSHIFT = Symbol('bufferShift'); -const OBJECTMODE = Symbol('objectMode'); -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed'); -// internal event when stream has an error -const ERROR = Symbol('error'); -const EMITDATA = Symbol('emitData'); -const EMITEND = Symbol('emitEnd'); -const EMITEND2 = Symbol('emitEnd2'); -const ASYNC = Symbol('async'); -const ABORT = Symbol('abort'); -const ABORTED = Symbol('aborted'); -const SIGNAL = Symbol('signal'); -const DATALISTENERS = Symbol('dataListeners'); -const DISCARDED = Symbol('discarded'); -const defer = (fn) => Promise.resolve().then(fn); -const nodefer = (fn) => fn(); -const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish'; -const isArrayBufferLike = (b) => b instanceof ArrayBuffer || - (!!b && - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0); -const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b); -/** - * Internal class representing a pipe to a destination stream. - * - * @internal - */ -class Pipe { - src; - dest; - opts; - ondrain; - constructor(src, dest, opts) { - this.src = src; - this.dest = dest; - this.opts = opts; - this.ondrain = () => src[RESUME](); - this.dest.on('drain', this.ondrain); - } - unpipe() { - this.dest.removeListener('drain', this.ondrain); - } - // only here for the prototype - /* c8 ignore start */ - proxyErrors(_er) { } - /* c8 ignore stop */ - end() { - this.unpipe(); - if (this.opts.end) - this.dest.end(); - } -} -/** - * Internal class representing a pipe to a destination stream where - * errors are proxied. 
- * - * @internal - */ -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors); - super.unpipe(); - } - constructor(src, dest, opts) { - super(src, dest, opts); - this.proxyErrors = er => dest.emit('error', er); - src.on('error', this.proxyErrors); - } -} -const isObjectModeOptions = (o) => !!o.objectMode; -const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer'; -/** - * Main export, the Minipass class - * - * `RType` is the type of data emitted, defaults to Buffer - * - * `WType` is the type of data to be written, if RType is buffer or string, - * then any {@link Minipass.ContiguousData} is allowed. - * - * `Events` is the set of event handler signatures that this object - * will emit, see {@link Minipass.Events} - */ -export class Minipass extends EventEmitter { - [FLOWING] = false; - [PAUSED] = false; - [PIPES] = []; - [BUFFER] = []; - [OBJECTMODE]; - [ENCODING]; - [ASYNC]; - [DECODER]; - [EOF] = false; - [EMITTED_END] = false; - [EMITTING_END] = false; - [CLOSED] = false; - [EMITTED_ERROR] = null; - [BUFFERLENGTH] = 0; - [DESTROYED] = false; - [SIGNAL]; - [ABORTED] = false; - [DATALISTENERS] = 0; - [DISCARDED] = false; - /** - * true if the stream can be written - */ - writable = true; - /** - * true if the stream can be read - */ - readable = true; - /** - * If `RType` is Buffer, then options do not need to be provided. - * Otherwise, an options object must be provided to specify either - * {@link Minipass.SharedOptions.objectMode} or - * {@link Minipass.SharedOptions.encoding}, as appropriate. - */ - constructor(...args) { - const options = (args[0] || - {}); - super(); - if (options.objectMode && typeof options.encoding === 'string') { - throw new TypeError('Encoding and objectMode may not be used together'); - } - if (isObjectModeOptions(options)) { - this[OBJECTMODE] = true; - this[ENCODING] = null; - } - else if (isEncodingOptions(options)) { - this[ENCODING] = options.encoding; - this[OBJECTMODE] = false; - } - else { - this[OBJECTMODE] = false; - this[ENCODING] = null; - } - this[ASYNC] = !!options.async; - this[DECODER] = this[ENCODING] - ? new StringDecoder(this[ENCODING]) - : null; - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }); - } - //@ts-ignore - private option for debugging and testing - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }); - } - const { signal } = options; - if (signal) { - this[SIGNAL] = signal; - if (signal.aborted) { - this[ABORT](); - } - else { - signal.addEventListener('abort', () => this[ABORT]()); - } - } - } - /** - * The amount of data stored in the buffer waiting to be read. - * - * For Buffer strings, this will be the total byte length. - * For string encoding streams, this will be the string character length, - * according to JavaScript's `string.length` logic. - * For objectMode streams, this is a count of the items waiting to be - * emitted. 
- */ - get bufferLength() { - return this[BUFFERLENGTH]; - } - /** - * The `BufferEncoding` currently in use, or `null` - */ - get encoding() { - return this[ENCODING]; - } - /** - * @deprecated - This is a read only property - */ - set encoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * @deprecated - Encoding may only be set at instantiation time - */ - setEncoding(_enc) { - throw new Error('Encoding must be set at instantiation time'); - } - /** - * True if this is an objectMode stream - */ - get objectMode() { - return this[OBJECTMODE]; - } - /** - * @deprecated - This is a read-only property - */ - set objectMode(_om) { - throw new Error('objectMode must be set at instantiation time'); - } - /** - * true if this is an async stream - */ - get ['async']() { - return this[ASYNC]; - } - /** - * Set to true to make this stream async. - * - * Once set, it cannot be unset, as this would potentially cause incorrect - * behavior. Ie, a sync stream can be made async, but an async stream - * cannot be safely made sync. - */ - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a; - } - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true; - this.emit('abort', this[SIGNAL]?.reason); - this.destroy(this[SIGNAL]?.reason); - } - /** - * True if the stream has been aborted. - */ - get aborted() { - return this[ABORTED]; - } - /** - * No-op setter. Stream aborted status is set via the AbortSignal provided - * in the constructor options. - */ - set aborted(_) { } - write(chunk, encoding, cb) { - if (this[ABORTED]) - return false; - if (this[EOF]) - throw new Error('write after end'); - if (this[DESTROYED]) { - this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' })); - return true; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (!encoding) - encoding = 'utf8'; - const fn = this[ASYNC] ? defer : nodefer; - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything is only allowed if in object mode, so throw - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength); - } - else if (isArrayBufferLike(chunk)) { - //@ts-ignore - sinful unsafe type changing - chunk = Buffer.from(chunk); - } - else if (typeof chunk !== 'string') { - throw new Error('Non-contiguous data written to non-objectMode stream'); - } - } - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - // maybe impossible? 
- /* c8 ignore start */ - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - /* c8 ignore stop */ - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) { - //@ts-ignore - sinful unsafe type change - chunk = Buffer.from(chunk, encoding); - } - if (Buffer.isBuffer(chunk) && this[ENCODING]) { - //@ts-ignore - sinful unsafe type change - chunk = this[DECODER].write(chunk); - } - // Note: flushing CAN potentially switch us into not-flowing mode - if (this[FLOWING] && this[BUFFERLENGTH] !== 0) - this[FLUSH](true); - if (this[FLOWING]) - this.emit('data', chunk); - else - this[BUFFERPUSH](chunk); - if (this[BUFFERLENGTH] !== 0) - this.emit('readable'); - if (cb) - fn(cb); - return this[FLOWING]; - } - /** - * Low-level explicit read method. - * - * In objectMode, the argument is ignored, and one item is returned if - * available. - * - * `n` is the number of bytes (or in the case of encoding streams, - * characters) to consume. If `n` is not provided, then the entire buffer - * is returned, or `null` is returned if no data is available. - * - * If `n` is greater that the amount of data in the internal buffer, - * then `null` is returned. - */ - read(n) { - if (this[DESTROYED]) - return null; - this[DISCARDED] = false; - if (this[BUFFERLENGTH] === 0 || - n === 0 || - (n && n > this[BUFFERLENGTH])) { - this[MAYBE_EMIT_END](); - return null; - } - if (this[OBJECTMODE]) - n = null; - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - // not object mode, so if we have an encoding, then RType is string - // otherwise, must be Buffer - this[BUFFER] = [ - (this[ENCODING] - ? this[BUFFER].join('') - : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])), - ]; - } - const ret = this[READ](n || null, this[BUFFER][0]); - this[MAYBE_EMIT_END](); - return ret; - } - [READ](n, chunk) { - if (this[OBJECTMODE]) - this[BUFFERSHIFT](); - else { - const c = chunk; - if (n === c.length || n === null) - this[BUFFERSHIFT](); - else if (typeof c === 'string') { - this[BUFFER][0] = c.slice(n); - chunk = c.slice(0, n); - this[BUFFERLENGTH] -= n; - } - else { - this[BUFFER][0] = c.subarray(n); - chunk = c.subarray(0, n); - this[BUFFERLENGTH] -= n; - } - } - this.emit('data', chunk); - if (!this[BUFFER].length && !this[EOF]) - this.emit('drain'); - return chunk; - } - end(chunk, encoding, cb) { - if (typeof chunk === 'function') { - cb = chunk; - chunk = undefined; - } - if (typeof encoding === 'function') { - cb = encoding; - encoding = 'utf8'; - } - if (chunk !== undefined) - this.write(chunk, encoding); - if (cb) - this.once('end', cb); - this[EOF] = true; - this.writable = false; - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this[FLOWING] || !this[PAUSED]) - this[MAYBE_EMIT_END](); - return this; - } - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) - return; - if (!this[DATALISTENERS] && !this[PIPES].length) { - this[DISCARDED] = true; - } - this[PAUSED] = false; - this[FLOWING] = true; - this.emit('resume'); - if (this[BUFFER].length) - this[FLUSH](); - else if (this[EOF]) - this[MAYBE_EMIT_END](); - else - this.emit('drain'); - } - /** - * Resume the stream if it is currently in a paused state - * - * If called when there are no pipe destinations or `data` event listeners, - * this will place the stream in a "discarded" state, where all data will - * be thrown away. The discarded state is removed if a pipe destination or - * data handler is added, if pause() is called, or if any synchronous or - * asynchronous iteration is started. - */ - resume() { - return this[RESUME](); - } - /** - * Pause the stream - */ - pause() { - this[FLOWING] = false; - this[PAUSED] = true; - this[DISCARDED] = false; - } - /** - * true if the stream has been forcibly destroyed - */ - get destroyed() { - return this[DESTROYED]; - } - /** - * true if the stream is currently in a flowing state, meaning that - * any writes will be immediately emitted. - */ - get flowing() { - return this[FLOWING]; - } - /** - * true if the stream is currently in a paused state - */ - get paused() { - return this[PAUSED]; - } - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1; - else - this[BUFFERLENGTH] += chunk.length; - this[BUFFER].push(chunk); - } - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1; - else - this[BUFFERLENGTH] -= this[BUFFER][0].length; - return this[BUFFER].shift(); - } - [FLUSH](noDrain = false) { - do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && - this[BUFFER].length); - if (!noDrain && !this[BUFFER].length && !this[EOF]) - this.emit('drain'); - } - [FLUSHCHUNK](chunk) { - this.emit('data', chunk); - return this[FLOWING]; - } - /** - * Pipe all data emitted by this stream into the destination provided. - * - * Triggers the flow of data. - */ - pipe(dest, opts) { - if (this[DESTROYED]) - return dest; - this[DISCARDED] = false; - const ended = this[EMITTED_END]; - opts = opts || {}; - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false; - else - opts.end = opts.end !== false; - opts.proxyErrors = !!opts.proxyErrors; - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end(); - } - else { - // "as" here just ignores the WType, which pipes don't care about, - // since they're only consuming from us, and writing to the dest - this[PIPES].push(!opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)); - if (this[ASYNC]) - defer(() => this[RESUME]()); - else - this[RESUME](); - } - return dest; - } - /** - * Fully unhook a piped destination stream. - * - * If the destination stream was the only consumer of this stream (ie, - * there are no other piped destinations or `'data'` event listeners) - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. 
- */ - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest); - if (p) { - if (this[PIPES].length === 1) { - if (this[FLOWING] && this[DATALISTENERS] === 0) { - this[FLOWING] = false; - } - this[PIPES] = []; - } - else - this[PIPES].splice(this[PIPES].indexOf(p), 1); - p.unpipe(); - } - } - /** - * Alias for {@link Minipass#on} - */ - addListener(ev, handler) { - return this.on(ev, handler); - } - /** - * Mostly identical to `EventEmitter.on`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * - Adding a 'data' event handler will trigger the flow of data - * - * - Adding a 'readable' event handler when there is data waiting to be read - * will cause 'readable' to be emitted immediately. - * - * - Adding an 'endish' event handler ('end', 'finish', etc.) which has - * already passed will cause the event to be emitted immediately and all - * handlers removed. - * - * - Adding an 'error' event handler after an error has been emitted will - * cause the event to be re-emitted immediately with the error previously - * raised. - */ - on(ev, handler) { - const ret = super.on(ev, handler); - if (ev === 'data') { - this[DISCARDED] = false; - this[DATALISTENERS]++; - if (!this[PIPES].length && !this[FLOWING]) { - this[RESUME](); - } - } - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) { - super.emit('readable'); - } - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev); - this.removeAllListeners(ev); - } - else if (ev === 'error' && this[EMITTED_ERROR]) { - const h = handler; - if (this[ASYNC]) - defer(() => h.call(this, this[EMITTED_ERROR])); - else - h.call(this, this[EMITTED_ERROR]); - } - return ret; - } - /** - * Alias for {@link Minipass#off} - */ - removeListener(ev, handler) { - return this.off(ev, handler); - } - /** - * Mostly identical to `EventEmitter.off` - * - * If a 'data' event handler is removed, and it was the last consumer - * (ie, there are no pipe destinations or other 'data' event listeners), - * then the flow of data will stop until there is another consumer or - * {@link Minipass#resume} is explicitly called. - */ - off(ev, handler) { - const ret = super.off(ev, handler); - // if we previously had listeners, and now we don't, and we don't - // have any pipes, then stop the flow, unless it's been explicitly - // put in a discarded flowing state via stream.resume(). - if (ev === 'data') { - this[DATALISTENERS] = this.listeners('data').length; - if (this[DATALISTENERS] === 0 && - !this[DISCARDED] && - !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * Mostly identical to `EventEmitter.removeAllListeners` - * - * If all 'data' event handlers are removed, and they were the last consumer - * (ie, there are no pipe destinations), then the flow of data will stop - * until there is another consumer or {@link Minipass#resume} is explicitly - * called. 
- */ - removeAllListeners(ev) { - const ret = super.removeAllListeners(ev); - if (ev === 'data' || ev === undefined) { - this[DATALISTENERS] = 0; - if (!this[DISCARDED] && !this[PIPES].length) { - this[FLOWING] = false; - } - } - return ret; - } - /** - * true if the 'end' event has been emitted - */ - get emittedEnd() { - return this[EMITTED_END]; - } - [MAYBE_EMIT_END]() { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF]) { - this[EMITTING_END] = true; - this.emit('end'); - this.emit('prefinish'); - this.emit('finish'); - if (this[CLOSED]) - this.emit('close'); - this[EMITTING_END] = false; - } - } - /** - * Mostly identical to `EventEmitter.emit`, with the following - * behavior differences to prevent data loss and unnecessary hangs: - * - * If the stream has been destroyed, and the event is something other - * than 'close' or 'error', then `false` is returned and no handlers - * are called. - * - * If the event is 'end', and has already been emitted, then the event - * is ignored. If the stream is in a paused or non-flowing state, then - * the event will be deferred until data flow resumes. If the stream is - * async, then handlers will be called on the next tick rather than - * immediately. - * - * If the event is 'close', and 'end' has not yet been emitted, then - * the event will be deferred until after 'end' is emitted. - * - * If the event is 'error', and an AbortSignal was provided for the stream, - * and there are no listeners, then the event is ignored, matching the - * behavior of node core streams in the presense of an AbortSignal. - * - * If the event is 'finish' or 'prefinish', then all listeners will be - * removed after emitting the event, to prevent double-firing. - */ - emit(ev, ...args) { - const data = args[0]; - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && - ev !== 'close' && - ev !== DESTROYED && - this[DESTROYED]) { - return false; - } - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? (defer(() => this[EMITDATA](data)), true) - : this[EMITDATA](data); - } - else if (ev === 'end') { - return this[EMITEND](); - } - else if (ev === 'close') { - this[CLOSED] = true; - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return false; - const ret = super.emit('close'); - this.removeAllListeners('close'); - return ret; - } - else if (ev === 'error') { - this[EMITTED_ERROR] = data; - super.emit(ERROR, data); - const ret = !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false; - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'resume') { - const ret = super.emit('resume'); - this[MAYBE_EMIT_END](); - return ret; - } - else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev); - this.removeAllListeners(ev); - return ret; - } - // Some other unknown event - const ret = super.emit(ev, ...args); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) - this.pause(); - } - const ret = this[DISCARDED] ? false : super.emit('data', data); - this[MAYBE_EMIT_END](); - return ret; - } - [EMITEND]() { - if (this[EMITTED_END]) - return false; - this[EMITTED_END] = true; - this.readable = false; - return this[ASYNC] - ? 
(defer(() => this[EMITEND2]()), true) - : this[EMITEND2](); - } - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end(); - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data); - } - if (!this[DISCARDED]) - super.emit('data', data); - } - } - for (const p of this[PIPES]) { - p.end(); - } - const ret = super.emit('end'); - this.removeAllListeners('end'); - return ret; - } - /** - * Return a Promise that resolves to an array of all emitted data once - * the stream ends. - */ - async collect() { - const buf = Object.assign([], { - dataLength: 0, - }); - if (!this[OBJECTMODE]) - buf.dataLength = 0; - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise(); - this.on('data', c => { - buf.push(c); - if (!this[OBJECTMODE]) - buf.dataLength += c.length; - }); - await p; - return buf; - } - /** - * Return a Promise that resolves to the concatenation of all emitted data - * once the stream ends. - * - * Not allowed on objectMode streams. - */ - async concat() { - if (this[OBJECTMODE]) { - throw new Error('cannot concat in objectMode'); - } - const buf = await this.collect(); - return (this[ENCODING] - ? buf.join('') - : Buffer.concat(buf, buf.dataLength)); - } - /** - * Return a void Promise that resolves once the stream ends. - */ - async promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))); - this.on('error', er => reject(er)); - this.on('end', () => resolve()); - }); - } - /** - * Asynchronous `for await of` iteration. - * - * This will continue emitting all chunks until the stream terminates. - */ - [Symbol.asyncIterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. - this[DISCARDED] = false; - let stopped = false; - const stop = async () => { - this.pause(); - stopped = true; - return { value: undefined, done: true }; - }; - const next = () => { - if (stopped) - return stop(); - const res = this.read(); - if (res !== null) - return Promise.resolve({ done: false, value: res }); - if (this[EOF]) - return stop(); - let resolve; - let reject; - const onerr = (er) => { - this.off('data', ondata); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - stop(); - reject(er); - }; - const ondata = (value) => { - this.off('error', onerr); - this.off('end', onend); - this.off(DESTROYED, ondestroy); - this.pause(); - resolve({ value, done: !!this[EOF] }); - }; - const onend = () => { - this.off('error', onerr); - this.off('data', ondata); - this.off(DESTROYED, ondestroy); - stop(); - resolve({ done: true, value: undefined }); - }; - const ondestroy = () => onerr(new Error('stream destroyed')); - return new Promise((res, rej) => { - reject = rej; - resolve = res; - this.once(DESTROYED, ondestroy); - this.once('error', onerr); - this.once('end', onend); - this.once('data', ondata); - }); - }; - return { - next, - throw: stop, - return: stop, - [Symbol.asyncIterator]() { - return this; - }, - }; - } - /** - * Synchronous `for of` iteration. - * - * The iteration will terminate when the internal buffer runs out, even - * if the stream has not yet terminated. - */ - [Symbol.iterator]() { - // set this up front, in case the consumer doesn't call next() - // right away. 
- this[DISCARDED] = false; - let stopped = false; - const stop = () => { - this.pause(); - this.off(ERROR, stop); - this.off(DESTROYED, stop); - this.off('end', stop); - stopped = true; - return { done: true, value: undefined }; - }; - const next = () => { - if (stopped) - return stop(); - const value = this.read(); - return value === null ? stop() : { done: false, value }; - }; - this.once('end', stop); - this.once(ERROR, stop); - this.once(DESTROYED, stop); - return { - next, - throw: stop, - return: stop, - [Symbol.iterator]() { - return this; - }, - }; - } - /** - * Destroy a stream, preventing it from being used for any further purpose. - * - * If the stream has a `close()` method, then it will be called on - * destruction. - * - * After destruction, any attempt to write data, read data, or emit most - * events will be ignored. - * - * If an error argument is provided, then it will be emitted in an - * 'error' event. - */ - destroy(er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er); - else - this.emit(DESTROYED); - return this; - } - this[DESTROYED] = true; - this[DISCARDED] = true; - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0; - this[BUFFERLENGTH] = 0; - const wc = this; - if (typeof wc.close === 'function' && !this[CLOSED]) - wc.close(); - if (er) - this.emit('error', er); - // if no error to emit, still reject pending promises - else - this.emit(DESTROYED); - return this; - } - /** - * Alias for {@link isStream} - * - * Former export location, maintained for backwards compatibility. - * - * @deprecated - */ - static get isStream() { - return isStream; - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/ssri/node_modules/minipass/dist/mjs/package.json b/node_modules/ssri/node_modules/minipass/dist/mjs/package.json deleted file mode 100644 index 3dbc1ca591c05..0000000000000 --- a/node_modules/ssri/node_modules/minipass/dist/mjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git a/node_modules/ssri/node_modules/minipass/package.json b/node_modules/ssri/node_modules/minipass/package.json deleted file mode 100644 index 6faaa247a5bc6..0000000000000 --- a/node_modules/ssri/node_modules/minipass/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "minipass", - "version": "7.0.3", - "description": "minimal implementation of a PassThrough stream", - "main": "./dist/cjs/index.js", - "module": "./dist/mjs/index.js", - "types": "./dist/cjs/index.js", - "exports": { - ".": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" - }, - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.js" - } - }, - "./package.json": "./package.json" - }, - "files": [ - "dist" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", - "format": "prettier --write . 
--loglevel warn", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" - }, - "tap": { - "coverage": false, - "node-arg": [ - "--enable-source-maps", - "--no-warnings", - "--loader", - "ts-node/esm" - ], - "ts": false - }, - "prettier": { - "semi": false, - "printWidth": 75, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "devDependencies": { - "@types/node": "^20.1.2", - "@types/tap": "^15.0.8", - "c8": "^7.13.0", - "prettier": "^2.6.2", - "tap": "^16.3.0", - "ts-node": "^10.9.1", - "typedoc": "^0.24.8", - "typescript": "^5.1.3", - "end-of-stream": "^1.4.0", - "node-abort-controller": "^3.1.1", - "sync-content": "^1.0.2", - "through2": "^2.0.3" - }, - "repository": "https://github.com/isaacs/minipass", - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } -} diff --git a/node_modules/pacote/node_modules/minipass/LICENSE b/node_modules/tar/node_modules/minipass/LICENSE similarity index 100% rename from node_modules/pacote/node_modules/minipass/LICENSE rename to node_modules/tar/node_modules/minipass/LICENSE diff --git a/node_modules/tar/node_modules/minipass/index.js b/node_modules/tar/node_modules/minipass/index.js new file mode 100644 index 0000000000000..ed07c17acd97b --- /dev/null +++ b/node_modules/tar/node_modules/minipass/index.js @@ -0,0 +1,702 @@ +'use strict' +const proc = + typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + } +const EE = require('events') +const Stream = require('stream') +const stringdecoder = require('string_decoder') +const SD = stringdecoder.StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFER = Symbol('buffer') +const PIPES = Symbol('pipes') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed') +// internal event when stream has an error +const ERROR = Symbol('error') +const EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') +const ABORT = Symbol('abort') +const ABORTED = Symbol('aborted') +const SIGNAL = Symbol('signal') + +const defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = + (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') +const ITERATOR = + (doIter && Symbol.iterator) || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. 
+const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' + +const isArrayBuffer = b => + b instanceof ArrayBuffer || + (typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0) + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor(src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe() { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors() {} + end() { + this.unpipe() + if (this.opts.end) this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor(src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +class Minipass extends Stream { + constructor(options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this[PIPES] = [] + this[BUFFER] = [] + this[OBJECTMODE] = (options && options.objectMode) || false + if (this[OBJECTMODE]) this[ENCODING] = null + else this[ENCODING] = (options && options.encoding) || null + if (this[ENCODING] === 'buffer') this[ENCODING] = null + this[ASYNC] = (options && !!options.async) || false + this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) + } + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) + } + this[SIGNAL] = options && options.signal + this[ABORTED] = false + if (this[SIGNAL]) { + this[SIGNAL].addEventListener('abort', () => this[ABORT]()) + if (this[SIGNAL].aborted) { + this[ABORT]() + } + } + } + + get bufferLength() { + return this[BUFFERLENGTH] + } + + get encoding() { + return this[ENCODING] + } + set encoding(enc) { + if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') + + if ( + this[ENCODING] && + enc !== this[ENCODING] && + ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) + ) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? 
new SD(enc) : null + if (this[BUFFER].length) + this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding(enc) { + this.encoding = enc + } + + get objectMode() { + return this[OBJECTMODE] + } + set objectMode(om) { + this[OBJECTMODE] = this[OBJECTMODE] || !!om + } + + get ['async']() { + return this[ASYNC] + } + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a + } + + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true + this.emit('abort', this[SIGNAL].reason) + this.destroy(this[SIGNAL].reason) + } + + get aborted() { + return this[ABORTED] + } + set aborted(_) {} + + write(chunk, encoding, cb) { + if (this[ABORTED]) return false + if (this[EOF]) throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit( + 'error', + Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + ) + ) + return true + } + + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + + if (!encoding) encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? 
*/ + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + if (cb) fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if ( + typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed) + ) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + read(n) { + if (this[DESTROYED]) return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + + if (this[OBJECTMODE]) n = null + + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] + else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this[BUFFER][0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ](n, chunk) { + if (n === chunk.length || n === null) this[BUFFERSHIFT]() + else { + this[BUFFER][0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this[BUFFER].length && !this[EOF]) this.emit('drain') + + return chunk + } + + end(chunk, encoding, cb) { + if (typeof chunk === 'function') (cb = chunk), (chunk = null) + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + if (chunk) this.write(chunk, encoding) + if (cb) this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this[BUFFER].length) this[FLUSH]() + else if (this[EOF]) this[MAYBE_EMIT_END]() + else this.emit('drain') + } + + resume() { + return this[RESUME]() + } + + pause() { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed() { + return this[DESTROYED] + } + + get flowing() { + return this[FLOWING] + } + + get paused() { + return this[PAUSED] + } + + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 + else this[BUFFERLENGTH] += chunk.length + this[BUFFER].push(chunk) + } + + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 + else this[BUFFERLENGTH] -= this[BUFFER][0].length + return this[BUFFER].shift() + } + + [FLUSH](noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) + + if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') + } + + [FLUSHCHUNK](chunk) { + this.emit('data', chunk) + return this.flowing + } + + pipe(dest, opts) { + if (this[DESTROYED]) return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) opts.end = false + else opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) dest.end() + } else { + this[PIPES].push( + !opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts) + ) + if (this[ASYNC]) defer(() => this[RESUME]()) + else this[RESUME]() + } + + return dest + } + + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest) + if (p) { + this[PIPES].splice(this[PIPES].indexOf(p), 1) + p.unpipe() + } + } + + addListener(ev, fn) { + return this.on(ev, fn) + } + + on(ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) + else fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd() { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END]() { + if ( + !this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF] + ) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) this.emit('close') + this[EMITTING_END] = false + } + } + + emit(ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + super.emit(ERROR, data) + const ret = + !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND]() { + if (this[EMITTED_END]) return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) defer(() => this[EMITEND2]()) + else this[EMITEND2]() + } + + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this[PIPES]) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect() { + const buf = [] + if (!this[OBJECTMODE]) buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat() { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] + ? 
buf.join('') + : Buffer.concat(buf, buf.dataLength) + ) + } + + // stream.promise().then(() => done, er => emitted error) + promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + stopped = true + return Promise.resolve({ done: true }) + } + const next = () => { + if (stopped) return stop() + const res = this.read() + if (res !== null) return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) return stop() + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + stop() + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + this.removeListener(DESTROYED, ondestroy) + stop() + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { + next, + throw: stop, + return: stop, + [ASYNCITERATOR]() { + return this + }, + } + } + + // for (let chunk of stream) + [ITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + this.removeListener(ERROR, stop) + this.removeListener(DESTROYED, stop) + this.removeListener('end', stop) + stopped = true + return { done: true } + } + + const next = () => { + if (stopped) return stop() + const value = this.read() + return value === null ? stop() : { value } + } + this.once('end', stop) + this.once(ERROR, stop) + this.once(DESTROYED, stop) + + return { + next, + throw: stop, + return: stop, + [ITERATOR]() { + return this + }, + } + } + + destroy(er) { + if (this[DESTROYED]) { + if (er) this.emit('error', er) + else this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) this.close() + + if (er) this.emit('error', er) + // if no error to emit, still reject pending promises + else this.emit(DESTROYED) + + return this + } + + static isStream(s) { + return ( + !!s && + (s instanceof Minipass || + s instanceof Stream || + (s instanceof EE && + // readable + (typeof s.pipe === 'function' || + // writable + (typeof s.write === 'function' && typeof s.end === 'function')))) + ) + } +} + +exports.Minipass = Minipass diff --git a/node_modules/tar/node_modules/minipass/index.mjs b/node_modules/tar/node_modules/minipass/index.mjs new file mode 100644 index 0000000000000..6ef6cd8cf0703 --- /dev/null +++ b/node_modules/tar/node_modules/minipass/index.mjs @@ -0,0 +1,702 @@ +'use strict' +const proc = + typeof process === 'object' && process + ? 
process + : { + stdout: null, + stderr: null, + } +import EE from 'events' +import Stream from 'stream' +import stringdecoder from 'string_decoder' +const SD = stringdecoder.StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFER = Symbol('buffer') +const PIPES = Symbol('pipes') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed') +// internal event when stream has an error +const ERROR = Symbol('error') +const EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') +const ABORT = Symbol('abort') +const ABORTED = Symbol('aborted') +const SIGNAL = Symbol('signal') + +const defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = + (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') +const ITERATOR = + (doIter && Symbol.iterator) || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. +const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' + +const isArrayBuffer = b => + b instanceof ArrayBuffer || + (typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0) + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor(src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe() { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors() {} + end() { + this.unpipe() + if (this.opts.end) this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor(src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +export class Minipass extends Stream { + constructor(options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this[PIPES] = [] + this[BUFFER] = [] + this[OBJECTMODE] = (options && options.objectMode) || false + if (this[OBJECTMODE]) this[ENCODING] = null + else this[ENCODING] = (options && options.encoding) || null + if (this[ENCODING] === 'buffer') this[ENCODING] = null + this[ASYNC] = (options && !!options.async) || false + this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) + } + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) + } + this[SIGNAL] = options && options.signal + this[ABORTED] = false + if (this[SIGNAL]) { + this[SIGNAL].addEventListener('abort', () => this[ABORT]()) + if (this[SIGNAL].aborted) { + this[ABORT]() + } + } + } + + get bufferLength() { + return this[BUFFERLENGTH] + } + + get encoding() { + return this[ENCODING] + } + set encoding(enc) { + if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') + + if ( + this[ENCODING] && + enc !== this[ENCODING] && + ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) + ) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? new SD(enc) : null + if (this[BUFFER].length) + this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding(enc) { + this.encoding = enc + } + + get objectMode() { + return this[OBJECTMODE] + } + set objectMode(om) { + this[OBJECTMODE] = this[OBJECTMODE] || !!om + } + + get ['async']() { + return this[ASYNC] + } + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a + } + + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true + this.emit('abort', this[SIGNAL].reason) + this.destroy(this[SIGNAL].reason) + } + + get aborted() { + return this[ABORTED] + } + set aborted(_) {} + + write(chunk, encoding, cb) { + if (this[ABORTED]) return false + if (this[EOF]) throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit( + 'error', + Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + ) + ) + return true + } + + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + + if (!encoding) encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? 
*/ + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + if (cb) fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if ( + typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed) + ) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + read(n) { + if (this[DESTROYED]) return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + + if (this[OBJECTMODE]) n = null + + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] + else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this[BUFFER][0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ](n, chunk) { + if (n === chunk.length || n === null) this[BUFFERSHIFT]() + else { + this[BUFFER][0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this[BUFFER].length && !this[EOF]) this.emit('drain') + + return chunk + } + + end(chunk, encoding, cb) { + if (typeof chunk === 'function') (cb = chunk), (chunk = null) + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + if (chunk) this.write(chunk, encoding) + if (cb) this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this[BUFFER].length) this[FLUSH]() + else if (this[EOF]) this[MAYBE_EMIT_END]() + else this.emit('drain') + } + + resume() { + return this[RESUME]() + } + + pause() { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed() { + return this[DESTROYED] + } + + get flowing() { + return this[FLOWING] + } + + get paused() { + return this[PAUSED] + } + + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 + else this[BUFFERLENGTH] += chunk.length + this[BUFFER].push(chunk) + } + + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 + else this[BUFFERLENGTH] -= this[BUFFER][0].length + return this[BUFFER].shift() + } + + [FLUSH](noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) + + if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') + } + + [FLUSHCHUNK](chunk) { + this.emit('data', chunk) + return this.flowing + } + + pipe(dest, opts) { + if (this[DESTROYED]) return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) opts.end = false + else opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) dest.end() + } else { + this[PIPES].push( + !opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts) + ) + if (this[ASYNC]) defer(() => this[RESUME]()) + else this[RESUME]() + } + + return dest + } + + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest) + if (p) { + this[PIPES].splice(this[PIPES].indexOf(p), 1) + p.unpipe() + } + } + + addListener(ev, fn) { + return this.on(ev, fn) + } + + on(ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) + else fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd() { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END]() { + if ( + !this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF] + ) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) this.emit('close') + this[EMITTING_END] = false + } + } + + emit(ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + super.emit(ERROR, data) + const ret = + !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND]() { + if (this[EMITTED_END]) return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) defer(() => this[EMITEND2]()) + else this[EMITEND2]() + } + + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this[PIPES]) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect() { + const buf = [] + if (!this[OBJECTMODE]) buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat() { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] + ? 
buf.join('') + : Buffer.concat(buf, buf.dataLength) + ) + } + + // stream.promise().then(() => done, er => emitted error) + promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + stopped = true + return Promise.resolve({ done: true }) + } + const next = () => { + if (stopped) return stop() + const res = this.read() + if (res !== null) return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) return stop() + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + stop() + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + this.removeListener(DESTROYED, ondestroy) + stop() + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { + next, + throw: stop, + return: stop, + [ASYNCITERATOR]() { + return this + }, + } + } + + // for (let chunk of stream) + [ITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + this.removeListener(ERROR, stop) + this.removeListener(DESTROYED, stop) + this.removeListener('end', stop) + stopped = true + return { done: true } + } + + const next = () => { + if (stopped) return stop() + const value = this.read() + return value === null ? 
stop() : { value } + } + this.once('end', stop) + this.once(ERROR, stop) + this.once(DESTROYED, stop) + + return { + next, + throw: stop, + return: stop, + [ITERATOR]() { + return this + }, + } + } + + destroy(er) { + if (this[DESTROYED]) { + if (er) this.emit('error', er) + else this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) this.close() + + if (er) this.emit('error', er) + // if no error to emit, still reject pending promises + else this.emit(DESTROYED) + + return this + } + + static isStream(s) { + return ( + !!s && + (s instanceof Minipass || + s instanceof Stream || + (s instanceof EE && + // readable + (typeof s.pipe === 'function' || + // writable + (typeof s.write === 'function' && typeof s.end === 'function')))) + ) + } +} + + diff --git a/node_modules/tar/node_modules/minipass/package.json b/node_modules/tar/node_modules/minipass/package.json new file mode 100644 index 0000000000000..0e20e988047f2 --- /dev/null +++ b/node_modules/tar/node_modules/minipass/package.json @@ -0,0 +1,76 @@ +{ + "name": "minipass", + "version": "5.0.0", + "description": "minimal implementation of a PassThrough stream", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", + "exports": { + ".": { + "import": { + "types": "./index.d.ts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + } + }, + "./package.json": "./package.json" + }, + "devDependencies": { + "@types/node": "^17.0.41", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "prettier": "^2.6.2", + "tap": "^16.2.0", + "through2": "^2.0.3", + "ts-node": "^10.8.1", + "typedoc": "^0.23.24", + "typescript": "^4.7.3" + }, + "scripts": { + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "node ./scripts/transpile-to-esm.js", + "snap": "tap", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags", + "typedoc": "typedoc ./index.d.ts", + "format": "prettier --write . --loglevel warn" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minipass.git" + }, + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "index.d.ts", + "index.js", + "index.mjs" + ], + "tap": { + "check-coverage": true + }, + "engines": { + "node": ">=8" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/node_modules/ssri/node_modules/minipass/LICENSE b/node_modules/tuf-js/node_modules/minipass/LICENSE similarity index 100% rename from node_modules/ssri/node_modules/minipass/LICENSE rename to node_modules/tuf-js/node_modules/minipass/LICENSE diff --git a/node_modules/tuf-js/node_modules/minipass/index.js b/node_modules/tuf-js/node_modules/minipass/index.js new file mode 100644 index 0000000000000..ed07c17acd97b --- /dev/null +++ b/node_modules/tuf-js/node_modules/minipass/index.js @@ -0,0 +1,702 @@ +'use strict' +const proc = + typeof process === 'object' && process + ? 
process + : { + stdout: null, + stderr: null, + } +const EE = require('events') +const Stream = require('stream') +const stringdecoder = require('string_decoder') +const SD = stringdecoder.StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFER = Symbol('buffer') +const PIPES = Symbol('pipes') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed') +// internal event when stream has an error +const ERROR = Symbol('error') +const EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') +const ABORT = Symbol('abort') +const ABORTED = Symbol('aborted') +const SIGNAL = Symbol('signal') + +const defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = + (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') +const ITERATOR = + (doIter && Symbol.iterator) || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. +const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' + +const isArrayBuffer = b => + b instanceof ArrayBuffer || + (typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0) + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor(src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe() { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors() {} + end() { + this.unpipe() + if (this.opts.end) this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor(src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +class Minipass extends Stream { + constructor(options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this[PIPES] = [] + this[BUFFER] = [] + this[OBJECTMODE] = (options && options.objectMode) || false + if (this[OBJECTMODE]) this[ENCODING] = null + else this[ENCODING] = (options && options.encoding) || null + if (this[ENCODING] === 'buffer') this[ENCODING] = null + this[ASYNC] = (options && !!options.async) || false + this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) + } + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) + } + this[SIGNAL] = options && options.signal + this[ABORTED] = false + if (this[SIGNAL]) { + this[SIGNAL].addEventListener('abort', () => this[ABORT]()) + if (this[SIGNAL].aborted) { + this[ABORT]() + } + } + } + + get bufferLength() { + return this[BUFFERLENGTH] + } + + get encoding() { + return this[ENCODING] + } + set encoding(enc) { + if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') + + if ( + this[ENCODING] && + enc !== this[ENCODING] && + ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) + ) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? new SD(enc) : null + if (this[BUFFER].length) + this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding(enc) { + this.encoding = enc + } + + get objectMode() { + return this[OBJECTMODE] + } + set objectMode(om) { + this[OBJECTMODE] = this[OBJECTMODE] || !!om + } + + get ['async']() { + return this[ASYNC] + } + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a + } + + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true + this.emit('abort', this[SIGNAL].reason) + this.destroy(this[SIGNAL].reason) + } + + get aborted() { + return this[ABORTED] + } + set aborted(_) {} + + write(chunk, encoding, cb) { + if (this[ABORTED]) return false + if (this[EOF]) throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit( + 'error', + Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + ) + ) + return true + } + + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + + if (!encoding) encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? 
*/ + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + if (cb) fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if ( + typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed) + ) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + read(n) { + if (this[DESTROYED]) return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + + if (this[OBJECTMODE]) n = null + + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] + else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this[BUFFER][0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ](n, chunk) { + if (n === chunk.length || n === null) this[BUFFERSHIFT]() + else { + this[BUFFER][0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this[BUFFER].length && !this[EOF]) this.emit('drain') + + return chunk + } + + end(chunk, encoding, cb) { + if (typeof chunk === 'function') (cb = chunk), (chunk = null) + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + if (chunk) this.write(chunk, encoding) + if (cb) this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this[BUFFER].length) this[FLUSH]() + else if (this[EOF]) this[MAYBE_EMIT_END]() + else this.emit('drain') + } + + resume() { + return this[RESUME]() + } + + pause() { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed() { + return this[DESTROYED] + } + + get flowing() { + return this[FLOWING] + } + + get paused() { + return this[PAUSED] + } + + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 + else this[BUFFERLENGTH] += chunk.length + this[BUFFER].push(chunk) + } + + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 + else this[BUFFERLENGTH] -= this[BUFFER][0].length + return this[BUFFER].shift() + } + + [FLUSH](noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) + + if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') + } + + [FLUSHCHUNK](chunk) { + this.emit('data', chunk) + return this.flowing + } + + pipe(dest, opts) { + if (this[DESTROYED]) return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) opts.end = false + else opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) dest.end() + } else { + this[PIPES].push( + !opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts) + ) + if (this[ASYNC]) defer(() => this[RESUME]()) + else this[RESUME]() + } + + return dest + } + + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest) + if (p) { + this[PIPES].splice(this[PIPES].indexOf(p), 1) + p.unpipe() + } + } + + addListener(ev, fn) { + return this.on(ev, fn) + } + + on(ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) + else fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd() { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END]() { + if ( + !this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF] + ) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) this.emit('close') + this[EMITTING_END] = false + } + } + + emit(ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + super.emit(ERROR, data) + const ret = + !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND]() { + if (this[EMITTED_END]) return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) defer(() => this[EMITEND2]()) + else this[EMITEND2]() + } + + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this[PIPES]) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect() { + const buf = [] + if (!this[OBJECTMODE]) buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat() { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] + ? 
buf.join('') + : Buffer.concat(buf, buf.dataLength) + ) + } + + // stream.promise().then(() => done, er => emitted error) + promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + stopped = true + return Promise.resolve({ done: true }) + } + const next = () => { + if (stopped) return stop() + const res = this.read() + if (res !== null) return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) return stop() + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + stop() + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + this.removeListener(DESTROYED, ondestroy) + stop() + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { + next, + throw: stop, + return: stop, + [ASYNCITERATOR]() { + return this + }, + } + } + + // for (let chunk of stream) + [ITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + this.removeListener(ERROR, stop) + this.removeListener(DESTROYED, stop) + this.removeListener('end', stop) + stopped = true + return { done: true } + } + + const next = () => { + if (stopped) return stop() + const value = this.read() + return value === null ? stop() : { value } + } + this.once('end', stop) + this.once(ERROR, stop) + this.once(DESTROYED, stop) + + return { + next, + throw: stop, + return: stop, + [ITERATOR]() { + return this + }, + } + } + + destroy(er) { + if (this[DESTROYED]) { + if (er) this.emit('error', er) + else this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) this.close() + + if (er) this.emit('error', er) + // if no error to emit, still reject pending promises + else this.emit(DESTROYED) + + return this + } + + static isStream(s) { + return ( + !!s && + (s instanceof Minipass || + s instanceof Stream || + (s instanceof EE && + // readable + (typeof s.pipe === 'function' || + // writable + (typeof s.write === 'function' && typeof s.end === 'function')))) + ) + } +} + +exports.Minipass = Minipass diff --git a/node_modules/tuf-js/node_modules/minipass/index.mjs b/node_modules/tuf-js/node_modules/minipass/index.mjs new file mode 100644 index 0000000000000..6ef6cd8cf0703 --- /dev/null +++ b/node_modules/tuf-js/node_modules/minipass/index.mjs @@ -0,0 +1,702 @@ +'use strict' +const proc = + typeof process === 'object' && process + ? 
process + : { + stdout: null, + stderr: null, + } +import EE from 'events' +import Stream from 'stream' +import stringdecoder from 'string_decoder' +const SD = stringdecoder.StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFER = Symbol('buffer') +const PIPES = Symbol('pipes') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed') +// internal event when stream has an error +const ERROR = Symbol('error') +const EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') +const ABORT = Symbol('abort') +const ABORTED = Symbol('aborted') +const SIGNAL = Symbol('signal') + +const defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = + (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') +const ITERATOR = + (doIter && Symbol.iterator) || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. +const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' + +const isArrayBuffer = b => + b instanceof ArrayBuffer || + (typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0) + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor(src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe() { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors() {} + end() { + this.unpipe() + if (this.opts.end) this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor(src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +export class Minipass extends Stream { + constructor(options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this[PIPES] = [] + this[BUFFER] = [] + this[OBJECTMODE] = (options && options.objectMode) || false + if (this[OBJECTMODE]) this[ENCODING] = null + else this[ENCODING] = (options && options.encoding) || null + if (this[ENCODING] === 'buffer') this[ENCODING] = null + this[ASYNC] = (options && !!options.async) || false + this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) + } + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) + } + this[SIGNAL] = options && options.signal + this[ABORTED] = false + if (this[SIGNAL]) { + this[SIGNAL].addEventListener('abort', () => this[ABORT]()) + if (this[SIGNAL].aborted) { + this[ABORT]() + } + } + } + + get bufferLength() { + return this[BUFFERLENGTH] + } + + get encoding() { + return this[ENCODING] + } + set encoding(enc) { + if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') + + if ( + this[ENCODING] && + enc !== this[ENCODING] && + ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) + ) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? new SD(enc) : null + if (this[BUFFER].length) + this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding(enc) { + this.encoding = enc + } + + get objectMode() { + return this[OBJECTMODE] + } + set objectMode(om) { + this[OBJECTMODE] = this[OBJECTMODE] || !!om + } + + get ['async']() { + return this[ASYNC] + } + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a + } + + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true + this.emit('abort', this[SIGNAL].reason) + this.destroy(this[SIGNAL].reason) + } + + get aborted() { + return this[ABORTED] + } + set aborted(_) {} + + write(chunk, encoding, cb) { + if (this[ABORTED]) return false + if (this[EOF]) throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit( + 'error', + Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + ) + ) + return true + } + + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + + if (!encoding) encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? 
*/ + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + if (cb) fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if ( + typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed) + ) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + read(n) { + if (this[DESTROYED]) return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + + if (this[OBJECTMODE]) n = null + + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] + else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this[BUFFER][0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ](n, chunk) { + if (n === chunk.length || n === null) this[BUFFERSHIFT]() + else { + this[BUFFER][0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this[BUFFER].length && !this[EOF]) this.emit('drain') + + return chunk + } + + end(chunk, encoding, cb) { + if (typeof chunk === 'function') (cb = chunk), (chunk = null) + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + if (chunk) this.write(chunk, encoding) + if (cb) this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this[BUFFER].length) this[FLUSH]() + else if (this[EOF]) this[MAYBE_EMIT_END]() + else this.emit('drain') + } + + resume() { + return this[RESUME]() + } + + pause() { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed() { + return this[DESTROYED] + } + + get flowing() { + return this[FLOWING] + } + + get paused() { + return this[PAUSED] + } + + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 + else this[BUFFERLENGTH] += chunk.length + this[BUFFER].push(chunk) + } + + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 + else this[BUFFERLENGTH] -= this[BUFFER][0].length + return this[BUFFER].shift() + } + + [FLUSH](noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) + + if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') + } + + [FLUSHCHUNK](chunk) { + this.emit('data', chunk) + return this.flowing + } + + pipe(dest, opts) { + if (this[DESTROYED]) return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) opts.end = false + else opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) dest.end() + } else { + this[PIPES].push( + !opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts) + ) + if (this[ASYNC]) defer(() => this[RESUME]()) + else this[RESUME]() + } + + return dest + } + + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest) + if (p) { + this[PIPES].splice(this[PIPES].indexOf(p), 1) + p.unpipe() + } + } + + addListener(ev, fn) { + return this.on(ev, fn) + } + + on(ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) + else fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd() { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END]() { + if ( + !this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF] + ) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) this.emit('close') + this[EMITTING_END] = false + } + } + + emit(ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + super.emit(ERROR, data) + const ret = + !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND]() { + if (this[EMITTED_END]) return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) defer(() => this[EMITEND2]()) + else this[EMITEND2]() + } + + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this[PIPES]) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect() { + const buf = [] + if (!this[OBJECTMODE]) buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat() { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] + ? 
buf.join('') + : Buffer.concat(buf, buf.dataLength) + ) + } + + // stream.promise().then(() => done, er => emitted error) + promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + stopped = true + return Promise.resolve({ done: true }) + } + const next = () => { + if (stopped) return stop() + const res = this.read() + if (res !== null) return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) return stop() + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + stop() + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + this.removeListener(DESTROYED, ondestroy) + stop() + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { + next, + throw: stop, + return: stop, + [ASYNCITERATOR]() { + return this + }, + } + } + + // for (let chunk of stream) + [ITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + this.removeListener(ERROR, stop) + this.removeListener(DESTROYED, stop) + this.removeListener('end', stop) + stopped = true + return { done: true } + } + + const next = () => { + if (stopped) return stop() + const value = this.read() + return value === null ? 
stop() : { value } + } + this.once('end', stop) + this.once(ERROR, stop) + this.once(DESTROYED, stop) + + return { + next, + throw: stop, + return: stop, + [ITERATOR]() { + return this + }, + } + } + + destroy(er) { + if (this[DESTROYED]) { + if (er) this.emit('error', er) + else this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) this.close() + + if (er) this.emit('error', er) + // if no error to emit, still reject pending promises + else this.emit(DESTROYED) + + return this + } + + static isStream(s) { + return ( + !!s && + (s instanceof Minipass || + s instanceof Stream || + (s instanceof EE && + // readable + (typeof s.pipe === 'function' || + // writable + (typeof s.write === 'function' && typeof s.end === 'function')))) + ) + } +} + + diff --git a/node_modules/tuf-js/node_modules/minipass/package.json b/node_modules/tuf-js/node_modules/minipass/package.json new file mode 100644 index 0000000000000..0e20e988047f2 --- /dev/null +++ b/node_modules/tuf-js/node_modules/minipass/package.json @@ -0,0 +1,76 @@ +{ + "name": "minipass", + "version": "5.0.0", + "description": "minimal implementation of a PassThrough stream", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", + "exports": { + ".": { + "import": { + "types": "./index.d.ts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + } + }, + "./package.json": "./package.json" + }, + "devDependencies": { + "@types/node": "^17.0.41", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "prettier": "^2.6.2", + "tap": "^16.2.0", + "through2": "^2.0.3", + "ts-node": "^10.8.1", + "typedoc": "^0.23.24", + "typescript": "^4.7.3" + }, + "scripts": { + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "node ./scripts/transpile-to-esm.js", + "snap": "tap", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags", + "typedoc": "typedoc ./index.d.ts", + "format": "prettier --write . --loglevel warn" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minipass.git" + }, + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "index.d.ts", + "index.js", + "index.mjs" + ], + "tap": { + "check-coverage": true + }, + "engines": { + "node": ">=8" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/package-lock.json b/package-lock.json index 7b695e11b2876..0cb2e813fea21 100644 --- a/package-lock.json +++ b/package-lock.json @@ -124,7 +124,7 @@ "libnpmversion": "^4.0.2", "make-fetch-happen": "^12.0.0", "minimatch": "^9.0.3", - "minipass": "^5.0.0", + "minipass": "^7.0.3", "minipass-pipeline": "^1.2.4", "ms": "^2.1.2", "node-gyp": "^9.4.0", @@ -2498,6 +2498,14 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/@npmcli/metavuln-calculator/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "engines": { + "node": ">=8" + } + }, "node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch": { "version": "14.0.5", "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-14.0.5.tgz", @@ -3589,15 +3597,6 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, - "node_modules/cacache/node_modules/minipass": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz", - "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==", - "inBundle": true, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/caching-transform": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz", @@ -5867,15 +5866,6 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, - "node_modules/fs-minipass/node_modules/minipass": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz", - "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==", - "inBundle": true, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -7991,15 +7981,6 @@ "node": "^16.13.0 || >=18.0.0" } }, - "node_modules/make-fetch-happen/node_modules/minipass": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz", - "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==", - "inBundle": true, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/map-obj": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz", @@ -9084,12 +9065,12 @@ } }, "node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz", + "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==", "inBundle": true, "engines": { - "node": ">=8" + "node": ">=16 || 14 >=14.17" } }, 
"node_modules/minipass-collect": { @@ -9133,6 +9114,15 @@ "encoding": "^0.1.13" } }, + "node_modules/minipass-fetch/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "inBundle": true, + "engines": { + "node": ">=8" + } + }, "node_modules/minipass-flush": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", @@ -9494,6 +9484,15 @@ "node": "*" } }, + "node_modules/node-gyp/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "inBundle": true, + "engines": { + "node": ">=8" + } + }, "node_modules/node-gyp/node_modules/nopt": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/nopt/-/nopt-6.0.0.tgz", @@ -9747,15 +9746,6 @@ "node": "^16.13.0 || >=18.0.0" } }, - "node_modules/npm-registry-fetch/node_modules/minipass": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.2.tgz", - "integrity": "sha512-eL79dXrE1q9dBbDCLg7xfn/vl7MS4F1gvJAgjJrQli/jbQWdUttuVawphqpffoIYfRdq78LHx6GP4bU/EQ2ATA==", - "inBundle": true, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/npm-run-path": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", @@ -10321,15 +10311,6 @@ "node": "^16.13.0 || >=18.0.0" } }, - "node_modules/pacote/node_modules/minipass": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.2.tgz", - "integrity": "sha512-eL79dXrE1q9dBbDCLg7xfn/vl7MS4F1gvJAgjJrQli/jbQWdUttuVawphqpffoIYfRdq78LHx6GP4bU/EQ2ATA==", - "inBundle": true, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -11704,6 +11685,15 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/sigstore/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "inBundle": true, + "engines": { + "node": ">=8" + } + }, "node_modules/slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", @@ -11979,15 +11969,6 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, - "node_modules/ssri/node_modules/minipass": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz", - "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==", - "inBundle": true, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/stack-utils": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", @@ -14503,6 +14484,15 @@ "node": ">=8" } }, + "node_modules/tar/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "inBundle": true, + "engines": { + "node": ">=8" + } + }, "node_modules/tcompare": { "version": "5.0.7", 
"resolved": "https://registry.npmjs.org/tcompare/-/tcompare-5.0.7.tgz", @@ -14856,6 +14846,15 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/tuf-js/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "inBundle": true, + "engines": { + "node": ">=8" + } + }, "node_modules/tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", @@ -16000,7 +15999,7 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.18.0", - "minipass": "^5.0.0", + "minipass": "^7.0.3", "nock": "^13.3.0", "tap": "^16.3.4" }, diff --git a/package.json b/package.json index c3df86759868a..946a3c8a4cafc 100644 --- a/package.json +++ b/package.json @@ -89,7 +89,7 @@ "libnpmversion": "^4.0.2", "make-fetch-happen": "^12.0.0", "minimatch": "^9.0.3", - "minipass": "^5.0.0", + "minipass": "^7.0.3", "minipass-pipeline": "^1.2.4", "ms": "^2.1.2", "node-gyp": "^9.4.0", diff --git a/workspaces/libnpmorg/package.json b/workspaces/libnpmorg/package.json index 03e58a2fc13f2..8cef0bd762af6 100644 --- a/workspaces/libnpmorg/package.json +++ b/workspaces/libnpmorg/package.json @@ -29,7 +29,7 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.18.0", - "minipass": "^5.0.0", + "minipass": "^7.0.3", "nock": "^13.3.0", "tap": "^16.3.4" }, From b886c3b62014844ceea85f9f13f39ba616db911c Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Mon, 14 Aug 2023 13:12:06 -0700 Subject: [PATCH 26/68] deps: @npmcli/metavuln-calculator@6.0.1 --- DEPENDENCIES.md | 2 +- node_modules/.gitignore | 6 - .../node_modules/make-fetch-happen/LICENSE | 16 - .../make-fetch-happen/lib/agent.js | 214 ------ .../make-fetch-happen/lib/cache/entry.js | 469 ------------ .../make-fetch-happen/lib/cache/errors.js | 11 - .../make-fetch-happen/lib/cache/index.js | 49 -- .../make-fetch-happen/lib/cache/key.js | 17 - .../make-fetch-happen/lib/cache/policy.js | 161 ---- .../node_modules/make-fetch-happen/lib/dns.js | 49 -- .../make-fetch-happen/lib/fetch.js | 118 --- .../make-fetch-happen/lib/index.js | 41 - .../make-fetch-happen/lib/options.js | 54 -- .../make-fetch-happen/lib/pipeline.js | 41 - .../make-fetch-happen/lib/remote.js | 121 --- .../make-fetch-happen/package.json | 78 -- .../node_modules/minipass/LICENSE | 15 - .../node_modules/minipass/index.js | 702 ------------------ .../node_modules/minipass/index.mjs | 702 ------------------ .../node_modules/minipass/package.json | 76 -- .../npm-registry-fetch/LICENSE.md | 20 - .../npm-registry-fetch/lib/auth.js | 145 ---- .../npm-registry-fetch/lib/check-response.js | 100 --- .../npm-registry-fetch/lib/clean-url.js | 27 - .../npm-registry-fetch/lib/default-opts.js | 19 - .../npm-registry-fetch/lib/errors.js | 80 -- .../npm-registry-fetch/lib/index.js | 247 ------ .../npm-registry-fetch/package.json | 67 -- .../node_modules/pacote/LICENSE | 15 - .../node_modules/pacote/lib/bin.js | 158 ---- .../node_modules/pacote/lib/dir.js | 108 --- .../node_modules/pacote/lib/fetcher.js | 505 ------------- .../node_modules/pacote/lib/file.js | 96 --- .../node_modules/pacote/lib/git.js | 327 -------- .../node_modules/pacote/lib/index.js | 23 - .../node_modules/pacote/lib/registry.js | 344 --------- .../node_modules/pacote/lib/remote.js | 91 --- .../pacote/lib/util/add-git-sha.js | 15 - .../node_modules/pacote/lib/util/cache-dir.js | 15 - 
.../pacote/lib/util/is-package-bin.js | 25 - .../node_modules/pacote/lib/util/npm.js | 14 - .../pacote/lib/util/tar-create-options.js | 31 - .../pacote/lib/util/trailing-slashes.js | 10 - .../node_modules/pacote/package.json | 79 -- .../@npmcli/metavuln-calculator/package.json | 4 +- package-lock.json | 91 +-- workspaces/arborist/package.json | 2 +- 47 files changed, 9 insertions(+), 5591 deletions(-) delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/LICENSE delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/agent.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/entry.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/errors.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/index.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/key.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/policy.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/dns.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/fetch.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/index.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/options.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/pipeline.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/remote.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/package.json delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/minipass/LICENSE delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/minipass/index.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/minipass/index.mjs delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/minipass/package.json delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/LICENSE.md delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/auth.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/check-response.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/clean-url.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/default-opts.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/errors.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/index.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/package.json delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE delete mode 100755 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js delete mode 100644 
node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md index eeeea398ab182..324dbb190ca34 100644 --- a/DEPENDENCIES.md +++ b/DEPENDENCIES.md @@ -830,4 +830,4 @@ packages higher up the chain. - @npmcli/git, make-fetch-happen, @npmcli/config, init-package-json - @npmcli/installed-package-contents, @npmcli/map-workspaces, cacache, npm-pick-manifest, @npmcli/run-script, read-package-json, promzard - @npmcli/docs, @npmcli/fs, npm-bundled, read-package-json-fast, unique-filename, npm-install-checks, npm-package-arg, npm-packlist, normalize-package-data, bin-links, nopt, npmlog, parse-conflict-json, @npmcli/mock-globals, read - - @npmcli/eslint-config, @npmcli/template-oss, ignore-walk, semver, npm-normalize-package-bin, @npmcli/name-from-folder, json-parse-even-better-errors, fs-minipass, ssri, unique-slug, @npmcli/promise-spawn, hosted-git-info, proc-log, validate-npm-package-name, @npmcli/node-gyp, minipass-fetch, @npmcli/query, cmd-shim, read-cmd-shim, write-file-atomic, abbrev, are-we-there-yet, gauge, minify-registry-metadata, ini, @npmcli/disparity-colors, mute-stream, @npmcli/agent, npm-audit-report, npm-user-validate + - @npmcli/eslint-config, @npmcli/template-oss, ignore-walk, semver, npm-normalize-package-bin, @npmcli/name-from-folder, json-parse-even-better-errors, fs-minipass, ssri, unique-slug, @npmcli/promise-spawn, hosted-git-info, proc-log, validate-npm-package-name, @npmcli/node-gyp, @npmcli/agent, minipass-fetch, @npmcli/query, cmd-shim, read-cmd-shim, write-file-atomic, abbrev, are-we-there-yet, gauge, minify-registry-metadata, ini, @npmcli/disparity-colors, mute-stream, npm-audit-report, npm-user-validate diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 4c5a8843caff7..0ea07f6b415c3 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -25,12 +25,6 @@ !/@npmcli/installed-package-contents !/@npmcli/map-workspaces !/@npmcli/metavuln-calculator -!/@npmcli/metavuln-calculator/node_modules/ -/@npmcli/metavuln-calculator/node_modules/* -!/@npmcli/metavuln-calculator/node_modules/make-fetch-happen -!/@npmcli/metavuln-calculator/node_modules/minipass -!/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch -!/@npmcli/metavuln-calculator/node_modules/pacote !/@npmcli/name-from-folder !/@npmcli/node-gyp !/@npmcli/package-json diff --git 
a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/LICENSE deleted file mode 100644 index 1808eb2844231..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/LICENSE +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright 2017-2022 (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/agent.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/agent.js deleted file mode 100644 index dd68492ed7ea7..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/agent.js +++ /dev/null @@ -1,214 +0,0 @@ -'use strict' -const LRU = require('lru-cache') -const url = require('url') -const isLambda = require('is-lambda') -const dns = require('./dns.js') - -const AGENT_CACHE = new LRU({ max: 50 }) -const HttpAgent = require('agentkeepalive') -const HttpsAgent = HttpAgent.HttpsAgent - -module.exports = getAgent - -const getAgentTimeout = timeout => - typeof timeout !== 'number' || !timeout ? 0 : timeout + 1 - -const getMaxSockets = maxSockets => maxSockets || 15 - -function getAgent (uri, opts) { - const parsedUri = new url.URL(typeof uri === 'string' ? uri : uri.url) - const isHttps = parsedUri.protocol === 'https:' - const pxuri = getProxyUri(parsedUri.href, opts) - - // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout - // of zero disables the timeout behavior (OS limits still apply). Else, if - // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that - // the node-fetch-npm timeout will always fire first, giving us more - // consistent errors. - const agentTimeout = getAgentTimeout(opts.timeout) - const agentMaxSockets = getMaxSockets(opts.maxSockets) - - const key = [ - `https:${isHttps}`, - pxuri - ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}` - : '>no-proxy<', - `local-address:${opts.localAddress || '>no-local-address<'}`, - `strict-ssl:${isHttps ? opts.rejectUnauthorized : '>no-strict-ssl<'}`, - `ca:${(isHttps && opts.ca) || '>no-ca<'}`, - `cert:${(isHttps && opts.cert) || '>no-cert<'}`, - `key:${(isHttps && opts.key) || '>no-key<'}`, - `timeout:${agentTimeout}`, - `maxSockets:${agentMaxSockets}`, - ].join(':') - - if (opts.agent != null) { // `agent: false` has special behavior! - return opts.agent - } - - // keep alive in AWS lambda makes no sense - const lambdaAgent = !isLambda ? null - : isHttps ? require('https').globalAgent - : require('http').globalAgent - - if (isLambda && !pxuri) { - return lambdaAgent - } - - if (AGENT_CACHE.peek(key)) { - return AGENT_CACHE.get(key) - } - - if (pxuri) { - const pxopts = isLambda ? 
{ - ...opts, - agent: lambdaAgent, - } : opts - const proxy = getProxy(pxuri, pxopts, isHttps) - AGENT_CACHE.set(key, proxy) - return proxy - } - - const agent = isHttps ? new HttpsAgent({ - maxSockets: agentMaxSockets, - ca: opts.ca, - cert: opts.cert, - key: opts.key, - localAddress: opts.localAddress, - rejectUnauthorized: opts.rejectUnauthorized, - timeout: agentTimeout, - freeSocketTimeout: 15000, - lookup: dns.getLookup(opts.dns), - }) : new HttpAgent({ - maxSockets: agentMaxSockets, - localAddress: opts.localAddress, - timeout: agentTimeout, - freeSocketTimeout: 15000, - lookup: dns.getLookup(opts.dns), - }) - AGENT_CACHE.set(key, agent) - return agent -} - -function checkNoProxy (uri, opts) { - const host = new url.URL(uri).hostname.split('.').reverse() - let noproxy = (opts.noProxy || getProcessEnv('no_proxy')) - if (typeof noproxy === 'string') { - noproxy = noproxy.split(',').map(n => n.trim()) - } - - return noproxy && noproxy.some(no => { - const noParts = no.split('.').filter(x => x).reverse() - if (!noParts.length) { - return false - } - for (let i = 0; i < noParts.length; i++) { - if (host[i] !== noParts[i]) { - return false - } - } - return true - }) -} - -module.exports.getProcessEnv = getProcessEnv - -function getProcessEnv (env) { - if (!env) { - return - } - - let value - - if (Array.isArray(env)) { - for (const e of env) { - value = process.env[e] || - process.env[e.toUpperCase()] || - process.env[e.toLowerCase()] - if (typeof value !== 'undefined') { - break - } - } - } - - if (typeof env === 'string') { - value = process.env[env] || - process.env[env.toUpperCase()] || - process.env[env.toLowerCase()] - } - - return value -} - -module.exports.getProxyUri = getProxyUri -function getProxyUri (uri, opts) { - const protocol = new url.URL(uri).protocol - - const proxy = opts.proxy || - ( - protocol === 'https:' && - getProcessEnv('https_proxy') - ) || - ( - protocol === 'http:' && - getProcessEnv(['https_proxy', 'http_proxy', 'proxy']) - ) - if (!proxy) { - return null - } - - const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy - - return !checkNoProxy(uri, opts) && parsedProxy -} - -const getAuth = u => - u.username && u.password ? decodeURIComponent(`${u.username}:${u.password}`) - : u.username ? 
decodeURIComponent(u.username) - : null - -const getPath = u => u.pathname + u.search + u.hash - -const HttpProxyAgent = require('http-proxy-agent') -const HttpsProxyAgent = require('https-proxy-agent') -const { SocksProxyAgent } = require('socks-proxy-agent') -module.exports.getProxy = getProxy -function getProxy (proxyUrl, opts, isHttps) { - // our current proxy agents do not support an overridden dns lookup method, so will not - // benefit from the dns cache - const popts = { - host: proxyUrl.hostname, - port: proxyUrl.port, - protocol: proxyUrl.protocol, - path: getPath(proxyUrl), - auth: getAuth(proxyUrl), - ca: opts.ca, - cert: opts.cert, - key: opts.key, - timeout: getAgentTimeout(opts.timeout), - localAddress: opts.localAddress, - maxSockets: getMaxSockets(opts.maxSockets), - rejectUnauthorized: opts.rejectUnauthorized, - } - - if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') { - if (!isHttps) { - return new HttpProxyAgent(popts) - } else { - return new HttpsProxyAgent(popts) - } - } else if (proxyUrl.protocol.startsWith('socks')) { - // socks-proxy-agent uses hostname not host - popts.hostname = popts.host - delete popts.host - return new SocksProxyAgent(popts) - } else { - throw Object.assign( - new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`), - { - code: 'EUNSUPPORTEDPROXY', - url: proxyUrl.href, - } - ) - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/entry.js deleted file mode 100644 index 45141095074ec..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/entry.js +++ /dev/null @@ -1,469 +0,0 @@ -const { Request, Response } = require('minipass-fetch') -const { Minipass } = require('minipass') -const MinipassFlush = require('minipass-flush') -const cacache = require('cacache') -const url = require('url') - -const CachingMinipassPipeline = require('../pipeline.js') -const CachePolicy = require('./policy.js') -const cacheKey = require('./key.js') -const remote = require('../remote.js') - -const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) - -// allow list for request headers that will be written to the cache index -// note: we will also store any request headers -// that are named in a response's vary header -const KEEP_REQUEST_HEADERS = [ - 'accept-charset', - 'accept-encoding', - 'accept-language', - 'accept', - 'cache-control', -] - -// allow list for response headers that will be written to the cache index -// note: we must not store the real response's age header, or when we load -// a cache policy based on the metadata it will think the cached response -// is always stale -const KEEP_RESPONSE_HEADERS = [ - 'cache-control', - 'content-encoding', - 'content-language', - 'content-type', - 'date', - 'etag', - 'expires', - 'last-modified', - 'link', - 'location', - 'pragma', - 'vary', -] - -// return an object containing all metadata to be written to the index -const getMetadata = (request, response, options) => { - const metadata = { - time: Date.now(), - url: request.url, - reqHeaders: {}, - resHeaders: {}, - - // options on which we must match the request and vary the response - options: { - compress: options.compress != null ? 
options.compress : request.compress, - }, - } - - // only save the status if it's not a 200 or 304 - if (response.status !== 200 && response.status !== 304) { - metadata.status = response.status - } - - for (const name of KEEP_REQUEST_HEADERS) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - - // if the request's host header differs from the host in the url - // we need to keep it, otherwise it's just noise and we ignore it - const host = request.headers.get('host') - const parsedUrl = new url.URL(request.url) - if (host && parsedUrl.host !== host) { - metadata.reqHeaders.host = host - } - - // if the response has a vary header, make sure - // we store the relevant request headers too - if (response.headers.has('vary')) { - const vary = response.headers.get('vary') - // a vary of "*" means every header causes a different response. - // in that scenario, we do not include any additional headers - // as the freshness check will always fail anyway and we don't - // want to bloat the cache indexes - if (vary !== '*') { - // copy any other request headers that will vary the response - const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) - for (const name of varyHeaders) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - } - } - - for (const name of KEEP_RESPONSE_HEADERS) { - if (response.headers.has(name)) { - metadata.resHeaders[name] = response.headers.get(name) - } - } - - for (const name of options.cacheAdditionalHeaders) { - if (response.headers.has(name)) { - metadata.resHeaders[name] = response.headers.get(name) - } - } - - return metadata -} - -// symbols used to hide objects that may be lazily evaluated in a getter -const _request = Symbol('request') -const _response = Symbol('response') -const _policy = Symbol('policy') - -class CacheEntry { - constructor ({ entry, request, response, options }) { - if (entry) { - this.key = entry.key - this.entry = entry - // previous versions of this module didn't write an explicit timestamp in - // the metadata, so fall back to the entry's timestamp. 
we can't use the - // entry timestamp to determine staleness because cacache will update it - // when it verifies its data - this.entry.metadata.time = this.entry.metadata.time || this.entry.time - } else { - this.key = cacheKey(request) - } - - this.options = options - - // these properties are behind getters that lazily evaluate - this[_request] = request - this[_response] = response - this[_policy] = null - } - - // returns a CacheEntry instance that satisfies the given request - // or undefined if no existing entry satisfies - static async find (request, options) { - try { - // compacts the index and returns an array of unique entries - var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { - const entryA = new CacheEntry({ entry: A, options }) - const entryB = new CacheEntry({ entry: B, options }) - return entryA.policy.satisfies(entryB.request) - }, { - validateEntry: (entry) => { - // clean out entries with a buggy content-encoding value - if (entry.metadata && - entry.metadata.resHeaders && - entry.metadata.resHeaders['content-encoding'] === null) { - return false - } - - // if an integrity is null, it needs to have a status specified - if (entry.integrity === null) { - return !!(entry.metadata && entry.metadata.status) - } - - return true - }, - }) - } catch (err) { - // if the compact request fails, ignore the error and return - return - } - - // a cache mode of 'reload' means to behave as though we have no cache - // on the way to the network. return undefined to allow cacheFetch to - // create a brand new request no matter what. - if (options.cache === 'reload') { - return - } - - // find the specific entry that satisfies the request - let match - for (const entry of matches) { - const _entry = new CacheEntry({ - entry, - options, - }) - - if (_entry.policy.satisfies(request)) { - match = _entry - break - } - } - - return match - } - - // if the user made a PUT/POST/PATCH then we invalidate our - // cache for the same url by deleting the index entirely - static async invalidate (request, options) { - const key = cacheKey(request) - try { - await cacache.rm.entry(options.cachePath, key, { removeFully: true }) - } catch (err) { - // ignore errors - } - } - - get request () { - if (!this[_request]) { - this[_request] = new Request(this.entry.metadata.url, { - method: 'GET', - headers: this.entry.metadata.reqHeaders, - ...this.entry.metadata.options, - }) - } - - return this[_request] - } - - get response () { - if (!this[_response]) { - this[_response] = new Response(null, { - url: this.entry.metadata.url, - counter: this.options.counter, - status: this.entry.metadata.status || 200, - headers: { - ...this.entry.metadata.resHeaders, - 'content-length': this.entry.size, - }, - }) - } - - return this[_response] - } - - get policy () { - if (!this[_policy]) { - this[_policy] = new CachePolicy({ - entry: this.entry, - request: this.request, - response: this.response, - options: this.options, - }) - } - - return this[_policy] - } - - // wraps the response in a pipeline that stores the data - // in the cache while the user consumes it - async store (status) { - // if we got a status other than 200, 301, or 308, - // or the CachePolicy forbid storage, append the - // cache status header and return it untouched - if ( - this.request.method !== 'GET' || - ![200, 301, 308].includes(this.response.status) || - !this.policy.storable() - ) { - this.response.headers.set('x-local-cache-status', 'skip') - return this.response - } - - const size = 
this.response.headers.get('content-length') - const cacheOpts = { - algorithms: this.options.algorithms, - metadata: getMetadata(this.request, this.response, this.options), - size, - integrity: this.options.integrity, - integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, - } - - let body = null - // we only set a body if the status is a 200, redirects are - // stored as metadata only - if (this.response.status === 200) { - let cacheWriteResolve, cacheWriteReject - const cacheWritePromise = new Promise((resolve, reject) => { - cacheWriteResolve = resolve - cacheWriteReject = reject - }) - - body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ - flush () { - return cacheWritePromise - }, - })) - // this is always true since if we aren't reusing the one from the remote fetch, we - // are using the one from cacache - body.hasIntegrityEmitter = true - - const onResume = () => { - const tee = new Minipass() - const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) - // re-emit the integrity and size events on our new response body so they can be reused - cacheStream.on('integrity', i => body.emit('integrity', i)) - cacheStream.on('size', s => body.emit('size', s)) - // stick a flag on here so downstream users will know if they can expect integrity events - tee.pipe(cacheStream) - // TODO if the cache write fails, log a warning but return the response anyway - // eslint-disable-next-line promise/catch-or-return - cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) - body.unshift(tee) - body.unshift(this.response.body) - } - - body.once('resume', onResume) - body.once('end', () => body.removeListener('resume', onResume)) - } else { - await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) - } - - // note: we do not set the x-local-cache-hash header because we do not know - // the hash value until after the write to the cache completes, which doesn't - // happen until after the response has been sent and it's too late to write - // the header anyway - this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) - this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) - this.response.headers.set('x-local-cache-mode', 'stream') - this.response.headers.set('x-local-cache-status', status) - this.response.headers.set('x-local-cache-time', new Date().toISOString()) - const newResponse = new Response(body, { - url: this.response.url, - status: this.response.status, - headers: this.response.headers, - counter: this.options.counter, - }) - return newResponse - } - - // use the cached data to create a response and return it - async respond (method, options, status) { - let response - if (method === 'HEAD' || [301, 308].includes(this.response.status)) { - // if the request is a HEAD, or the response is a redirect, - // then the metadata in the entry already includes everything - // we need to build a response - response = this.response - } else { - // we're responding with a full cached response, so create a body - // that reads from cacache and attach it to a new Response - const body = new Minipass() - const headers = { ...this.policy.responseHeaders() } - - const onResume = () => { - const cacheStream = cacache.get.stream.byDigest( - this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } - ) - cacheStream.on('error', async (err) => { - cacheStream.pause() - if (err.code === 'EINTEGRITY') { - await 
cacache.rm.content( - this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } - ) - } - if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { - await CacheEntry.invalidate(this.request, this.options) - } - body.emit('error', err) - cacheStream.resume() - }) - // emit the integrity and size events based on our metadata so we're consistent - body.emit('integrity', this.entry.integrity) - body.emit('size', Number(headers['content-length'])) - cacheStream.pipe(body) - } - - body.once('resume', onResume) - body.once('end', () => body.removeListener('resume', onResume)) - response = new Response(body, { - url: this.entry.metadata.url, - counter: options.counter, - status: 200, - headers, - }) - } - - response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) - response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) - response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) - response.headers.set('x-local-cache-mode', 'stream') - response.headers.set('x-local-cache-status', status) - response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) - return response - } - - // use the provided request along with this cache entry to - // revalidate the stored response. returns a response, either - // from the cache or from the update - async revalidate (request, options) { - const revalidateRequest = new Request(request, { - headers: this.policy.revalidationHeaders(request), - }) - - try { - // NOTE: be sure to remove the headers property from the - // user supplied options, since we have already defined - // them on the new request object. if they're still in the - // options then those will overwrite the ones from the policy - var response = await remote(revalidateRequest, { - ...options, - headers: undefined, - }) - } catch (err) { - // if the network fetch fails, return the stale - // cached response unless it has a cache-control - // of 'must-revalidate' - if (!this.policy.mustRevalidate) { - return this.respond(request.method, options, 'stale') - } - - throw err - } - - if (this.policy.revalidated(revalidateRequest, response)) { - // we got a 304, write a new index to the cache and respond from cache - const metadata = getMetadata(request, response, options) - // 304 responses do not include headers that are specific to the response data - // since they do not include a body, so we copy values for headers that were - // in the old cache entry to the new one, if the new metadata does not already - // include that header - for (const name of KEEP_RESPONSE_HEADERS) { - if ( - !hasOwnProperty(metadata.resHeaders, name) && - hasOwnProperty(this.entry.metadata.resHeaders, name) - ) { - metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] - } - } - - for (const name of options.cacheAdditionalHeaders) { - const inMeta = hasOwnProperty(metadata.resHeaders, name) - const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) - const inPolicy = hasOwnProperty(this.policy.response.headers, name) - - // if the header is in the existing entry, but it is not in the metadata - // then we need to write it to the metadata as this will refresh the on-disk cache - if (!inMeta && inEntry) { - metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] - } - // if the header is in the metadata, but not in the policy, then we need to set - // it in the policy so that it's included in the immediate response. 
future - // responses will load a new cache entry, so we don't need to change that - if (!inPolicy && inMeta) { - this.policy.response.headers[name] = metadata.resHeaders[name] - } - } - - try { - await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { - size: this.entry.size, - metadata, - }) - } catch (err) { - // if updating the cache index fails, we ignore it and - // respond anyway - } - return this.respond(request.method, options, 'revalidated') - } - - // if we got a modified response, create a new entry based on it - const newEntry = new CacheEntry({ - request, - response, - options, - }) - - // respond with the new entry while writing it to the cache - return newEntry.store('updated') - } -} - -module.exports = CacheEntry diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/errors.js deleted file mode 100644 index 67a66573bebe6..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/errors.js +++ /dev/null @@ -1,11 +0,0 @@ -class NotCachedError extends Error { - constructor (url) { - /* eslint-disable-next-line max-len */ - super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) - this.code = 'ENOTCACHED' - } -} - -module.exports = { - NotCachedError, -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/index.js deleted file mode 100644 index 0de49d23fb933..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/index.js +++ /dev/null @@ -1,49 +0,0 @@ -const { NotCachedError } = require('./errors.js') -const CacheEntry = require('./entry.js') -const remote = require('../remote.js') - -// do whatever is necessary to get a Response and return it -const cacheFetch = async (request, options) => { - // try to find a cached entry that satisfies this request - const entry = await CacheEntry.find(request, options) - if (!entry) { - // no cached result, if the cache mode is 'only-if-cached' that's a failure - if (options.cache === 'only-if-cached') { - throw new NotCachedError(request.url) - } - - // otherwise, we make a request, store it and return it - const response = await remote(request, options) - const newEntry = new CacheEntry({ request, response, options }) - return newEntry.store('miss') - } - - // we have a cached response that satisfies this request, however if the cache - // mode is 'no-cache' then we send the revalidation request no matter what - if (options.cache === 'no-cache') { - return entry.revalidate(request, options) - } - - // if the cached entry is not stale, or if the cache mode is 'force-cache' or - // 'only-if-cached' we can respond with the cached entry. set the status - // based on the result of needsRevalidation and respond - const _needsRevalidation = entry.policy.needsRevalidation(request) - if (options.cache === 'force-cache' || - options.cache === 'only-if-cached' || - !_needsRevalidation) { - return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') - } - - // if we got here, the cache entry is stale so revalidate it - return entry.revalidate(request, options) -} - -cacheFetch.invalidate = async (request, options) => { - if (!options.cachePath) { - return - } - - return CacheEntry.invalidate(request, options) -} - -module.exports = cacheFetch diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/key.js deleted file mode 100644 index f7684d562b7fa..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/key.js +++ /dev/null @@ -1,17 +0,0 @@ -const { URL, format } = require('url') - -// options passed to url.format() when generating a key -const formatOptions = { - auth: false, - fragment: false, - search: true, - unicode: false, -} - -// returns a string to be used as the cache key for the Request -const cacheKey = (request) => { - const parsed = new URL(request.url) - return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` -} - -module.exports = cacheKey diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/policy.js deleted file mode 100644 index ada3c8600dae9..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/cache/policy.js +++ /dev/null @@ -1,161 +0,0 @@ -const CacheSemantics = require('http-cache-semantics') -const Negotiator = require('negotiator') -const ssri = require('ssri') - -// options passed to http-cache-semantics constructor -const policyOptions = { - shared: false, - ignoreCargoCult: true, -} - -// a fake empty response, used when only testing the -// request for storability -const emptyResponse = { status: 200, headers: {} } - -// returns a plain object representation of the Request -const requestObject = (request) => { - const _obj = { - method: request.method, - url: request.url, - headers: {}, - compress: request.compress, - } - - request.headers.forEach((value, key) => { - _obj.headers[key] = value - }) - - return _obj -} - -// returns a plain object representation of the Response -const responseObject = (response) => { - const _obj = { - status: response.status, - headers: {}, - } - - response.headers.forEach((value, key) => { - _obj.headers[key] = value - }) - - return _obj -} - -class CachePolicy { - constructor ({ entry, request, response, options }) { - this.entry = entry - this.request = requestObject(request) - this.response = responseObject(response) - this.options = options - this.policy = new CacheSemantics(this.request, this.response, policyOptions) - - if (this.entry) { - // if we have an entry, copy the timestamp to the _responseTime - // this is necessary because the CacheSemantics constructor forces - // the value to Date.now() which means a policy created from a - // cache entry is likely to always identify itself as stale - this.policy._responseTime = this.entry.metadata.time - } - } - - // static method to quickly determine if a request alone is storable - static storable (request, options) { - // no cachePath means no caching - if (!options.cachePath) { - return false - } - - // user explicitly asked not to cache - if (options.cache === 'no-store') { - return false - } - - // we only cache GET and HEAD requests - if (!['GET', 'HEAD'].includes(request.method)) { - return false - } - - // otherwise, let 
http-cache-semantics make the decision - // based on the request's headers - const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) - return policy.storable() - } - - // returns true if the policy satisfies the request - satisfies (request) { - const _req = requestObject(request) - if (this.request.headers.host !== _req.headers.host) { - return false - } - - if (this.request.compress !== _req.compress) { - return false - } - - const negotiatorA = new Negotiator(this.request) - const negotiatorB = new Negotiator(_req) - - if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { - return false - } - - if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { - return false - } - - if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { - return false - } - - if (this.options.integrity) { - return ssri.parse(this.options.integrity).match(this.entry.integrity) - } - - return true - } - - // returns true if the request and response allow caching - storable () { - return this.policy.storable() - } - - // NOTE: this is a hack to avoid parsing the cache-control - // header ourselves, it returns true if the response's - // cache-control contains must-revalidate - get mustRevalidate () { - return !!this.policy._rescc['must-revalidate'] - } - - // returns true if the cached response requires revalidation - // for the given request - needsRevalidation (request) { - const _req = requestObject(request) - // force method to GET because we only cache GETs - // but can serve a HEAD from a cached GET - _req.method = 'GET' - return !this.policy.satisfiesWithoutRevalidation(_req) - } - - responseHeaders () { - return this.policy.responseHeaders() - } - - // returns a new object containing the appropriate headers - // to send a revalidation request - revalidationHeaders (request) { - const _req = requestObject(request) - return this.policy.revalidationHeaders(_req) - } - - // returns true if the request/response was revalidated - // successfully. returns false if a new response was received - revalidated (request, response) { - const _req = requestObject(request) - const _res = responseObject(response) - const policy = this.policy.revalidatedPolicy(_req, _res) - return !policy.modified - } -} - -module.exports = CachePolicy diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/dns.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/dns.js deleted file mode 100644 index 13102b57c4aa0..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/dns.js +++ /dev/null @@ -1,49 +0,0 @@ -const LRUCache = require('lru-cache') -const dns = require('dns') - -const defaultOptions = exports.defaultOptions = { - family: undefined, - hints: dns.ADDRCONFIG, - all: false, - verbatim: undefined, -} - -const lookupCache = exports.lookupCache = new LRUCache({ max: 50 }) - -// this is a factory so that each request can have its own opts (i.e. 
ttl) -// while still sharing the cache across all requests -exports.getLookup = (dnsOptions) => { - return (hostname, options, callback) => { - if (typeof options === 'function') { - callback = options - options = null - } else if (typeof options === 'number') { - options = { family: options } - } - - options = { ...defaultOptions, ...options } - - const key = JSON.stringify({ - hostname, - family: options.family, - hints: options.hints, - all: options.all, - verbatim: options.verbatim, - }) - - if (lookupCache.has(key)) { - const [address, family] = lookupCache.get(key) - process.nextTick(callback, null, address, family) - return - } - - dnsOptions.lookup(hostname, options, (err, address, family) => { - if (err) { - return callback(err) - } - - lookupCache.set(key, [address, family], { ttl: dnsOptions.ttl }) - return callback(null, address, family) - }) - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/fetch.js deleted file mode 100644 index 233ba67e16550..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/fetch.js +++ /dev/null @@ -1,118 +0,0 @@ -'use strict' - -const { FetchError, Request, isRedirect } = require('minipass-fetch') -const url = require('url') - -const CachePolicy = require('./cache/policy.js') -const cache = require('./cache/index.js') -const remote = require('./remote.js') - -// given a Request, a Response and user options -// return true if the response is a redirect that -// can be followed. we throw errors that will result -// in the fetch being rejected if the redirect is -// possible but invalid for some reason -const canFollowRedirect = (request, response, options) => { - if (!isRedirect(response.status)) { - return false - } - - if (options.redirect === 'manual') { - return false - } - - if (options.redirect === 'error') { - throw new FetchError(`redirect mode is set to error: ${request.url}`, - 'no-redirect', { code: 'ENOREDIRECT' }) - } - - if (!response.headers.has('location')) { - throw new FetchError(`redirect location header missing for: ${request.url}`, - 'no-location', { code: 'EINVALIDREDIRECT' }) - } - - if (request.counter >= request.follow) { - throw new FetchError(`maximum redirect reached at: ${request.url}`, - 'max-redirect', { code: 'EMAXREDIRECT' }) - } - - return true -} - -// given a Request, a Response, and the user's options return an object -// with a new Request and a new options object that will be used for -// following the redirect -const getRedirect = (request, response, options) => { - const _opts = { ...options } - const location = response.headers.get('location') - const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) - // Comment below is used under the following license: - /** - * @license - * Copyright (c) 2010-2012 Mikeal Rogers - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an "AS - * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language - * governing permissions and limitations under the License. 
- */ - - // Remove authorization if changing hostnames (but not if just - // changing ports or protocols). This matches the behavior of request: - // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 - if (new url.URL(request.url).hostname !== redirectUrl.hostname) { - request.headers.delete('authorization') - request.headers.delete('cookie') - } - - // for POST request with 301/302 response, or any request with 303 response, - // use GET when following redirect - if ( - response.status === 303 || - (request.method === 'POST' && [301, 302].includes(response.status)) - ) { - _opts.method = 'GET' - _opts.body = null - request.headers.delete('content-length') - } - - _opts.headers = {} - request.headers.forEach((value, key) => { - _opts.headers[key] = value - }) - - _opts.counter = ++request.counter - const redirectReq = new Request(url.format(redirectUrl), _opts) - return { - request: redirectReq, - options: _opts, - } -} - -const fetch = async (request, options) => { - const response = CachePolicy.storable(request, options) - ? await cache(request, options) - : await remote(request, options) - - // if the request wasn't a GET or HEAD, and the response - // status is between 200 and 399 inclusive, invalidate the - // request url - if (!['GET', 'HEAD'].includes(request.method) && - response.status >= 200 && - response.status <= 399) { - await cache.invalidate(request, options) - } - - if (!canFollowRedirect(request, response, options)) { - return response - } - - const redirect = getRedirect(request, response, options) - return fetch(redirect.request, redirect.options) -} - -module.exports = fetch diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/index.js deleted file mode 100644 index 2f12e8e1b6113..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/index.js +++ /dev/null @@ -1,41 +0,0 @@ -const { FetchError, Headers, Request, Response } = require('minipass-fetch') - -const configureOptions = require('./options.js') -const fetch = require('./fetch.js') - -const makeFetchHappen = (url, opts) => { - const options = configureOptions(opts) - - const request = new Request(url, options) - return fetch(request, options) -} - -makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { - if (typeof defaultUrl === 'object') { - defaultOptions = defaultUrl - defaultUrl = null - } - - const defaultedFetch = (url, options = {}) => { - const finalUrl = url || defaultUrl - const finalOptions = { - ...defaultOptions, - ...options, - headers: { - ...defaultOptions.headers, - ...options.headers, - }, - } - return wrappedFetch(finalUrl, finalOptions) - } - - defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => - makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) - return defaultedFetch -} - -module.exports = makeFetchHappen -module.exports.FetchError = FetchError -module.exports.Headers = Headers -module.exports.Request = Request -module.exports.Response = Response diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/options.js deleted file mode 100644 index f77511279f831..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/options.js +++ /dev/null @@ -1,54 +0,0 @@ -const dns = 
require('dns') - -const conditionalHeaders = [ - 'if-modified-since', - 'if-none-match', - 'if-unmodified-since', - 'if-match', - 'if-range', -] - -const configureOptions = (opts) => { - const { strictSSL, ...options } = { ...opts } - options.method = options.method ? options.method.toUpperCase() : 'GET' - options.rejectUnauthorized = strictSSL !== false - - if (!options.retry) { - options.retry = { retries: 0 } - } else if (typeof options.retry === 'string') { - const retries = parseInt(options.retry, 10) - if (isFinite(retries)) { - options.retry = { retries } - } else { - options.retry = { retries: 0 } - } - } else if (typeof options.retry === 'number') { - options.retry = { retries: options.retry } - } else { - options.retry = { retries: 0, ...options.retry } - } - - options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } - - options.cache = options.cache || 'default' - if (options.cache === 'default') { - const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { - return conditionalHeaders.includes(name.toLowerCase()) - }) - if (hasConditionalHeader) { - options.cache = 'no-store' - } - } - - options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] - - // cacheManager is deprecated, but if it's set and - // cachePath is not we should copy it to the new field - if (options.cacheManager && !options.cachePath) { - options.cachePath = options.cacheManager - } - - return options -} - -module.exports = configureOptions diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/pipeline.js deleted file mode 100644 index b1d221b2d0ce3..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/pipeline.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict' - -const MinipassPipeline = require('minipass-pipeline') - -class CachingMinipassPipeline extends MinipassPipeline { - #events = [] - #data = new Map() - - constructor (opts, ...streams) { - // CRITICAL: do NOT pass the streams to the call to super(), this will start - // the flow of data and potentially cause the events we need to catch to emit - // before we've finished our own setup. 
instead we call super() with no args, - // finish our setup, and then push the streams into ourselves to start the - // data flow - super() - this.#events = opts.events - - /* istanbul ignore next - coverage disabled because this is pointless to test here */ - if (streams.length) { - this.push(...streams) - } - } - - on (event, handler) { - if (this.#events.includes(event) && this.#data.has(event)) { - return handler(...this.#data.get(event)) - } - - return super.on(event, handler) - } - - emit (event, ...data) { - if (this.#events.includes(event)) { - this.#data.set(event, data) - } - - return super.emit(event, ...data) - } -} - -module.exports = CachingMinipassPipeline diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/remote.js b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/remote.js deleted file mode 100644 index bdbcc79cad908..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/lib/remote.js +++ /dev/null @@ -1,121 +0,0 @@ -const { Minipass } = require('minipass') -const fetch = require('minipass-fetch') -const promiseRetry = require('promise-retry') -const ssri = require('ssri') - -const CachingMinipassPipeline = require('./pipeline.js') -const getAgent = require('./agent.js') -const pkg = require('../package.json') - -const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` - -const RETRY_ERRORS = [ - 'ECONNRESET', // remote socket closed on us - 'ECONNREFUSED', // remote host refused to open connection - 'EADDRINUSE', // failed to bind to a local port (proxy?) - 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW - 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive - // Known codes we do NOT retry on: - // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) -] - -const RETRY_TYPES = [ - 'request-timeout', -] - -// make a request directly to the remote source, -// retrying certain classes of errors as well as -// following redirects (through the cache if necessary) -// and verifying response integrity -const remoteFetch = (request, options) => { - const agent = getAgent(request.url, options) - if (!request.headers.has('connection')) { - request.headers.set('connection', agent ? 
'keep-alive' : 'close') - } - - if (!request.headers.has('user-agent')) { - request.headers.set('user-agent', USER_AGENT) - } - - // keep our own options since we're overriding the agent - // and the redirect mode - const _opts = { - ...options, - agent, - redirect: 'manual', - } - - return promiseRetry(async (retryHandler, attemptNum) => { - const req = new fetch.Request(request, _opts) - try { - let res = await fetch(req, _opts) - if (_opts.integrity && res.status === 200) { - // we got a 200 response and the user has specified an expected - // integrity value, so wrap the response in an ssri stream to verify it - const integrityStream = ssri.integrityStream({ - algorithms: _opts.algorithms, - integrity: _opts.integrity, - size: _opts.size, - }) - const pipeline = new CachingMinipassPipeline({ - events: ['integrity', 'size'], - }, res.body, integrityStream) - // we also propagate the integrity and size events out to the pipeline so we can use - // this new response body as an integrityEmitter for cacache - integrityStream.on('integrity', i => pipeline.emit('integrity', i)) - integrityStream.on('size', s => pipeline.emit('size', s)) - res = new fetch.Response(pipeline, res) - // set an explicit flag so we know if our response body will emit integrity and size - res.body.hasIntegrityEmitter = true - } - - res.headers.set('x-fetch-attempts', attemptNum) - - // do not retry POST requests, or requests with a streaming body - // do retry requests with a 408, 420, 429 or 500+ status in the response - const isStream = Minipass.isStream(req.body) - const isRetriable = req.method !== 'POST' && - !isStream && - ([408, 420, 429].includes(res.status) || res.status >= 500) - - if (isRetriable) { - if (typeof options.onRetry === 'function') { - options.onRetry(res) - } - - return retryHandler(res) - } - - return res - } catch (err) { - const code = (err.code === 'EPROMISERETRY') - ? 
err.retried.code - : err.code - - // err.retried will be the thing that was thrown from above - // if it's a response, we just got a bad status code and we - // can re-throw to allow the retry - const isRetryError = err.retried instanceof fetch.Response || - (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) - - if (req.method === 'POST' || isRetryError) { - throw err - } - - if (typeof options.onRetry === 'function') { - options.onRetry(err) - } - - return retryHandler(err) - } - }, options.retry).catch((err) => { - // don't reject for http errors, just return them - if (err.status >= 400 && err.type !== 'system') { - return err - } - - throw err - }) -} - -module.exports = remoteFetch diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/package.json deleted file mode 100644 index fd415dc9966fa..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen/package.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "name": "make-fetch-happen", - "version": "11.1.1", - "description": "Opinionated, caching, retrying fetch client", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "test": "tap", - "posttest": "npm run lint", - "eslint": "eslint", - "lint": "eslint \"**/*.js\"", - "lintfix": "npm run lint -- --fix", - "postlint": "template-oss-check", - "snap": "tap", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/make-fetch-happen.git" - }, - "keywords": [ - "http", - "request", - "fetch", - "mean girls", - "caching", - "cache", - "subresource integrity" - ], - "author": "GitHub Inc.", - "license": "ISC", - "dependencies": { - "agentkeepalive": "^4.2.1", - "cacache": "^17.0.0", - "http-cache-semantics": "^4.1.1", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.0", - "is-lambda": "^1.0.1", - "lru-cache": "^7.7.1", - "minipass": "^5.0.0", - "minipass-fetch": "^3.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "promise-retry": "^2.0.1", - "socks-proxy-agent": "^7.0.0", - "ssri": "^10.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", - "nock": "^13.2.4", - "safe-buffer": "^5.2.1", - "standard-version": "^9.3.2", - "tap": "^16.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "tap": { - "color": 1, - "files": "test/*.js", - "check-coverage": true, - "timeout": 60, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.14.1", - "publish": "true" - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/LICENSE deleted file mode 100644 index 97f8e32ed82e4..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/index.js deleted file mode 100644 index ed07c17acd97b..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/index.js +++ /dev/null @@ -1,702 +0,0 @@ -'use strict' -const proc = - typeof process === 'object' && process - ? process - : { - stdout: null, - stderr: null, - } -const EE = require('events') -const Stream = require('stream') -const stringdecoder = require('string_decoder') -const SD = stringdecoder.StringDecoder - -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const EMITTED_ERROR = Symbol('emittedError') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFER = Symbol('buffer') -const PIPES = Symbol('pipes') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed') -// internal event when stream has an error -const ERROR = Symbol('error') -const EMITDATA = Symbol('emitData') -const EMITEND = Symbol('emitEnd') -const EMITEND2 = Symbol('emitEnd2') -const ASYNC = Symbol('async') -const ABORT = Symbol('abort') -const ABORTED = Symbol('aborted') -const SIGNAL = Symbol('signal') - -const defer = fn => Promise.resolve().then(fn) - -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = - (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') -const ITERATOR = - (doIter && Symbol.iterator) || Symbol('iterator not implemented') - -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. 
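The comment above is the key behavioral contract of minipass: 'end', 'finish', and 'prefinish' are re-emitted to handlers attached after the stream has already ended. A minimal consumer-side sketch of that behavior, assuming this file is what require('minipass') resolves to (the variable names are illustrative):

const { Minipass } = require('minipass')

const mp = new Minipass({ encoding: 'utf8' })
mp.end('hello')
mp.resume() // let the buffered chunk flow so 'end' can fire
// attached after 'end' was already emitted, yet still invoked,
// because on() replays endish events to late subscribers
mp.on('end', () => console.log('late listener still sees end'))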
-const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' - -const isArrayBuffer = b => - b instanceof ArrayBuffer || - (typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0) - -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) - -class Pipe { - constructor(src, dest, opts) { - this.src = src - this.dest = dest - this.opts = opts - this.ondrain = () => src[RESUME]() - dest.on('drain', this.ondrain) - } - unpipe() { - this.dest.removeListener('drain', this.ondrain) - } - // istanbul ignore next - only here for the prototype - proxyErrors() {} - end() { - this.unpipe() - if (this.opts.end) this.dest.end() - } -} - -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors) - super.unpipe() - } - constructor(src, dest, opts) { - super(src, dest, opts) - this.proxyErrors = er => dest.emit('error', er) - src.on('error', this.proxyErrors) - } -} - -class Minipass extends Stream { - constructor(options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this[PIPES] = [] - this[BUFFER] = [] - this[OBJECTMODE] = (options && options.objectMode) || false - if (this[OBJECTMODE]) this[ENCODING] = null - else this[ENCODING] = (options && options.encoding) || null - if (this[ENCODING] === 'buffer') this[ENCODING] = null - this[ASYNC] = (options && !!options.async) || false - this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this[EMITTED_ERROR] = null - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) - } - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) - } - this[SIGNAL] = options && options.signal - this[ABORTED] = false - if (this[SIGNAL]) { - this[SIGNAL].addEventListener('abort', () => this[ABORT]()) - if (this[SIGNAL].aborted) { - this[ABORT]() - } - } - } - - get bufferLength() { - return this[BUFFERLENGTH] - } - - get encoding() { - return this[ENCODING] - } - set encoding(enc) { - if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') - - if ( - this[ENCODING] && - enc !== this[ENCODING] && - ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) - ) - throw new Error('cannot change encoding') - - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? 
new SD(enc) : null - if (this[BUFFER].length) - this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) - } - - this[ENCODING] = enc - } - - setEncoding(enc) { - this.encoding = enc - } - - get objectMode() { - return this[OBJECTMODE] - } - set objectMode(om) { - this[OBJECTMODE] = this[OBJECTMODE] || !!om - } - - get ['async']() { - return this[ASYNC] - } - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a - } - - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true - this.emit('abort', this[SIGNAL].reason) - this.destroy(this[SIGNAL].reason) - } - - get aborted() { - return this[ABORTED] - } - set aborted(_) {} - - write(chunk, encoding, cb) { - if (this[ABORTED]) return false - if (this[EOF]) throw new Error('write after end') - - if (this[DESTROYED]) { - this.emit( - 'error', - Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - ) - ) - return true - } - - if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') - - if (!encoding) encoding = 'utf8' - - const fn = this[ASYNC] ? defer : f => f() - - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } - - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - /* istanbul ignore if - maybe impossible? 
*/ - if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) - - if (this.flowing) this.emit('data', chunk) - else this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - - if (cb) fn(cb) - - return this.flowing - } - - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - if (cb) fn(cb) - return this.flowing - } - - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if ( - typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed) - ) { - chunk = Buffer.from(chunk, encoding) - } - - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) - - // Note: flushing CAN potentially switch us into not-flowing mode - if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) - - if (this.flowing) this.emit('data', chunk) - else this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - - if (cb) fn(cb) - - return this.flowing - } - - read(n) { - if (this[DESTROYED]) return null - - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { - this[MAYBE_EMIT_END]() - return null - } - - if (this[OBJECTMODE]) n = null - - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] - else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] - } - - const ret = this[READ](n || null, this[BUFFER][0]) - this[MAYBE_EMIT_END]() - return ret - } - - [READ](n, chunk) { - if (n === chunk.length || n === null) this[BUFFERSHIFT]() - else { - this[BUFFER][0] = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } - - this.emit('data', chunk) - - if (!this[BUFFER].length && !this[EOF]) this.emit('drain') - - return chunk - } - - end(chunk, encoding, cb) { - if (typeof chunk === 'function') (cb = chunk), (chunk = null) - if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') - if (chunk) this.write(chunk, encoding) - if (cb) this.once('end', cb) - this[EOF] = true - this.writable = false - - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
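As the comment above says, end() emits eagerly when there is nothing left to read, which is what makes a pure write-only consumer workable. A small sketch under the same assumption (sink is an illustrative name, not anything this module defines):

const { Minipass } = require('minipass')

const sink = new Minipass()
// promise() resolves on 'end', so completion can be awaited
// without ever attaching a 'data' listener
sink.promise().then(() => console.log('sink finished'))
sink.end() // nothing was written, so 'end' is emitted right away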
- if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() - return this - } - - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) return - - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this[BUFFER].length) this[FLUSH]() - else if (this[EOF]) this[MAYBE_EMIT_END]() - else this.emit('drain') - } - - resume() { - return this[RESUME]() - } - - pause() { - this[FLOWING] = false - this[PAUSED] = true - } - - get destroyed() { - return this[DESTROYED] - } - - get flowing() { - return this[FLOWING] - } - - get paused() { - return this[PAUSED] - } - - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 - else this[BUFFERLENGTH] += chunk.length - this[BUFFER].push(chunk) - } - - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 - else this[BUFFERLENGTH] -= this[BUFFER][0].length - return this[BUFFER].shift() - } - - [FLUSH](noDrain) { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) - - if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') - } - - [FLUSHCHUNK](chunk) { - this.emit('data', chunk) - return this.flowing - } - - pipe(dest, opts) { - if (this[DESTROYED]) return - - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === proc.stdout || dest === proc.stderr) opts.end = false - else opts.end = opts.end !== false - opts.proxyErrors = !!opts.proxyErrors - - // piping an ended stream ends immediately - if (ended) { - if (opts.end) dest.end() - } else { - this[PIPES].push( - !opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts) - ) - if (this[ASYNC]) defer(() => this[RESUME]()) - else this[RESUME]() - } - - return dest - } - - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest) - if (p) { - this[PIPES].splice(this[PIPES].indexOf(p), 1) - p.unpipe() - } - } - - addListener(ev, fn) { - return this.on(ev, fn) - } - - on(ev, fn) { - const ret = super.on(ev, fn) - if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) - super.emit('readable') - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } else if (ev === 'error' && this[EMITTED_ERROR]) { - if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) - else fn.call(this, this[EMITTED_ERROR]) - } - return ret - } - - get emittedEnd() { - return this[EMITTED_END] - } - - [MAYBE_EMIT_END]() { - if ( - !this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF] - ) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) this.emit('close') - this[EMITTING_END] = false - } - } - - emit(ev, data, ...extra) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? 
defer(() => this[EMITDATA](data)) - : this[EMITDATA](data) - } else if (ev === 'end') { - return this[EMITEND]() - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) return - const ret = super.emit('close') - this.removeAllListeners('close') - return ret - } else if (ev === 'error') { - this[EMITTED_ERROR] = data - super.emit(ERROR, data) - const ret = - !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'resume') { - const ret = super.emit('resume') - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev) - this.removeAllListeners(ev) - return ret - } - - // Some other unknown event - const ret = super.emit(ev, data, ...extra) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) this.pause() - } - const ret = super.emit('data', data) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITEND]() { - if (this[EMITTED_END]) return - - this[EMITTED_END] = true - this.readable = false - if (this[ASYNC]) defer(() => this[EMITEND2]()) - else this[EMITEND2]() - } - - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end() - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data) - } - super.emit('data', data) - } - } - - for (const p of this[PIPES]) { - p.end() - } - const ret = super.emit('end') - this.removeAllListeners('end') - return ret - } - - // const all = await stream.collect() - collect() { - const buf = [] - if (!this[OBJECTMODE]) buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) buf.dataLength += c.length - }) - return p.then(() => buf) - } - - // const data = await stream.concat() - concat() { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] - ? 
buf.join('') - : Buffer.concat(buf, buf.dataLength) - ) - } - - // stream.promise().then(() => done, er => emitted error) - promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('error', er => reject(er)) - this.on('end', () => resolve()) - }) - } - - // for await (let chunk of stream) - [ASYNCITERATOR]() { - let stopped = false - const stop = () => { - this.pause() - stopped = true - return Promise.resolve({ done: true }) - } - const next = () => { - if (stopped) return stop() - const res = this.read() - if (res !== null) return Promise.resolve({ done: false, value: res }) - - if (this[EOF]) return stop() - - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - this.removeListener(DESTROYED, ondestroy) - stop() - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.removeListener(DESTROYED, ondestroy) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - this.removeListener(DESTROYED, ondestroy) - stop() - resolve({ done: true }) - } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } - - return { - next, - throw: stop, - return: stop, - [ASYNCITERATOR]() { - return this - }, - } - } - - // for (let chunk of stream) - [ITERATOR]() { - let stopped = false - const stop = () => { - this.pause() - this.removeListener(ERROR, stop) - this.removeListener(DESTROYED, stop) - this.removeListener('end', stop) - stopped = true - return { done: true } - } - - const next = () => { - if (stopped) return stop() - const value = this.read() - return value === null ? stop() : { value } - } - this.once('end', stop) - this.once(ERROR, stop) - this.once(DESTROYED, stop) - - return { - next, - throw: stop, - return: stop, - [ITERATOR]() { - return this - }, - } - } - - destroy(er) { - if (this[DESTROYED]) { - if (er) this.emit('error', er) - else this.emit(DESTROYED) - return this - } - - this[DESTROYED] = true - - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0 - this[BUFFERLENGTH] = 0 - - if (typeof this.close === 'function' && !this[CLOSED]) this.close() - - if (er) this.emit('error', er) - // if no error to emit, still reject pending promises - else this.emit(DESTROYED) - - return this - } - - static isStream(s) { - return ( - !!s && - (s instanceof Minipass || - s instanceof Stream || - (s instanceof EE && - // readable - (typeof s.pipe === 'function' || - // writable - (typeof s.write === 'function' && typeof s.end === 'function')))) - ) - } -} - -exports.Minipass = Minipass diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/index.mjs b/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/index.mjs deleted file mode 100644 index 6ef6cd8cf0703..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/index.mjs +++ /dev/null @@ -1,702 +0,0 @@ -'use strict' -const proc = - typeof process === 'object' && process - ? 
process - : { - stdout: null, - stderr: null, - } -import EE from 'events' -import Stream from 'stream' -import stringdecoder from 'string_decoder' -const SD = stringdecoder.StringDecoder - -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const EMITTED_ERROR = Symbol('emittedError') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFER = Symbol('buffer') -const PIPES = Symbol('pipes') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed') -// internal event when stream has an error -const ERROR = Symbol('error') -const EMITDATA = Symbol('emitData') -const EMITEND = Symbol('emitEnd') -const EMITEND2 = Symbol('emitEnd2') -const ASYNC = Symbol('async') -const ABORT = Symbol('abort') -const ABORTED = Symbol('aborted') -const SIGNAL = Symbol('signal') - -const defer = fn => Promise.resolve().then(fn) - -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = - (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') -const ITERATOR = - (doIter && Symbol.iterator) || Symbol('iterator not implemented') - -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. -const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' - -const isArrayBuffer = b => - b instanceof ArrayBuffer || - (typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0) - -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) - -class Pipe { - constructor(src, dest, opts) { - this.src = src - this.dest = dest - this.opts = opts - this.ondrain = () => src[RESUME]() - dest.on('drain', this.ondrain) - } - unpipe() { - this.dest.removeListener('drain', this.ondrain) - } - // istanbul ignore next - only here for the prototype - proxyErrors() {} - end() { - this.unpipe() - if (this.opts.end) this.dest.end() - } -} - -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors) - super.unpipe() - } - constructor(src, dest, opts) { - super(src, dest, opts) - this.proxyErrors = er => dest.emit('error', er) - src.on('error', this.proxyErrors) - } -} - -export class Minipass extends Stream { - constructor(options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this[PIPES] = [] - this[BUFFER] = [] - this[OBJECTMODE] = (options && options.objectMode) || false - if (this[OBJECTMODE]) this[ENCODING] = null - else this[ENCODING] = (options && options.encoding) || null - if (this[ENCODING] === 'buffer') this[ENCODING] = null - this[ASYNC] = (options && !!options.async) || false - this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this[EMITTED_ERROR] = null - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) - } - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) - } - this[SIGNAL] = options && options.signal - this[ABORTED] = false - if (this[SIGNAL]) { - this[SIGNAL].addEventListener('abort', () => this[ABORT]()) - if (this[SIGNAL].aborted) { - this[ABORT]() - } - } - } - - get bufferLength() { - return this[BUFFERLENGTH] - } - - get encoding() { - return this[ENCODING] - } - set encoding(enc) { - if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') - - if ( - this[ENCODING] && - enc !== this[ENCODING] && - ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) - ) - throw new Error('cannot change encoding') - - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? new SD(enc) : null - if (this[BUFFER].length) - this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) - } - - this[ENCODING] = enc - } - - setEncoding(enc) { - this.encoding = enc - } - - get objectMode() { - return this[OBJECTMODE] - } - set objectMode(om) { - this[OBJECTMODE] = this[OBJECTMODE] || !!om - } - - get ['async']() { - return this[ASYNC] - } - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a - } - - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true - this.emit('abort', this[SIGNAL].reason) - this.destroy(this[SIGNAL].reason) - } - - get aborted() { - return this[ABORTED] - } - set aborted(_) {} - - write(chunk, encoding, cb) { - if (this[ABORTED]) return false - if (this[EOF]) throw new Error('write after end') - - if (this[DESTROYED]) { - this.emit( - 'error', - Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - ) - ) - return true - } - - if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') - - if (!encoding) encoding = 'utf8' - - const fn = this[ASYNC] ? defer : f => f() - - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } - - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - /* istanbul ignore if - maybe impossible? 
*/ - if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) - - if (this.flowing) this.emit('data', chunk) - else this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - - if (cb) fn(cb) - - return this.flowing - } - - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - if (cb) fn(cb) - return this.flowing - } - - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if ( - typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed) - ) { - chunk = Buffer.from(chunk, encoding) - } - - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) - - // Note: flushing CAN potentially switch us into not-flowing mode - if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) - - if (this.flowing) this.emit('data', chunk) - else this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - - if (cb) fn(cb) - - return this.flowing - } - - read(n) { - if (this[DESTROYED]) return null - - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { - this[MAYBE_EMIT_END]() - return null - } - - if (this[OBJECTMODE]) n = null - - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] - else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] - } - - const ret = this[READ](n || null, this[BUFFER][0]) - this[MAYBE_EMIT_END]() - return ret - } - - [READ](n, chunk) { - if (n === chunk.length || n === null) this[BUFFERSHIFT]() - else { - this[BUFFER][0] = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } - - this.emit('data', chunk) - - if (!this[BUFFER].length && !this[EOF]) this.emit('drain') - - return chunk - } - - end(chunk, encoding, cb) { - if (typeof chunk === 'function') (cb = chunk), (chunk = null) - if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') - if (chunk) this.write(chunk, encoding) - if (cb) this.once('end', cb) - this[EOF] = true - this.writable = false - - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
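The ESM build mirrors these semantics one-for-one, and this copy also carries the async iterator defined further down in the file. A consumption sketch from an ESM context, assuming the file is imported as minipass:

import { Minipass } from 'minipass'

const src = new Minipass({ encoding: 'utf8' })
src.end('chunk')
// the async iterator hands out buffered reads and finishes once
// EOF is reached, so a top-level for await drains the stream cleanly
for await (const chunk of src) {
  console.log(chunk) // -> 'chunk'
}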
- if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() - return this - } - - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) return - - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this[BUFFER].length) this[FLUSH]() - else if (this[EOF]) this[MAYBE_EMIT_END]() - else this.emit('drain') - } - - resume() { - return this[RESUME]() - } - - pause() { - this[FLOWING] = false - this[PAUSED] = true - } - - get destroyed() { - return this[DESTROYED] - } - - get flowing() { - return this[FLOWING] - } - - get paused() { - return this[PAUSED] - } - - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 - else this[BUFFERLENGTH] += chunk.length - this[BUFFER].push(chunk) - } - - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 - else this[BUFFERLENGTH] -= this[BUFFER][0].length - return this[BUFFER].shift() - } - - [FLUSH](noDrain) { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) - - if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') - } - - [FLUSHCHUNK](chunk) { - this.emit('data', chunk) - return this.flowing - } - - pipe(dest, opts) { - if (this[DESTROYED]) return - - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === proc.stdout || dest === proc.stderr) opts.end = false - else opts.end = opts.end !== false - opts.proxyErrors = !!opts.proxyErrors - - // piping an ended stream ends immediately - if (ended) { - if (opts.end) dest.end() - } else { - this[PIPES].push( - !opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts) - ) - if (this[ASYNC]) defer(() => this[RESUME]()) - else this[RESUME]() - } - - return dest - } - - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest) - if (p) { - this[PIPES].splice(this[PIPES].indexOf(p), 1) - p.unpipe() - } - } - - addListener(ev, fn) { - return this.on(ev, fn) - } - - on(ev, fn) { - const ret = super.on(ev, fn) - if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) - super.emit('readable') - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } else if (ev === 'error' && this[EMITTED_ERROR]) { - if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) - else fn.call(this, this[EMITTED_ERROR]) - } - return ret - } - - get emittedEnd() { - return this[EMITTED_END] - } - - [MAYBE_EMIT_END]() { - if ( - !this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF] - ) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) this.emit('close') - this[EMITTING_END] = false - } - } - - emit(ev, data, ...extra) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? 
defer(() => this[EMITDATA](data)) - : this[EMITDATA](data) - } else if (ev === 'end') { - return this[EMITEND]() - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) return - const ret = super.emit('close') - this.removeAllListeners('close') - return ret - } else if (ev === 'error') { - this[EMITTED_ERROR] = data - super.emit(ERROR, data) - const ret = - !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'resume') { - const ret = super.emit('resume') - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev) - this.removeAllListeners(ev) - return ret - } - - // Some other unknown event - const ret = super.emit(ev, data, ...extra) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) this.pause() - } - const ret = super.emit('data', data) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITEND]() { - if (this[EMITTED_END]) return - - this[EMITTED_END] = true - this.readable = false - if (this[ASYNC]) defer(() => this[EMITEND2]()) - else this[EMITEND2]() - } - - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end() - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data) - } - super.emit('data', data) - } - } - - for (const p of this[PIPES]) { - p.end() - } - const ret = super.emit('end') - this.removeAllListeners('end') - return ret - } - - // const all = await stream.collect() - collect() { - const buf = [] - if (!this[OBJECTMODE]) buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) buf.dataLength += c.length - }) - return p.then(() => buf) - } - - // const data = await stream.concat() - concat() { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] - ? 
buf.join('') - : Buffer.concat(buf, buf.dataLength) - ) - } - - // stream.promise().then(() => done, er => emitted error) - promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('error', er => reject(er)) - this.on('end', () => resolve()) - }) - } - - // for await (let chunk of stream) - [ASYNCITERATOR]() { - let stopped = false - const stop = () => { - this.pause() - stopped = true - return Promise.resolve({ done: true }) - } - const next = () => { - if (stopped) return stop() - const res = this.read() - if (res !== null) return Promise.resolve({ done: false, value: res }) - - if (this[EOF]) return stop() - - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - this.removeListener(DESTROYED, ondestroy) - stop() - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.removeListener(DESTROYED, ondestroy) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - this.removeListener(DESTROYED, ondestroy) - stop() - resolve({ done: true }) - } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } - - return { - next, - throw: stop, - return: stop, - [ASYNCITERATOR]() { - return this - }, - } - } - - // for (let chunk of stream) - [ITERATOR]() { - let stopped = false - const stop = () => { - this.pause() - this.removeListener(ERROR, stop) - this.removeListener(DESTROYED, stop) - this.removeListener('end', stop) - stopped = true - return { done: true } - } - - const next = () => { - if (stopped) return stop() - const value = this.read() - return value === null ? 
stop() : { value } - } - this.once('end', stop) - this.once(ERROR, stop) - this.once(DESTROYED, stop) - - return { - next, - throw: stop, - return: stop, - [ITERATOR]() { - return this - }, - } - } - - destroy(er) { - if (this[DESTROYED]) { - if (er) this.emit('error', er) - else this.emit(DESTROYED) - return this - } - - this[DESTROYED] = true - - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0 - this[BUFFERLENGTH] = 0 - - if (typeof this.close === 'function' && !this[CLOSED]) this.close() - - if (er) this.emit('error', er) - // if no error to emit, still reject pending promises - else this.emit(DESTROYED) - - return this - } - - static isStream(s) { - return ( - !!s && - (s instanceof Minipass || - s instanceof Stream || - (s instanceof EE && - // readable - (typeof s.pipe === 'function' || - // writable - (typeof s.write === 'function' && typeof s.end === 'function')))) - ) - } -} - - diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/package.json deleted file mode 100644 index 0e20e988047f2..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/minipass/package.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "name": "minipass", - "version": "5.0.0", - "description": "minimal implementation of a PassThrough stream", - "main": "./index.js", - "module": "./index.mjs", - "types": "./index.d.ts", - "exports": { - ".": { - "import": { - "types": "./index.d.ts", - "default": "./index.mjs" - }, - "require": { - "types": "./index.d.ts", - "default": "./index.js" - } - }, - "./package.json": "./package.json" - }, - "devDependencies": { - "@types/node": "^17.0.41", - "end-of-stream": "^1.4.0", - "node-abort-controller": "^3.1.1", - "prettier": "^2.6.2", - "tap": "^16.2.0", - "through2": "^2.0.3", - "ts-node": "^10.8.1", - "typedoc": "^0.23.24", - "typescript": "^4.7.3" - }, - "scripts": { - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "prepare": "node ./scripts/transpile-to-esm.js", - "snap": "tap", - "test": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --follow-tags", - "typedoc": "typedoc ./index.d.ts", - "format": "prettier --write . --loglevel warn" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/isaacs/minipass.git" - }, - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "license": "ISC", - "files": [ - "index.d.ts", - "index.js", - "index.mjs" - ], - "tap": { - "check-coverage": true - }, - "engines": { - "node": ">=8" - }, - "prettier": { - "semi": false, - "printWidth": 80, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/LICENSE.md deleted file mode 100644 index 5fc208ff122e0..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/LICENSE.md +++ /dev/null @@ -1,20 +0,0 @@ - - -ISC License - -Copyright npm, Inc. - -Permission to use, copy, modify, and/or distribute this -software for any purpose with or without fee is hereby -granted, provided that the above copyright notice and this -permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL -WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO -EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/auth.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/auth.js deleted file mode 100644 index 870ce0d923cd0..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/auth.js +++ /dev/null @@ -1,145 +0,0 @@ -'use strict' -const fs = require('fs') -const npa = require('npm-package-arg') -const { URL } = require('url') - -// Find the longest registry key that is used for some kind of auth -// in the options. -const regKeyFromURI = (uri, opts) => { - const parsed = new URL(uri) - // try to find a config key indicating we have auth for this registry - // can be one of :_authToken, :_auth, :_password and :username, or - // :certfile and :keyfile - // We walk up the "path" until we're left with just //[:], - // stopping when we reach '//'. - let regKey = `//${parsed.host}${parsed.pathname}` - while (regKey.length > '//'.length) { - // got some auth for this URI - if (hasAuth(regKey, opts)) { - return regKey - } - - // can be either //host/some/path/:_auth or //host/some/path:_auth - // walk up by removing EITHER what's after the slash OR the slash itself - regKey = regKey.replace(/([^/]+|\/)$/, '') - } -} - -const hasAuth = (regKey, opts) => ( - opts[`${regKey}:_authToken`] || - opts[`${regKey}:_auth`] || - opts[`${regKey}:username`] && opts[`${regKey}:_password`] || - opts[`${regKey}:certfile`] && opts[`${regKey}:keyfile`] -) - -const sameHost = (a, b) => { - const parsedA = new URL(a) - const parsedB = new URL(b) - return parsedA.host === parsedB.host -} - -const getRegistry = opts => { - const { spec } = opts - const { scope: specScope, subSpec } = spec ? npa(spec) : {} - const subSpecScope = subSpec && subSpec.scope - const scope = subSpec ? 
subSpecScope : specScope - const scopeReg = scope && opts[`${scope}:registry`] - return scopeReg || opts.registry -} - -const maybeReadFile = file => { - try { - return fs.readFileSync(file, 'utf8') - } catch (er) { - if (er.code !== 'ENOENT') { - throw er - } - return null - } -} - -const getAuth = (uri, opts = {}) => { - const { forceAuth } = opts - if (!uri) { - throw new Error('URI is required') - } - const regKey = regKeyFromURI(uri, forceAuth || opts) - - // we are only allowed to use what's in forceAuth if specified - if (forceAuth && !regKey) { - return new Auth({ - scopeAuthKey: null, - token: forceAuth._authToken || forceAuth.token, - username: forceAuth.username, - password: forceAuth._password || forceAuth.password, - auth: forceAuth._auth || forceAuth.auth, - certfile: forceAuth.certfile, - keyfile: forceAuth.keyfile, - }) - } - - // no auth for this URI, but might have it for the registry - if (!regKey) { - const registry = getRegistry(opts) - if (registry && uri !== registry && sameHost(uri, registry)) { - return getAuth(registry, opts) - } else if (registry !== opts.registry) { - // If making a tarball request to a different base URI than the - // registry where we logged in, but the same auth SHOULD be sent - // to that artifact host, then we track where it was coming in from, - // and warn the user if we get a 4xx error on it. - const scopeAuthKey = regKeyFromURI(registry, opts) - return new Auth({ scopeAuthKey }) - } - } - - const { - [`${regKey}:_authToken`]: token, - [`${regKey}:username`]: username, - [`${regKey}:_password`]: password, - [`${regKey}:_auth`]: auth, - [`${regKey}:certfile`]: certfile, - [`${regKey}:keyfile`]: keyfile, - } = opts - - return new Auth({ - scopeAuthKey: null, - token, - auth, - username, - password, - certfile, - keyfile, - }) -} - -class Auth { - constructor ({ token, auth, username, password, scopeAuthKey, certfile, keyfile }) { - this.scopeAuthKey = scopeAuthKey - this.token = null - this.auth = null - this.isBasicAuth = false - this.cert = null - this.key = null - if (token) { - this.token = token - } else if (auth) { - this.auth = auth - } else if (username && password) { - const p = Buffer.from(password, 'base64').toString('utf8') - this.auth = Buffer.from(`${username}:${p}`, 'utf8').toString('base64') - this.isBasicAuth = true - } - // mTLS may be used in conjunction with another auth method above - if (certfile && keyfile) { - const cert = maybeReadFile(certfile, 'utf-8') - const key = maybeReadFile(keyfile, 'utf-8') - if (cert && key) { - this.cert = cert - this.key = key - } - } - } -} - -module.exports = getAuth diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/check-response.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/check-response.js deleted file mode 100644 index 066ac3c32420f..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/check-response.js +++ /dev/null @@ -1,100 +0,0 @@ -'use strict' - -const errors = require('./errors.js') -const { Response } = require('minipass-fetch') -const defaultOpts = require('./default-opts.js') -const log = require('proc-log') -const cleanUrl = require('./clean-url.js') - -/* eslint-disable-next-line max-len */ -const moreInfoUrl = 'https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry' -const checkResponse = - async ({ method, uri, res, startTime, auth, opts }) => { - opts = { ...defaultOpts, ...opts } - if (res.headers.has('npm-notice') 
&& !res.headers.has('x-local-cache')) { - log.notice('', res.headers.get('npm-notice')) - } - - if (res.status >= 400) { - logRequest(method, res, startTime) - if (auth && auth.scopeAuthKey && !auth.token && !auth.auth) { - // we didn't have auth for THIS request, but we do have auth for - // requests to the registry indicated by the spec's scope value. - // Warn the user. - log.warn('registry', `No auth for URI, but auth present for scoped registry. - -URI: ${uri} -Scoped Registry Key: ${auth.scopeAuthKey} - -More info here: ${moreInfoUrl}`) - } - return checkErrors(method, res, startTime, opts) - } else { - res.body.on('end', () => logRequest(method, res, startTime, opts)) - if (opts.ignoreBody) { - res.body.resume() - return new Response(null, res) - } - return res - } - } -module.exports = checkResponse - -function logRequest (method, res, startTime) { - const elapsedTime = Date.now() - startTime - const attempt = res.headers.get('x-fetch-attempts') - const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : '' - const cacheStatus = res.headers.get('x-local-cache-status') - const cacheStr = cacheStatus ? ` (cache ${cacheStatus})` : '' - const urlStr = cleanUrl(res.url) - - log.http( - 'fetch', - `${method.toUpperCase()} ${res.status} ${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}` - ) -} - -function checkErrors (method, res, startTime, opts) { - return res.buffer() - .catch(() => null) - .then(body => { - let parsed = body - try { - parsed = JSON.parse(body.toString('utf8')) - } catch { - // ignore errors - } - if (res.status === 401 && res.headers.get('www-authenticate')) { - const auth = res.headers.get('www-authenticate') - .split(/,\s*/) - .map(s => s.toLowerCase()) - if (auth.indexOf('ipaddress') !== -1) { - throw new errors.HttpErrorAuthIPAddress( - method, res, parsed, opts.spec - ) - } else if (auth.indexOf('otp') !== -1) { - throw new errors.HttpErrorAuthOTP( - method, res, parsed, opts.spec - ) - } else { - throw new errors.HttpErrorAuthUnknown( - method, res, parsed, opts.spec - ) - } - } else if ( - res.status === 401 && - body != null && - /one-time pass/.test(body.toString('utf8')) - ) { - // Heuristic for malformed OTP responses that don't include the - // www-authenticate header. 
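// A minimal standalone sketch (not from this patch) of the 401 handling
// above: the www-authenticate challenges are lower-cased and matched
// against known keywords, with the "one-time pass" body heuristic (the
// throw that follows) as a fallback. The error codes are illustrative.
const classify401 = (wwwAuthenticate, body = '') => {
  const challenges = (wwwAuthenticate || '').split(/,\s*/).map(s => s.toLowerCase())
  if (challenges.includes('ipaddress')) {
    return 'EAUTHIP' // login not allowed from this IP address
  }
  if (challenges.includes('otp') || /one-time pass/.test(body)) {
    return 'EOTP' // a one-time password is required
  }
  return 'EAUTHUNKNOWN' // authenticated endpoint, unrecognized challenge
}
// classify401('OTP, Basic realm="npm"') === 'EOTP'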
- throw new errors.HttpErrorAuthOTP( - method, res, parsed, opts.spec - ) - } else { - throw new errors.HttpErrorGeneral( - method, res, parsed, opts.spec - ) - } - }) -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/clean-url.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/clean-url.js deleted file mode 100644 index 0c2656b5653a0..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/clean-url.js +++ /dev/null @@ -1,27 +0,0 @@ -const { URL } = require('url') - -const replace = '***' -const tokenRegex = /\bnpm_[a-zA-Z0-9]{36}\b/g -const guidRegex = /\b[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\b/g - -const cleanUrl = (str) => { - if (typeof str !== 'string' || !str) { - return str - } - - try { - const url = new URL(str) - if (url.password) { - url.password = replace - str = url.toString() - } - } catch { - // ignore errors - } - - return str - .replace(tokenRegex, `npm_${replace}`) - .replace(guidRegex, `npm_${replace}`) -} - -module.exports = cleanUrl diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/default-opts.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/default-opts.js deleted file mode 100644 index f0847f0b507e2..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/default-opts.js +++ /dev/null @@ -1,19 +0,0 @@ -const pkg = require('../package.json') -module.exports = { - maxSockets: 12, - method: 'GET', - registry: 'https://registry.npmjs.org/', - timeout: 5 * 60 * 1000, // 5 minutes - strictSSL: true, - noProxy: process.env.NOPROXY, - userAgent: `${pkg.name - }@${ - pkg.version - }/node@${ - process.version - }+${ - process.arch - } (${ - process.platform - })`, -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/errors.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/errors.js deleted file mode 100644 index cf5ddba6f300c..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/errors.js +++ /dev/null @@ -1,80 +0,0 @@ -'use strict' - -const url = require('url') - -function packageName (href) { - try { - let basePath = new url.URL(href).pathname.slice(1) - if (!basePath.match(/^-/)) { - basePath = basePath.split('/') - var index = basePath.indexOf('_rewrite') - if (index === -1) { - index = basePath.length - 1 - } else { - index++ - } - return decodeURIComponent(basePath[index]) - } - } catch (_) { - // this is ok - } -} - -class HttpErrorBase extends Error { - constructor (method, res, body, spec) { - super() - this.name = this.constructor.name - this.headers = res.headers.raw() - this.statusCode = res.status - this.code = `E${res.status}` - this.method = method - this.uri = res.url - this.body = body - this.pkgid = spec ? spec.toString() : packageName(res.url) - } -} -module.exports.HttpErrorBase = HttpErrorBase - -class HttpErrorGeneral extends HttpErrorBase { - constructor (method, res, body, spec) { - super(method, res, body, spec) - this.message = `${res.status} ${res.statusText} - ${ - this.method.toUpperCase() - } ${ - this.spec || this.uri - }${ - (body && body.error) ? 
' - ' + body.error : '' - }` - Error.captureStackTrace(this, HttpErrorGeneral) - } -} -module.exports.HttpErrorGeneral = HttpErrorGeneral - -class HttpErrorAuthOTP extends HttpErrorBase { - constructor (method, res, body, spec) { - super(method, res, body, spec) - this.message = 'OTP required for authentication' - this.code = 'EOTP' - Error.captureStackTrace(this, HttpErrorAuthOTP) - } -} -module.exports.HttpErrorAuthOTP = HttpErrorAuthOTP - -class HttpErrorAuthIPAddress extends HttpErrorBase { - constructor (method, res, body, spec) { - super(method, res, body, spec) - this.message = 'Login is not allowed from your IP address' - this.code = 'EAUTHIP' - Error.captureStackTrace(this, HttpErrorAuthIPAddress) - } -} -module.exports.HttpErrorAuthIPAddress = HttpErrorAuthIPAddress - -class HttpErrorAuthUnknown extends HttpErrorBase { - constructor (method, res, body, spec) { - super(method, res, body, spec) - this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate') - Error.captureStackTrace(this, HttpErrorAuthUnknown) - } -} -module.exports.HttpErrorAuthUnknown = HttpErrorAuthUnknown diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/index.js deleted file mode 100644 index 23e349c5c5b96..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/lib/index.js +++ /dev/null @@ -1,247 +0,0 @@ -'use strict' - -const { HttpErrorAuthOTP } = require('./errors.js') -const checkResponse = require('./check-response.js') -const getAuth = require('./auth.js') -const fetch = require('make-fetch-happen') -const JSONStream = require('minipass-json-stream') -const npa = require('npm-package-arg') -const qs = require('querystring') -const url = require('url') -const zlib = require('minizlib') -const { Minipass } = require('minipass') - -const defaultOpts = require('./default-opts.js') - -// WhatWG URL throws if it's not fully resolved -const urlIsValid = u => { - try { - return !!new url.URL(u) - } catch (_) { - return false - } -} - -module.exports = regFetch -function regFetch (uri, /* istanbul ignore next */ opts_ = {}) { - const opts = { - ...defaultOpts, - ...opts_, - } - - // if we did not get a fully qualified URI, then we look at the registry - // config or relevant scope to resolve it. 
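// A minimal sketch (not from this patch) of the resolution step below:
// join the path onto the configured registry with exactly one slash, then
// let the WHATWG URL parser throw if the result is still not fully
// qualified.
const { URL } = require('url')
const resolveUri = (uri, registry = 'https://registry.npmjs.org/') => {
  try {
    return new URL(uri).href // already fully qualified
  } catch {
    const joined = `${registry.trim().replace(/\/?$/, '')}/${uri.trim().replace(/^\//, '')}`
    return new URL(joined).href // throws if still not a valid URL
  }
}
// resolveUri('abbrev') -> 'https://registry.npmjs.org/abbrev'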
- const uriValid = urlIsValid(uri) - let registry = opts.registry || defaultOpts.registry - if (!uriValid) { - registry = opts.registry = ( - (opts.spec && pickRegistry(opts.spec, opts)) || - opts.registry || - registry - ) - uri = `${ - registry.trim().replace(/\/?$/g, '') - }/${ - uri.trim().replace(/^\//, '') - }` - // asserts that this is now valid - new url.URL(uri) - } - - const method = opts.method || 'GET' - - // through that takes into account the scope, the prefix of `uri`, etc - const startTime = Date.now() - const auth = getAuth(uri, opts) - const headers = getHeaders(uri, auth, opts) - let body = opts.body - const bodyIsStream = Minipass.isStream(body) - const bodyIsPromise = body && - typeof body === 'object' && - typeof body.then === 'function' - - if ( - body && !bodyIsStream && !bodyIsPromise && typeof body !== 'string' && !Buffer.isBuffer(body) - ) { - headers['content-type'] = headers['content-type'] || 'application/json' - body = JSON.stringify(body) - } else if (body && !headers['content-type']) { - headers['content-type'] = 'application/octet-stream' - } - - if (opts.gzip) { - headers['content-encoding'] = 'gzip' - if (bodyIsStream) { - const gz = new zlib.Gzip() - body.on('error', /* istanbul ignore next: unlikely and hard to test */ - err => gz.emit('error', err)) - body = body.pipe(gz) - } else if (!bodyIsPromise) { - body = new zlib.Gzip().end(body).concat() - } - } - - const parsed = new url.URL(uri) - - if (opts.query) { - const q = typeof opts.query === 'string' ? qs.parse(opts.query) - : opts.query - - Object.keys(q).forEach(key => { - if (q[key] !== undefined) { - parsed.searchParams.set(key, q[key]) - } - }) - uri = url.format(parsed) - } - - if (parsed.searchParams.get('write') === 'true' && method === 'GET') { - // do not cache, because this GET is fetching a rev that will be - // used for a subsequent PUT or DELETE, so we need to conditionally - // update cache. - opts.offline = false - opts.preferOffline = false - opts.preferOnline = true - } - - const doFetch = async fetchBody => { - const p = fetch(uri, { - agent: opts.agent, - algorithms: opts.algorithms, - body: fetchBody, - cache: getCacheMode(opts), - cachePath: opts.cache, - ca: opts.ca, - cert: auth.cert || opts.cert, - headers, - integrity: opts.integrity, - key: auth.key || opts.key, - localAddress: opts.localAddress, - maxSockets: opts.maxSockets, - memoize: opts.memoize, - method: method, - noProxy: opts.noProxy, - proxy: opts.httpsProxy || opts.proxy, - retry: opts.retry ? 
opts.retry : { - retries: opts.fetchRetries, - factor: opts.fetchRetryFactor, - minTimeout: opts.fetchRetryMintimeout, - maxTimeout: opts.fetchRetryMaxtimeout, - }, - strictSSL: opts.strictSSL, - timeout: opts.timeout || 30 * 1000, - }).then(res => checkResponse({ - method, - uri, - res, - registry, - startTime, - auth, - opts, - })) - - if (typeof opts.otpPrompt === 'function') { - return p.catch(async er => { - if (er instanceof HttpErrorAuthOTP) { - let otp - // if otp fails to complete, we fail with that failure - try { - otp = await opts.otpPrompt() - } catch (_) { - // ignore this error - } - // if no otp provided, or otpPrompt errored, throw the original HTTP error - if (!otp) { - throw er - } - return regFetch(uri, { ...opts, otp }) - } - throw er - }) - } else { - return p - } - } - - return Promise.resolve(body).then(doFetch) -} - -module.exports.json = fetchJSON -function fetchJSON (uri, opts) { - return regFetch(uri, opts).then(res => res.json()) -} - -module.exports.json.stream = fetchJSONStream -function fetchJSONStream (uri, jsonPath, - /* istanbul ignore next */ opts_ = {}) { - const opts = { ...defaultOpts, ...opts_ } - const parser = JSONStream.parse(jsonPath, opts.mapJSON) - regFetch(uri, opts).then(res => - res.body.on('error', - /* istanbul ignore next: unlikely and difficult to test */ - er => parser.emit('error', er)).pipe(parser) - ).catch(er => parser.emit('error', er)) - return parser -} - -module.exports.pickRegistry = pickRegistry -function pickRegistry (spec, opts = {}) { - spec = npa(spec) - let registry = spec.scope && - opts[spec.scope.replace(/^@?/, '@') + ':registry'] - - if (!registry && opts.scope) { - registry = opts[opts.scope.replace(/^@?/, '@') + ':registry'] - } - - if (!registry) { - registry = opts.registry || defaultOpts.registry - } - - return registry -} - -function getCacheMode (opts) { - return opts.offline ? 'only-if-cached' - : opts.preferOffline ? 'force-cache' - : opts.preferOnline ? 
'no-cache' - : 'default' -} - -function getHeaders (uri, auth, opts) { - const headers = Object.assign({ - 'user-agent': opts.userAgent, - }, opts.headers || {}) - - if (opts.authType) { - headers['npm-auth-type'] = opts.authType - } - - if (opts.scope) { - headers['npm-scope'] = opts.scope - } - - if (opts.npmSession) { - headers['npm-session'] = opts.npmSession - } - - if (opts.npmCommand) { - headers['npm-command'] = opts.npmCommand - } - - // If a tarball is hosted on a different place than the manifest, only send - // credentials on `alwaysAuth` - if (auth.token) { - headers.authorization = `Bearer ${auth.token}` - } else if (auth.auth) { - headers.authorization = `Basic ${auth.auth}` - } - - if (opts.otp) { - headers['npm-otp'] = opts.otp - } - - return headers -} - -module.exports.cleanUrl = require('./clean-url.js') diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/package.json deleted file mode 100644 index 63a44725886cc..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch/package.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "name": "npm-registry-fetch", - "version": "14.0.5", - "description": "Fetch-based http client for use with npm registry APIs", - "main": "lib", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "eslint": "eslint", - "lint": "eslint \"**/*.js\"", - "lintfix": "npm run lint -- --fix", - "test": "tap", - "posttest": "npm run lint", - "npmclilint": "npmcli-lint", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "snap": "tap", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/npm-registry-fetch.git" - }, - "keywords": [ - "npm", - "registry", - "fetch" - ], - "author": "GitHub Inc.", - "license": "ISC", - "dependencies": { - "make-fetch-happen": "^11.0.0", - "minipass": "^5.0.0", - "minipass-fetch": "^3.0.0", - "minipass-json-stream": "^1.0.1", - "minizlib": "^2.1.2", - "npm-package-arg": "^10.0.0", - "proc-log": "^3.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", - "cacache": "^17.0.0", - "nock": "^13.2.4", - "require-inject": "^1.4.4", - "ssri": "^10.0.0", - "tap": "^16.0.1" - }, - "tap": { - "check-coverage": true, - "test-ignore": "test[\\\\/](util|cache)[\\\\/]", - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.14.1", - "publish": "true" - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE deleted file mode 100644 index a03cd0ed0b338..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter, Kat Marchán, npm, Inc., and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js deleted file mode 100755 index f35b62ca71a53..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js +++ /dev/null @@ -1,158 +0,0 @@ -#!/usr/bin/env node - -const run = conf => { - const pacote = require('../') - switch (conf._[0]) { - case 'resolve': - case 'manifest': - case 'packument': - if (conf._[0] === 'resolve' && conf.long) { - return pacote.manifest(conf._[1], conf).then(mani => ({ - resolved: mani._resolved, - integrity: mani._integrity, - from: mani._from, - })) - } - return pacote[conf._[0]](conf._[1], conf) - - case 'tarball': - if (!conf._[2] || conf._[2] === '-') { - return pacote.tarball.stream(conf._[1], stream => { - stream.pipe( - conf.testStdout || - /* istanbul ignore next */ - process.stdout - ) - // make sure it resolves something falsey - return stream.promise().then(() => { - return false - }) - }, conf) - } else { - return pacote.tarball.file(conf._[1], conf._[2], conf) - } - - case 'extract': - return pacote.extract(conf._[1], conf._[2], conf) - - default: /* istanbul ignore next */ { - throw new Error(`bad command: ${conf._[0]}`) - } - } -} - -const version = require('../package.json').version -const usage = () => -`Pacote - The JavaScript Package Handler, v${version} - -Usage: - - pacote resolve - Resolve a specifier and output the fully resolved target - Returns integrity and from if '--long' flag is set. - - pacote manifest - Fetch a manifest and print to stdout - - pacote packument - Fetch a full packument and print to stdout - - pacote tarball [] - Fetch a package tarball and save to - If is missing or '-', the tarball will be streamed to stdout. - - pacote extract - Extract a package to the destination folder. - -Configuration values all match the names of configs passed to npm, or -options passed to Pacote. Additional flags for this executable: - - --long Print an object from 'resolve', including integrity and spec. - --json Print result objects as JSON rather than node's default. - (This is the default if stdout is not a TTY.) - --help -h Print this helpful text. - -For example '--cache=/path/to/folder' will use that folder as the cache. -` - -const shouldJSON = (conf, result) => - conf.json || - !process.stdout.isTTY && - conf.json === undefined && - result && - typeof result === 'object' - -const pretty = (conf, result) => - shouldJSON(conf, result) ? 
JSON.stringify(result, 0, 2) : result - -let addedLogListener = false -const main = args => { - const conf = parse(args) - if (conf.help || conf.h) { - return console.log(usage()) - } - - if (!addedLogListener) { - process.on('log', console.error) - addedLogListener = true - } - - try { - return run(conf) - .then(result => result && console.log(pretty(conf, result))) - .catch(er => { - console.error(er) - process.exit(1) - }) - } catch (er) { - console.error(er.message) - console.error(usage()) - } -} - -const parseArg = arg => { - const split = arg.slice(2).split('=') - const k = split.shift() - const v = split.join('=') - const no = /^no-/.test(k) && !v - const key = (no ? k.slice(3) : k) - .replace(/^tag$/, 'defaultTag') - .replace(/-([a-z])/g, (_, c) => c.toUpperCase()) - const value = v ? v.replace(/^~/, process.env.HOME) : !no - return { key, value } -} - -const parse = args => { - const conf = { - _: [], - cache: process.env.HOME + '/.npm/_cacache', - } - let dashdash = false - args.forEach(arg => { - if (dashdash) { - conf._.push(arg) - } else if (arg === '--') { - dashdash = true - } else if (arg === '-h') { - conf.help = true - } else if (/^--/.test(arg)) { - const { key, value } = parseArg(arg) - conf[key] = value - } else { - conf._.push(arg) - } - }) - return conf -} - -if (module === require.main) { - main(process.argv.slice(2)) -} else { - module.exports = { - main, - run, - usage, - parseArg, - parse, - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js deleted file mode 100644 index 420afc5802cb2..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js +++ /dev/null @@ -1,108 +0,0 @@ -const Fetcher = require('./fetcher.js') -const FileFetcher = require('./file.js') -const { Minipass } = require('minipass') -const tarCreateOptions = require('./util/tar-create-options.js') -const packlist = require('npm-packlist') -const tar = require('tar') -const _prepareDir = Symbol('_prepareDir') -const { resolve } = require('path') -const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson') - -const runScript = require('@npmcli/run-script') - -const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') -class DirFetcher extends Fetcher { - constructor (spec, opts) { - super(spec, opts) - // just the fully resolved filename - this.resolved = this.spec.fetchSpec - - this.tree = opts.tree || null - this.Arborist = opts.Arborist || null - } - - // exposes tarCreateOptions as public API - static tarCreateOptions (manifest) { - return tarCreateOptions(manifest) - } - - get types () { - return ['directory'] - } - - [_prepareDir] () { - return this.manifest().then(mani => { - if (!mani.scripts || !mani.scripts.prepare) { - return - } - - // we *only* run prepare. - // pre/post-pack is run by the npm CLI for publish and pack, - // but this function is *also* run when installing git deps - const stdio = this.opts.foregroundScripts ? 'inherit' : 'pipe' - - // hide the banner if silent opt is passed in, or if prepare running - // in the background. - const banner = this.opts.silent ? 
false : stdio === 'inherit' - - return runScript({ - pkg: mani, - event: 'prepare', - path: this.resolved, - stdio, - banner, - env: { - npm_package_resolved: this.resolved, - npm_package_integrity: this.integrity, - npm_package_json: resolve(this.resolved, 'package.json'), - }, - }) - }) - } - - [_tarballFromResolved] () { - if (!this.tree && !this.Arborist) { - throw new Error('DirFetcher requires either a tree or an Arborist constructor to pack') - } - - const stream = new Minipass() - stream.resolved = this.resolved - stream.integrity = this.integrity - - const { prefix, workspaces } = this.opts - - // run the prepare script, get the list of files, and tar it up - // pipe to the stream, and proxy errors the chain. - this[_prepareDir]() - .then(async () => { - if (!this.tree) { - const arb = new this.Arborist({ path: this.resolved }) - this.tree = await arb.loadActual() - } - return packlist(this.tree, { path: this.resolved, prefix, workspaces }) - }) - .then(files => tar.c(tarCreateOptions(this.package), files) - .on('error', er => stream.emit('error', er)).pipe(stream)) - .catch(er => stream.emit('error', er)) - return stream - } - - manifest () { - if (this.package) { - return Promise.resolve(this.package) - } - - return this[_readPackageJson](this.resolved + '/package.json') - .then(mani => this.package = { - ...mani, - _integrity: this.integrity && String(this.integrity), - _resolved: this.resolved, - _from: this.from, - }) - } - - packument () { - return FileFetcher.prototype.packument.apply(this) - } -} -module.exports = DirFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js deleted file mode 100644 index f961a45c7d346..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js +++ /dev/null @@ -1,505 +0,0 @@ -// This is the base class that the other fetcher types in lib -// all descend from. -// It handles the unpacking and retry logic that is shared among -// all of the other Fetcher types. - -const npa = require('npm-package-arg') -const ssri = require('ssri') -const { promisify } = require('util') -const { basename, dirname } = require('path') -const tar = require('tar') -const log = require('proc-log') -const retry = require('promise-retry') -const fs = require('fs/promises') -const fsm = require('fs-minipass') -const cacache = require('cacache') -const isPackageBin = require('./util/is-package-bin.js') -const removeTrailingSlashes = require('./util/trailing-slashes.js') -const getContents = require('@npmcli/installed-package-contents') -const readPackageJsonFast = require('read-package-json-fast') -const readPackageJson = promisify(require('read-package-json')) -const { Minipass } = require('minipass') - -const cacheDir = require('./util/cache-dir.js') - -// Private methods. -// Child classes should not have to override these. -// Users should never call them. 
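// A sketch (not from this patch) of the Symbol-keyed method pattern used
// below: plain Symbol() keys cannot be reached from outside this module,
// while Symbol.for() keys are registered globally so cooperating modules
// (here, the other fetcher files) can share them on purpose. The names in
// this sketch are hypothetical.
const _private = Symbol('_private') // module-private
const _shared = Symbol.for('example.Fetcher._shared') // deliberately shared
class Example {
  [_private] () { return 'callable only where _private is in scope' }
  [_shared] () { return 'callable via Symbol.for() from any module' }
}
// new Example()[Symbol.for('example.Fetcher._shared')]() works everywhere;
// the _private key never leaves the defining file.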
-const _extract = Symbol('_extract') -const _mkdir = Symbol('_mkdir') -const _empty = Symbol('_empty') -const _toFile = Symbol('_toFile') -const _tarxOptions = Symbol('_tarxOptions') -const _entryMode = Symbol('_entryMode') -const _istream = Symbol('_istream') -const _assertType = Symbol('_assertType') -const _tarballFromCache = Symbol('_tarballFromCache') -const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') -const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches') -const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson') - -class FetcherBase { - constructor (spec, opts) { - if (!opts || typeof opts !== 'object') { - throw new TypeError('options object is required') - } - this.spec = npa(spec, opts.where) - - this.allowGitIgnore = !!opts.allowGitIgnore - - // a bit redundant because presumably the caller already knows this, - // but it makes it easier to not have to keep track of the requested - // spec when we're dispatching thousands of these at once, and normalizing - // is nice. saveSpec is preferred if set, because it turns stuff like - // x/y#committish into github:x/y#committish. use name@rawSpec for - // registry deps so that we turn xyz and xyz@ -> xyz@ - this.from = this.spec.registry - ? `${this.spec.name}@${this.spec.rawSpec}` : this.spec.saveSpec - - this[_assertType]() - // clone the opts object so that others aren't upset when we mutate it - // by adding/modifying the integrity value. - this.opts = { ...opts } - - this.cache = opts.cache || cacheDir().cacache - this.tufCache = opts.tufCache || cacheDir().tufcache - this.resolved = opts.resolved || null - - // default to caching/verifying with sha512, that's what we usually have - // need to change this default, or start overriding it, when sha512 - // is no longer strong enough. - this.defaultIntegrityAlgorithm = opts.defaultIntegrityAlgorithm || 'sha512' - - if (typeof opts.integrity === 'string') { - this.opts.integrity = ssri.parse(opts.integrity) - } - - this.package = null - this.type = this.constructor.name - this.fmode = opts.fmode || 0o666 - this.dmode = opts.dmode || 0o777 - // we don't need a default umask, because we don't chmod files coming - // out of package tarballs. they're forced to have a mode that is - // valid, regardless of what's in the tarball entry, and then we let - // the process's umask setting do its job. but if configured, we do - // respect it. - this.umask = opts.umask || 0 - - this.preferOnline = !!opts.preferOnline - this.preferOffline = !!opts.preferOffline - this.offline = !!opts.offline - - this.before = opts.before - this.fullMetadata = this.before ? true : !!opts.fullMetadata - this.fullReadJson = !!opts.fullReadJson - if (this.fullReadJson) { - this[_readPackageJson] = readPackageJson - } else { - this[_readPackageJson] = readPackageJsonFast - } - - // rrh is a registry hostname or 'never' or 'always' - // defaults to registry.npmjs.org - this.replaceRegistryHost = (!opts.replaceRegistryHost || opts.replaceRegistryHost === 'npmjs') ? - 'registry.npmjs.org' : opts.replaceRegistryHost - - this.defaultTag = opts.defaultTag || 'latest' - this.registry = removeTrailingSlashes(opts.registry || 'https://registry.npmjs.org') - - // command to run 'prepare' scripts on directories and git dirs - // To use pacote with yarn, for example, set npmBin to 'yarn' - // and npmCliConfig with yarn's equivalents. 
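// An illustrative sketch (not from this patch) of how the prepare argv is
// assembled from the options set below; buildPrepareArgs and its defaults
// are hypothetical, but the flags mirror the npmCliConfig default.
const buildPrepareArgs = ({ npmBin = 'npm', cacheDir = '/tmp/_cacache', before } = {}) => [
  npmBin, 'install', '--force',
  `--cache=${cacheDir}`,
  ...(before ? [`--before=${before.toISOString()}`] : []),
  '--no-progress', '--no-save', '--no-audit',
  '--include=dev', '--include=peer', '--include=optional',
  '--no-package-lock-only', '--no-dry-run',
]
// buildPrepareArgs({ before: new Date('2023-01-01') }) yields the argv run
// in the checked-out directory to install deps before packing.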
- this.npmBin = opts.npmBin || 'npm' - - // command to install deps for preparing - this.npmInstallCmd = opts.npmInstallCmd || ['install', '--force'] - - // XXX fill more of this in based on what we know from this.opts - // we explicitly DO NOT fill in --tag, though, since we are often - // going to be packing in the context of a publish, which may set - // a dist-tag, but certainly wants to keep defaulting to latest. - this.npmCliConfig = opts.npmCliConfig || [ - `--cache=${dirname(this.cache)}`, - `--prefer-offline=${!!this.preferOffline}`, - `--prefer-online=${!!this.preferOnline}`, - `--offline=${!!this.offline}`, - ...(this.before ? [`--before=${this.before.toISOString()}`] : []), - '--no-progress', - '--no-save', - '--no-audit', - // override any omit settings from the environment - '--include=dev', - '--include=peer', - '--include=optional', - // we need the actual things, not just the lockfile - '--no-package-lock-only', - '--no-dry-run', - ] - } - - get integrity () { - return this.opts.integrity || null - } - - set integrity (i) { - if (!i) { - return - } - - i = ssri.parse(i) - const current = this.opts.integrity - - // do not ever update an existing hash value, but do - // merge in NEW algos and hashes that we don't already have. - if (current) { - current.merge(i) - } else { - this.opts.integrity = i - } - } - - get notImplementedError () { - return new Error('not implemented in this fetcher type: ' + this.type) - } - - // override in child classes - // Returns a Promise that resolves to this.resolved string value - resolve () { - return this.resolved ? Promise.resolve(this.resolved) - : Promise.reject(this.notImplementedError) - } - - packument () { - return Promise.reject(this.notImplementedError) - } - - // override in child class - // returns a manifest containing: - // - name - // - version - // - _resolved - // - _integrity - // - plus whatever else was in there (corgi, full metadata, or pj file) - manifest () { - return Promise.reject(this.notImplementedError) - } - - // private, should be overridden. - // Note that they should *not* calculate or check integrity or cache, - // but *just* return the raw tarball data stream. - [_tarballFromResolved] () { - throw this.notImplementedError - } - - // public, should not be overridden - tarball () { - return this.tarballStream(stream => stream.concat().then(data => { - data.integrity = this.integrity && String(this.integrity) - data.resolved = this.resolved - data.from = this.from - return data - })) - } - - // private - // Note: cacache will raise a EINTEGRITY error if the integrity doesn't match - [_tarballFromCache] () { - return cacache.get.stream.byDigest(this.cache, this.integrity, this.opts) - } - - get [_cacheFetches] () { - return true - } - - [_istream] (stream) { - // if not caching this, just return it - if (!this.opts.cache || !this[_cacheFetches]) { - // instead of creating a new integrity stream, we only piggyback on the - // provided stream's events - if (stream.hasIntegrityEmitter) { - stream.on('integrity', i => this.integrity = i) - return stream - } - - const istream = ssri.integrityStream(this.opts) - istream.on('integrity', i => this.integrity = i) - stream.on('error', err => istream.emit('error', err)) - return stream.pipe(istream) - } - - // we have to return a stream that gets ALL the data, and proxies errors, - // but then pipe from the original tarball stream into the cache as well. 
- // To do this without losing any data, and since the cacache put stream - // is not a passthrough, we have to pipe from the original stream into - // the cache AFTER we pipe into the middleStream. Since the cache stream - // has an asynchronous flush to write its contents to disk, we need to - // defer the middleStream end until the cache stream ends. - const middleStream = new Minipass() - stream.on('error', err => middleStream.emit('error', err)) - stream.pipe(middleStream, { end: false }) - const cstream = cacache.put.stream( - this.opts.cache, - `pacote:tarball:${this.from}`, - this.opts - ) - cstream.on('integrity', i => this.integrity = i) - cstream.on('error', err => stream.emit('error', err)) - stream.pipe(cstream) - - // eslint-disable-next-line promise/catch-or-return - cstream.promise().catch(() => {}).then(() => middleStream.end()) - return middleStream - } - - pickIntegrityAlgorithm () { - return this.integrity ? this.integrity.pickAlgorithm(this.opts) - : this.defaultIntegrityAlgorithm - } - - // TODO: check error class, once those are rolled out to our deps - isDataCorruptionError (er) { - return er.code === 'EINTEGRITY' || er.code === 'Z_DATA_ERROR' - } - - // override the types getter - get types () { - return false - } - - [_assertType] () { - if (this.types && !this.types.includes(this.spec.type)) { - throw new TypeError(`Wrong spec type (${ - this.spec.type - }) for ${ - this.constructor.name - }. Supported types: ${this.types.join(', ')}`) - } - } - - // We allow ENOENTs from cacache, but not anywhere else. - // An ENOENT trying to read a tgz file, for example, is Right Out. - isRetriableError (er) { - // TODO: check error class, once those are rolled out to our deps - return this.isDataCorruptionError(er) || - er.code === 'ENOENT' || - er.code === 'EISDIR' - } - - // Mostly internal, but has some uses - // Pass in a function which returns a promise - // Function will be called 1 or more times with streams that may fail. - // Retries: - // Function MUST handle errors on the stream by rejecting the promise, - // so that retry logic can pick it up and either retry or fail whatever - // promise it was making (ie, failing extraction, etc.) - // - // The return value of this method is a Promise that resolves the same - // as whatever the streamHandler resolves to. - // - // This should never be overridden by child classes, but it is public. - tarballStream (streamHandler) { - // Only short-circuit via cache if we have everything else we'll need, - // and the user has not expressed a preference for checking online. - - const fromCache = ( - !this.preferOnline && - this.integrity && - this.resolved - ) ? streamHandler(this[_tarballFromCache]()).catch(er => { - if (this.isDataCorruptionError(er)) { - log.warn('tarball', `cached data for ${ - this.spec - } (${this.integrity}) seems to be corrupted. Refreshing cache.`) - return this.cleanupCached().then(() => { - throw er - }) - } else { - throw er - } - }) : null - - const fromResolved = er => { - if (er) { - if (!this.isRetriableError(er)) { - throw er - } - log.silly('tarball', `no local data for ${ - this.spec - }. Extracting by manifest.`) - } - return this.resolve().then(() => retry(tryAgain => - streamHandler(this[_istream](this[_tarballFromResolved]())) - .catch(streamErr => { - // Most likely data integrity. A cache ENOENT error is unlikely - // here, since we're definitely not reading from the cache, but it - // IS possible that the fetch subsystem accessed the cache, and the - // entry got blown away or something. 
Try one more time to be sure. - if (this.isRetriableError(streamErr)) { - log.warn('tarball', `tarball data for ${ - this.spec - } (${this.integrity}) seems to be corrupted. Trying again.`) - return this.cleanupCached().then(() => tryAgain(streamErr)) - } - throw streamErr - }), { retries: 1, minTimeout: 0, maxTimeout: 0 })) - } - - return fromCache ? fromCache.catch(fromResolved) : fromResolved() - } - - cleanupCached () { - return cacache.rm.content(this.cache, this.integrity, this.opts) - } - - [_empty] (path) { - return getContents({ path, depth: 1 }).then(contents => Promise.all( - contents.map(entry => fs.rm(entry, { recursive: true, force: true })))) - } - - async [_mkdir] (dest) { - await this[_empty](dest) - return await fs.mkdir(dest, { recursive: true }) - } - - // extraction is always the same. the only difference is where - // the tarball comes from. - async extract (dest) { - await this[_mkdir](dest) - return this.tarballStream((tarball) => this[_extract](dest, tarball)) - } - - [_toFile] (dest) { - return this.tarballStream(str => new Promise((res, rej) => { - const writer = new fsm.WriteStream(dest) - str.on('error', er => writer.emit('error', er)) - writer.on('error', er => rej(er)) - writer.on('close', () => res({ - integrity: this.integrity && String(this.integrity), - resolved: this.resolved, - from: this.from, - })) - str.pipe(writer) - })) - } - - // don't use this[_mkdir] because we don't want to rimraf anything - async tarballFile (dest) { - const dir = dirname(dest) - await fs.mkdir(dir, { recursive: true }) - return this[_toFile](dest) - } - - [_extract] (dest, tarball) { - const extractor = tar.x(this[_tarxOptions]({ cwd: dest })) - const p = new Promise((resolve, reject) => { - extractor.on('end', () => { - resolve({ - resolved: this.resolved, - integrity: this.integrity && String(this.integrity), - from: this.from, - }) - }) - - extractor.on('error', er => { - log.warn('tar', er.message) - log.silly('tar', er) - reject(er) - }) - - tarball.on('error', er => reject(er)) - }) - - tarball.pipe(extractor) - return p - } - - // always ensure that entries are at least as permissive as our configured - // dmode/fmode, but never more permissive than the umask allows. - [_entryMode] (path, mode, type) { - const m = /Directory|GNUDumpDir/.test(type) ? this.dmode - : /File$/.test(type) ? this.fmode - : /* istanbul ignore next - should never happen in a pkg */ 0 - - // make sure package bins are executable - const exe = isPackageBin(this.package, path) ? 0o111 : 0 - // always ensure that files are read/writable by the owner - return ((mode | m) & ~this.umask) | exe | 0o600 - } - - [_tarxOptions] ({ cwd }) { - const sawIgnores = new Set() - return { - cwd, - noChmod: true, - noMtime: true, - filter: (name, entry) => { - if (/Link$/.test(entry.type)) { - return false - } - entry.mode = this[_entryMode](entry.path, entry.mode, entry.type) - // this replicates the npm pack behavior where .gitignore files - // are treated like .npmignore files, but only if a .npmignore - // file is not present. 
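// A sketch (not from this patch) of the rename rule applied below,
// mirroring npm pack: a .gitignore stands in as .npmignore unless a real
// .npmignore was already seen at the same location (entry order matters,
// since tar entries are filtered one at a time).
const { basename } = require('path')
const planIgnoreEntry = (entryPath, sawIgnores) => {
  const base = basename(entryPath)
  if (base === '.npmignore') {
    sawIgnores.add(entryPath)
    return { keep: true, path: entryPath }
  }
  if (base === '.gitignore') {
    const renamed = entryPath.replace(/\.gitignore$/, '.npmignore')
    return sawIgnores.has(renamed)
      ? { keep: false } // a genuine .npmignore already won
      : { keep: true, path: renamed }
  }
  return { keep: true, path: entryPath }
}
// planIgnoreEntry('pkg/.gitignore', new Set())
//   -> { keep: true, path: 'pkg/.npmignore' }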
- if (/File$/.test(entry.type)) { - const base = basename(entry.path) - if (base === '.npmignore') { - sawIgnores.add(entry.path) - } else if (base === '.gitignore' && !this.allowGitIgnore) { - // rename, but only if there's not already a .npmignore - const ni = entry.path.replace(/\.gitignore$/, '.npmignore') - if (sawIgnores.has(ni)) { - return false - } - entry.path = ni - } - return true - } - }, - strip: 1, - onwarn: /* istanbul ignore next - we can trust that tar logs */ - (code, msg, data) => { - log.warn('tar', code, msg) - log.silly('tar', code, msg, data) - }, - umask: this.umask, - // always ignore ownership info from tarball metadata - preserveOwner: false, - } - } -} - -module.exports = FetcherBase - -// Child classes -const GitFetcher = require('./git.js') -const RegistryFetcher = require('./registry.js') -const FileFetcher = require('./file.js') -const DirFetcher = require('./dir.js') -const RemoteFetcher = require('./remote.js') - -// Get an appropriate fetcher object from a spec and options -FetcherBase.get = (rawSpec, opts = {}) => { - const spec = npa(rawSpec, opts.where) - switch (spec.type) { - case 'git': - return new GitFetcher(spec, opts) - - case 'remote': - return new RemoteFetcher(spec, opts) - - case 'version': - case 'range': - case 'tag': - case 'alias': - return new RegistryFetcher(spec.subSpec || spec, opts) - - case 'file': - return new FileFetcher(spec, opts) - - case 'directory': - return new DirFetcher(spec, opts) - - default: - throw new TypeError('Unknown spec type: ' + spec.type) - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js deleted file mode 100644 index bf99bb86e359e..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js +++ /dev/null @@ -1,96 +0,0 @@ -const Fetcher = require('./fetcher.js') -const fsm = require('fs-minipass') -const cacache = require('cacache') -const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') -const _exeBins = Symbol('_exeBins') -const { resolve } = require('path') -const fs = require('fs') -const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson') - -class FileFetcher extends Fetcher { - constructor (spec, opts) { - super(spec, opts) - // just the fully resolved filename - this.resolved = this.spec.fetchSpec - } - - get types () { - return ['file'] - } - - manifest () { - if (this.package) { - return Promise.resolve(this.package) - } - - // have to unpack the tarball for this. - return cacache.tmp.withTmp(this.cache, this.opts, dir => - this.extract(dir) - .then(() => this[_readPackageJson](dir + '/package.json')) - .then(mani => this.package = { - ...mani, - _integrity: this.integrity && String(this.integrity), - _resolved: this.resolved, - _from: this.from, - })) - } - - [_exeBins] (pkg, dest) { - if (!pkg.bin) { - return Promise.resolve() - } - - return Promise.all(Object.keys(pkg.bin).map(k => new Promise(res => { - const script = resolve(dest, pkg.bin[k]) - // Best effort. Ignore errors here, the only result is that - // a bin script is not executable. But if it's missing or - // something, we just leave it for a later stage to trip over - // when we can provide a more useful contextual error. 
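// A promise-based sketch (not from this patch) of the best-effort chmod
// below: OR the execute bits into the existing mode and skip the call
// when nothing would change; errors are swallowed, as described above.
// Assumes pkg.bin is in its object form.
const fsp = require('fs/promises')
const { resolve: resolvePath } = require('path')
const makeBinsExecutable = async (pkg, dest) => {
  if (!pkg.bin) return
  await Promise.all(Object.values(pkg.bin).map(async rel => {
    const script = resolvePath(dest, rel)
    try {
      const st = await fsp.stat(script)
      const mode = st.mode | 0o111
      if (mode !== st.mode) await fsp.chmod(script, mode)
    } catch {
      // best effort: a missing bin surfaces as a clearer error later
    }
  }))
}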
- fs.stat(script, (er, st) => { - if (er) { - return res() - } - const mode = st.mode | 0o111 - if (mode === st.mode) { - return res() - } - fs.chmod(script, mode, res) - }) - }))) - } - - extract (dest) { - // if we've already loaded the manifest, then the super got it. - // but if not, read the unpacked manifest and chmod properly. - return super.extract(dest) - .then(result => this.package ? result - : this[_readPackageJson](dest + '/package.json').then(pkg => - this[_exeBins](pkg, dest)).then(() => result)) - } - - [_tarballFromResolved] () { - // create a read stream and return it - return new fsm.ReadStream(this.resolved) - } - - packument () { - // simulate based on manifest - return this.manifest().then(mani => ({ - name: mani.name, - 'dist-tags': { - [this.defaultTag]: mani.version, - }, - versions: { - [mani.version]: { - ...mani, - dist: { - tarball: `file:${this.resolved}`, - integrity: this.integrity && String(this.integrity), - }, - }, - }, - })) - } -} - -module.exports = FileFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js deleted file mode 100644 index 5d24f72497ec9..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js +++ /dev/null @@ -1,327 +0,0 @@ -const Fetcher = require('./fetcher.js') -const FileFetcher = require('./file.js') -const RemoteFetcher = require('./remote.js') -const DirFetcher = require('./dir.js') -const hashre = /^[a-f0-9]{40}$/ -const git = require('@npmcli/git') -const pickManifest = require('npm-pick-manifest') -const npa = require('npm-package-arg') -const { Minipass } = require('minipass') -const cacache = require('cacache') -const log = require('proc-log') -const npm = require('./util/npm.js') - -const _resolvedFromRepo = Symbol('_resolvedFromRepo') -const _resolvedFromHosted = Symbol('_resolvedFromHosted') -const _resolvedFromClone = Symbol('_resolvedFromClone') -const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') -const _addGitSha = Symbol('_addGitSha') -const addGitSha = require('./util/add-git-sha.js') -const _clone = Symbol('_clone') -const _cloneHosted = Symbol('_cloneHosted') -const _cloneRepo = Symbol('_cloneRepo') -const _setResolvedWithSha = Symbol('_setResolvedWithSha') -const _prepareDir = Symbol('_prepareDir') -const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson') - -// get the repository url. -// prefer https if there's auth, since ssh will drop that. -// otherwise, prefer ssh if available (more secure). -// We have to add the git+ back because npa suppresses it. -const repoUrl = (h, opts) => - h.sshurl && !(h.https && h.auth) && addGitPlus(h.sshurl(opts)) || - h.https && addGitPlus(h.https(opts)) - -// add git+ to the url, but only one time. 
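// A usage sketch (not from this patch) of the one-time git+ prefixing
// rule defined just below: the ^(git\+)+ anchor collapses any run of
// prefixes, so already-prefixed URLs pass through unchanged.
const demoAddGitPlus = url => url && `git+${url}`.replace(/^(git\+)+/, 'git+')
// demoAddGitPlus('ssh://git@github.com/x/y.git') -> 'git+ssh://git@github.com/x/y.git'
// demoAddGitPlus('git+https://github.com/x/y.git') -> 'git+https://github.com/x/y.git'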
-const addGitPlus = url => url && `git+${url}`.replace(/^(git\+)+/, 'git+') - -class GitFetcher extends Fetcher { - constructor (spec, opts) { - super(spec, opts) - - // we never want to compare integrity for git dependencies: npm/rfcs#525 - if (this.opts.integrity) { - delete this.opts.integrity - log.warn(`skipping integrity check for git dependency ${this.spec.fetchSpec}`) - } - - this.resolvedRef = null - if (this.spec.hosted) { - this.from = this.spec.hosted.shortcut({ noCommittish: false }) - } - - // shortcut: avoid full clone when we can go straight to the tgz - // if we have the full sha and it's a hosted git platform - if (this.spec.gitCommittish && hashre.test(this.spec.gitCommittish)) { - this.resolvedSha = this.spec.gitCommittish - // use hosted.tarball() when we shell to RemoteFetcher later - this.resolved = this.spec.hosted - ? repoUrl(this.spec.hosted, { noCommittish: false }) - : this.spec.rawSpec - } else { - this.resolvedSha = '' - } - - this.Arborist = opts.Arborist || null - } - - // just exposed to make it easier to test all the combinations - static repoUrl (hosted, opts) { - return repoUrl(hosted, opts) - } - - get types () { - return ['git'] - } - - resolve () { - // likely a hosted git repo with a sha, so get the tarball url - // but in general, no reason to resolve() more than necessary! - if (this.resolved) { - return super.resolve() - } - - // fetch the git repo and then look at the current hash - const h = this.spec.hosted - // try to use ssh, fall back to git. - return h ? this[_resolvedFromHosted](h) - : this[_resolvedFromRepo](this.spec.fetchSpec) - } - - // first try https, since that's faster and passphrase-less for - // public repos, and supports private repos when auth is provided. - // Fall back to SSH to support private repos - // NB: we always store the https url in resolved field if auth - // is present, otherwise ssh if the hosted type provides it - [_resolvedFromHosted] (hosted) { - return this[_resolvedFromRepo](hosted.https && hosted.https()) - .catch(er => { - // Throw early since we know pathspec errors will fail again if retried - if (er instanceof git.errors.GitPathspecError) { - throw er - } - const ssh = hosted.sshurl && hosted.sshurl() - // no fallthrough if we can't fall through or have https auth - if (!ssh || hosted.auth) { - throw er - } - return this[_resolvedFromRepo](ssh) - }) - } - - [_resolvedFromRepo] (gitRemote) { - // XXX make this a custom error class - if (!gitRemote) { - return Promise.reject(new Error(`No git url for ${this.spec}`)) - } - const gitRange = this.spec.gitRange - const name = this.spec.name - return git.revs(gitRemote, this.opts).then(remoteRefs => { - return gitRange ? pickManifest({ - versions: remoteRefs.versions, - 'dist-tags': remoteRefs['dist-tags'], - name, - }, gitRange, this.opts) - : this.spec.gitCommittish ? - remoteRefs.refs[this.spec.gitCommittish] || - remoteRefs.refs[remoteRefs.shas[this.spec.gitCommittish]] - : remoteRefs.refs.HEAD // no git committish, get default head - }).then(revDoc => { - // the committish provided isn't in the rev list - // things like HEAD~3 or @yesterday can land here. - if (!revDoc || !revDoc.sha) { - return this[_resolvedFromClone]() - } - - this.resolvedRef = revDoc - this.resolvedSha = revDoc.sha - this[_addGitSha](revDoc.sha) - return this.resolved - }) - } - - [_setResolvedWithSha] (withSha) { - // we haven't cloned, so a tgz download is still faster - // of course, if it's not a known host, we can't do that. - this.resolved = !this.spec.hosted ? 
withSha - : repoUrl(npa(withSha).hosted, { noCommittish: false }) - } - - // when we get the git sha, we affix it to our spec to build up - // either a git url with a hash, or a tarball download URL - [_addGitSha] (sha) { - this[_setResolvedWithSha](addGitSha(this.spec, sha)) - } - - [_resolvedFromClone] () { - // do a full or shallow clone, then look at the HEAD - // kind of wasteful, but no other option, really - return this[_clone](dir => this.resolved) - } - - [_prepareDir] (dir) { - return this[_readPackageJson](dir + '/package.json').then(mani => { - // no need if we aren't going to do any preparation. - const scripts = mani.scripts - if (!mani.workspaces && (!scripts || !( - scripts.postinstall || - scripts.build || - scripts.preinstall || - scripts.install || - scripts.prepack || - scripts.prepare))) { - return - } - - // to avoid cases where we have an cycle of git deps that depend - // on one another, we only ever do preparation for one instance - // of a given git dep along the chain of installations. - // Note that this does mean that a dependency MAY in theory end up - // trying to run its prepare script using a dependency that has not - // been properly prepared itself, but that edge case is smaller - // and less hazardous than a fork bomb of npm and git commands. - const noPrepare = !process.env._PACOTE_NO_PREPARE_ ? [] - : process.env._PACOTE_NO_PREPARE_.split('\n') - if (noPrepare.includes(this.resolved)) { - log.info('prepare', 'skip prepare, already seen', this.resolved) - return - } - noPrepare.push(this.resolved) - - // the DirFetcher will do its own preparation to run the prepare scripts - // All we have to do is put the deps in place so that it can succeed. - return npm( - this.npmBin, - [].concat(this.npmInstallCmd).concat(this.npmCliConfig), - dir, - { ...process.env, _PACOTE_NO_PREPARE_: noPrepare.join('\n') }, - { message: 'git dep preparation failed' } - ) - }) - } - - [_tarballFromResolved] () { - const stream = new Minipass() - stream.resolved = this.resolved - stream.from = this.from - - // check it out and then shell out to the DirFetcher tarball packer - this[_clone](dir => this[_prepareDir](dir) - .then(() => new Promise((res, rej) => { - if (!this.Arborist) { - throw new Error('GitFetcher requires an Arborist constructor to pack a tarball') - } - const df = new DirFetcher(`file:${dir}`, { - ...this.opts, - Arborist: this.Arborist, - resolved: null, - integrity: null, - }) - const dirStream = df[_tarballFromResolved]() - dirStream.on('error', rej) - dirStream.on('end', res) - dirStream.pipe(stream) - }))).catch( - /* istanbul ignore next: very unlikely and hard to test */ - er => stream.emit('error', er) - ) - return stream - } - - // clone a git repo into a temp folder (or fetch and unpack if possible) - // handler accepts a directory, and returns a promise that resolves - // when we're done with it, at which point, cacache deletes it - // - // TODO: after cloning, create a tarball of the folder, and add to the cache - // with cacache.put.stream(), using a key that's deterministic based on the - // spec and repo, so that we don't ever clone the same thing multiple times. 
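// A sketch (not from this patch) of the temp-dir lifecycle _clone uses
// below: cacache.tmp.withTmp provisions a directory under the cache, runs
// the handler, and removes the directory once the returned promise
// settles. The handler body here is hypothetical.
const cacache = require('cacache')
const withCloneTmp = (cachePath, handler) =>
  cacache.tmp.withTmp(cachePath, { tmpPrefix: 'git-clone' }, async tmp => {
    // ...clone, or download-and-extract the hosted tarball, into `tmp`...
    return handler(tmp) // `tmp` is deleted after this resolves
  })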
- [_clone] (handler, tarballOk = true) { - const o = { tmpPrefix: 'git-clone' } - const ref = this.resolvedSha || this.spec.gitCommittish - const h = this.spec.hosted - const resolved = this.resolved - - // can be set manually to false to fall back to actual git clone - tarballOk = tarballOk && - h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball - - return cacache.tmp.withTmp(this.cache, o, async tmp => { - // if we're resolved, and have a tarball url, shell out to RemoteFetcher - if (tarballOk) { - const nameat = this.spec.name ? `${this.spec.name}@` : '' - return new RemoteFetcher(h.tarball({ noCommittish: false }), { - ...this.opts, - allowGitIgnore: true, - pkgid: `git:${nameat}${this.resolved}`, - resolved: this.resolved, - integrity: null, // it'll always be different, if we have one - }).extract(tmp).then(() => handler(tmp), er => { - // fall back to ssh download if tarball fails - if (er.constructor.name.match(/^Http/)) { - return this[_clone](handler, false) - } else { - throw er - } - }) - } - - const sha = await ( - h ? this[_cloneHosted](ref, tmp) - : this[_cloneRepo](this.spec.fetchSpec, ref, tmp) - ) - this.resolvedSha = sha - if (!this.resolved) { - await this[_addGitSha](sha) - } - return handler(tmp) - }) - } - - // first try https, since that's faster and passphrase-less for - // public repos, and supports private repos when auth is provided. - // Fall back to SSH to support private repos - // NB: we always store the https url in resolved field if auth - // is present, otherwise ssh if the hosted type provides it - [_cloneHosted] (ref, tmp) { - const hosted = this.spec.hosted - return this[_cloneRepo](hosted.https({ noCommittish: true }), ref, tmp) - .catch(er => { - // Throw early since we know pathspec errors will fail again if retried - if (er instanceof git.errors.GitPathspecError) { - throw er - } - const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true }) - // no fallthrough if we can't fall through or have https auth - if (!ssh || hosted.auth) { - throw er - } - return this[_cloneRepo](ssh, ref, tmp) - }) - } - - [_cloneRepo] (repo, ref, tmp) { - const { opts, spec } = this - return git.clone(repo, ref, tmp, { ...opts, spec }) - } - - manifest () { - if (this.package) { - return Promise.resolve(this.package) - } - - return this.spec.hosted && this.resolved - ? 
FileFetcher.prototype.manifest.apply(this) - : this[_clone](dir => - this[_readPackageJson](dir + '/package.json') - .then(mani => this.package = { - ...mani, - _resolved: this.resolved, - _from: this.from, - })) - } - - packument () { - return FileFetcher.prototype.packument.apply(this) - } -} -module.exports = GitFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js deleted file mode 100644 index cbcbd7c92d15f..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js +++ /dev/null @@ -1,23 +0,0 @@ -const { get } = require('./fetcher.js') -const GitFetcher = require('./git.js') -const RegistryFetcher = require('./registry.js') -const FileFetcher = require('./file.js') -const DirFetcher = require('./dir.js') -const RemoteFetcher = require('./remote.js') - -module.exports = { - GitFetcher, - RegistryFetcher, - FileFetcher, - DirFetcher, - RemoteFetcher, - resolve: (spec, opts) => get(spec, opts).resolve(), - extract: (spec, dest, opts) => get(spec, opts).extract(dest), - manifest: (spec, opts) => get(spec, opts).manifest(), - tarball: (spec, opts) => get(spec, opts).tarball(), - packument: (spec, opts) => get(spec, opts).packument(), -} -module.exports.tarball.stream = (spec, handler, opts) => - get(spec, opts).tarballStream(handler) -module.exports.tarball.file = (spec, dest, opts) => - get(spec, opts).tarballFile(dest) diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js deleted file mode 100644 index 34d9b2b87f3f3..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js +++ /dev/null @@ -1,344 +0,0 @@ -const Fetcher = require('./fetcher.js') -const RemoteFetcher = require('./remote.js') -const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') -const pacoteVersion = require('../package.json').version -const removeTrailingSlashes = require('./util/trailing-slashes.js') -const rpj = require('read-package-json-fast') -const pickManifest = require('npm-pick-manifest') -const ssri = require('ssri') -const crypto = require('crypto') -const npa = require('npm-package-arg') -const { sigstore } = require('sigstore') - -// Corgis are cute. 🐕🐶 -const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*' -const fullDoc = 'application/json' - -const fetch = require('npm-registry-fetch') - -const _headers = Symbol('_headers') -class RegistryFetcher extends Fetcher { - constructor (spec, opts) { - super(spec, opts) - - // you usually don't want to fetch the same packument multiple times in - // the span of a given script or command, no matter how many pacote calls - // are made, so this lets us avoid doing that. It's only relevant for - // registry fetchers, because other types simulate their packument from - // the manifest, which they memoize on this.package, so it's very cheap - // already. - this.packumentCache = this.opts.packumentCache || null - - this.registry = fetch.pickRegistry(spec, opts) - this.packumentUrl = removeTrailingSlashes(this.registry) + '/' + - this.spec.escapedName - - const parsed = new URL(this.registry) - const regKey = `//${parsed.host}${parsed.pathname}` - // unlike the nerf-darted auth keys, this one does *not* allow a mismatch - // of trailing slashes. It must match exactly. 
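As context for the `${regKey}:_keys` lookup above and the signature check that consumes it further below, a hedged sketch of both steps in isolation; the helper names are illustrative, and the verification mirrors the deleted code's use of crypto.createVerify over `${_id}:${_integrity}`:

const crypto = require('crypto')

// signing keys come from a `//host/path:_keys` config entry; unlike the
// nerf-darted auth keys, trailing slashes here must match exactly
const registryKeysFor = (registry, opts) => {
  const parsed = new URL(registry)
  return opts[`//${parsed.host}${parsed.pathname}:_keys`]
}

const verifyRegistrySignature = (mani, signature, keys) => {
  const publicKey = (keys || []).find(k => k.keyid === signature.keyid)
  if (!publicKey) {
    throw Object.assign(new Error(`no public key for ${signature.keyid}`),
      { code: 'EMISSINGSIGNATUREKEY' })
  }
  // the signed message is the package id joined with its integrity string
  const verifier = crypto.createVerify('SHA256')
  verifier.write(`${mani._id}:${mani._integrity}`)
  verifier.end()
  return verifier.verify(publicKey.pemkey, signature.sig, 'base64')
}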
- if (this.opts[`${regKey}:_keys`]) { - this.registryKeys = this.opts[`${regKey}:_keys`] - } - - // XXX pacote <=9 has some logic to ignore opts.resolved if - // the resolved URL doesn't go to the same registry. - // Consider reproducing that here, to throw away this.resolved - // in that case. - } - - async resolve () { - // fetching the manifest sets resolved and (if present) integrity - await this.manifest() - if (!this.resolved) { - throw Object.assign( - new Error('Invalid package manifest: no `dist.tarball` field'), - { package: this.spec.toString() } - ) - } - return this.resolved - } - - [_headers] () { - return { - // npm will override UA, but ensure that we always send *something* - 'user-agent': this.opts.userAgent || - `pacote/${pacoteVersion} node/${process.version}`, - ...(this.opts.headers || {}), - 'pacote-version': pacoteVersion, - 'pacote-req-type': 'packument', - 'pacote-pkg-id': `registry:${this.spec.name}`, - accept: this.fullMetadata ? fullDoc : corgiDoc, - } - } - - async packument () { - // note this might be either an in-flight promise for a request, - // or the actual packument, but we never want to make more than - // one request at a time for the same thing regardless. - if (this.packumentCache && this.packumentCache.has(this.packumentUrl)) { - return this.packumentCache.get(this.packumentUrl) - } - - // npm-registry-fetch the packument - // set the appropriate header for corgis if fullMetadata isn't set - // return the res.json() promise - try { - const res = await fetch(this.packumentUrl, { - ...this.opts, - headers: this[_headers](), - spec: this.spec, - // never check integrity for packuments themselves - integrity: null, - }) - const packument = await res.json() - packument._contentLength = +res.headers.get('content-length') - if (this.packumentCache) { - this.packumentCache.set(this.packumentUrl, packument) - } - return packument - } catch (err) { - if (this.packumentCache) { - this.packumentCache.delete(this.packumentUrl) - } - if (err.code !== 'E404' || this.fullMetadata) { - throw err - } - // possible that corgis are not supported by this registry - this.fullMetadata = true - return this.packument() - } - } - - async manifest () { - if (this.package) { - return this.package - } - - const packument = await this.packument() - let mani = await pickManifest(packument, this.spec.fetchSpec, { - ...this.opts, - defaultTag: this.defaultTag, - before: this.before, - }) - mani = rpj.normalize(mani) - /* XXX add ETARGET and E403 revalidation of cached packuments here */ - - // add _resolved and _integrity from dist object - const { dist } = mani - if (dist) { - this.resolved = mani._resolved = dist.tarball - mani._from = this.from - const distIntegrity = dist.integrity ? ssri.parse(dist.integrity) - : dist.shasum ? ssri.fromHex(dist.shasum, 'sha1', { ...this.opts }) - : null - if (distIntegrity) { - if (this.integrity && !this.integrity.match(distIntegrity)) { - // only bork if they have algos in common. - // otherwise we end up breaking if we have saved a sha512 - // previously for the tarball, but the manifest only - // provides a sha1, which is possible for older publishes. - // Otherwise, this is almost certainly a case of holding it - // wrong, and will result in weird or insecure behavior - // later on when building package tree. 
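The integrity-merging rule described in the comment above ("only bork if they have algos in common") can be isolated into a small sketch, assuming ssri Integrity objects on both sides; the helper name is illustrative:

const ssri = require('ssri')

const checkDistIntegrity = (saved, dist) => {
  // nothing saved yet, or the two agree on at least one algorithm: fine
  if (!saved || saved.match(dist)) {
    return
  }
  // they disagree; only fail hard if they share an algorithm, so a saved
  // sha512 does not conflict with an old publish that only carries sha1
  for (const algo of Object.keys(saved)) {
    if (dist[algo]) {
      throw Object.assign(new Error(
        `Integrity checksum failed when using ${algo}: ` +
        `wanted ${saved} but got ${dist}.`
      ), { code: 'EINTEGRITY' })
    }
  }
}

// usage sketch: disjoint algorithms pass, shared-but-different throws
// checkDistIntegrity(ssri.parse('sha512-aaa...'), ssri.fromHex('abc123...', 'sha1'))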
- for (const algo of Object.keys(this.integrity)) { - if (distIntegrity[algo]) { - throw Object.assign(new Error( - `Integrity checksum failed when using ${algo}: ` + - `wanted ${this.integrity} but got ${distIntegrity}.` - ), { code: 'EINTEGRITY' }) - } - } - } - // made it this far, the integrity is worthwhile. accept it. - // the setter here will take care of merging it into what we already - // had. - this.integrity = distIntegrity - } - } - if (this.integrity) { - mani._integrity = String(this.integrity) - if (dist.signatures) { - if (this.opts.verifySignatures) { - // validate and throw on error, then set _signatures - const message = `${mani._id}:${mani._integrity}` - for (const signature of dist.signatures) { - const publicKey = this.registryKeys && - this.registryKeys.filter(key => (key.keyid === signature.keyid))[0] - if (!publicKey) { - throw Object.assign(new Error( - `${mani._id} has a registry signature with keyid: ${signature.keyid} ` + - 'but no corresponding public key can be found' - ), { code: 'EMISSINGSIGNATUREKEY' }) - } - const validPublicKey = - !publicKey.expires || (Date.parse(publicKey.expires) > Date.now()) - if (!validPublicKey) { - throw Object.assign(new Error( - `${mani._id} has a registry signature with keyid: ${signature.keyid} ` + - `but the corresponding public key has expired ${publicKey.expires}` - ), { code: 'EEXPIREDSIGNATUREKEY' }) - } - const verifier = crypto.createVerify('SHA256') - verifier.write(message) - verifier.end() - const valid = verifier.verify( - publicKey.pemkey, - signature.sig, - 'base64' - ) - if (!valid) { - throw Object.assign(new Error( - `${mani._id} has an invalid registry signature with ` + - `keyid: ${publicKey.keyid} and signature: ${signature.sig}` - ), { - code: 'EINTEGRITYSIGNATURE', - keyid: publicKey.keyid, - signature: signature.sig, - resolved: mani._resolved, - integrity: mani._integrity, - }) - } - } - mani._signatures = dist.signatures - } else { - mani._signatures = dist.signatures - } - } - - if (dist.attestations) { - if (this.opts.verifyAttestations) { - // Always fetch attestations from the current registry host - const attestationsPath = new URL(dist.attestations.url).pathname - const attestationsUrl = removeTrailingSlashes(this.registry) + attestationsPath - const res = await fetch(attestationsUrl, { - ...this.opts, - // disable integrity check for attestations json payload, we check the - // integrity in the verification steps below - integrity: null, - }) - const { attestations } = await res.json() - const bundles = attestations.map(({ predicateType, bundle }) => { - const statement = JSON.parse( - Buffer.from(bundle.dsseEnvelope.payload, 'base64').toString('utf8') - ) - const keyid = bundle.dsseEnvelope.signatures[0].keyid - const signature = bundle.dsseEnvelope.signatures[0].sig - - return { - predicateType, - bundle, - statement, - keyid, - signature, - } - }) - - const attestationKeyIds = bundles.map((b) => b.keyid).filter((k) => !!k) - const attestationRegistryKeys = (this.registryKeys || []) - .filter(key => attestationKeyIds.includes(key.keyid)) - if (!attestationRegistryKeys.length) { - throw Object.assign(new Error( - `${mani._id} has attestations but no corresponding public key(s) can be found` - ), { code: 'EMISSINGSIGNATUREKEY' }) - } - - for (const { predicateType, bundle, keyid, signature, statement } of bundles) { - const publicKey = attestationRegistryKeys.find(key => key.keyid === keyid) - // Publish attestations have a keyid set and a valid public key must be found - if (keyid) { - if 
(!publicKey) { - throw Object.assign(new Error( - `${mani._id} has attestations with keyid: ${keyid} ` + - 'but no corresponding public key can be found' - ), { code: 'EMISSINGSIGNATUREKEY' }) - } - - const validPublicKey = - !publicKey.expires || (Date.parse(publicKey.expires) > Date.now()) - if (!validPublicKey) { - throw Object.assign(new Error( - `${mani._id} has attestations with keyid: ${keyid} ` + - `but the corresponding public key has expired ${publicKey.expires}` - ), { code: 'EEXPIREDSIGNATUREKEY' }) - } - } - - const subject = { - name: statement.subject[0].name, - sha512: statement.subject[0].digest.sha512, - } - - // Only type 'version' can be turned into a PURL - const purl = this.spec.type === 'version' ? npa.toPurl(this.spec) : this.spec - // Verify the statement subject matches the package, version - if (subject.name !== purl) { - throw Object.assign(new Error( - `${mani._id} package name and version (PURL): ${purl} ` + - `doesn't match what was signed: ${subject.name}` - ), { code: 'EATTESTATIONSUBJECT' }) - } - - // Verify the statement subject matches the tarball integrity - const integrityHexDigest = ssri.parse(this.integrity).hexDigest() - if (subject.sha512 !== integrityHexDigest) { - throw Object.assign(new Error( - `${mani._id} package integrity (hex digest): ` + - `${integrityHexDigest} ` + - `doesn't match what was signed: ${subject.sha512}` - ), { code: 'EATTESTATIONSUBJECT' }) - } - - try { - // Provenance attestations are signed with a signing certificate - // (including the key) so we don't need to return a public key. - // - // Publish attestations are signed with a keyid so we need to - // specify a public key from the keys endpoint: `registry-host.tld/-/npm/v1/keys` - const options = { - tufCachePath: this.tufCache, - keySelector: publicKey ? 
() => publicKey.pemkey : undefined, - } - await sigstore.verify(bundle, null, options) - } catch (e) { - throw Object.assign(new Error( - `${mani._id} failed to verify attestation: ${e.message}` - ), { - code: 'EATTESTATIONVERIFY', - predicateType, - keyid, - signature, - resolved: mani._resolved, - integrity: mani._integrity, - }) - } - } - mani._attestations = dist.attestations - } else { - mani._attestations = dist.attestations - } - } - } - - this.package = mani - return this.package - } - - [_tarballFromResolved] () { - // we use a RemoteFetcher to get the actual tarball stream - return new RemoteFetcher(this.resolved, { - ...this.opts, - resolved: this.resolved, - pkgid: `registry:${this.spec.name}@${this.resolved}`, - })[_tarballFromResolved]() - } - - get types () { - return [ - 'tag', - 'version', - 'range', - ] - } -} -module.exports = RegistryFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js deleted file mode 100644 index fd617459fb031..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js +++ /dev/null @@ -1,91 +0,0 @@ -const Fetcher = require('./fetcher.js') -const FileFetcher = require('./file.js') -const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') -const pacoteVersion = require('../package.json').version -const fetch = require('npm-registry-fetch') -const { Minipass } = require('minipass') - -const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches') -const _headers = Symbol('_headers') -class RemoteFetcher extends Fetcher { - constructor (spec, opts) { - super(spec, opts) - this.resolved = this.spec.fetchSpec - const resolvedURL = new URL(this.resolved) - if (this.replaceRegistryHost !== 'never' - && (this.replaceRegistryHost === 'always' - || this.replaceRegistryHost === resolvedURL.host)) { - this.resolved = new URL(resolvedURL.pathname, this.registry).href - } - - // nam is a fermented pork sausage that is good to eat - const nameat = this.spec.name ? `${this.spec.name}@` : '' - this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}` - } - - // Don't need to cache tarball fetches in pacote, because make-fetch-happen - // will write into cacache anyway. - get [_cacheFetches] () { - return false - } - - [_tarballFromResolved] () { - const stream = new Minipass() - stream.hasIntegrityEmitter = true - - const fetchOpts = { - ...this.opts, - headers: this[_headers](), - spec: this.spec, - integrity: this.integrity, - algorithms: [this.pickIntegrityAlgorithm()], - } - - // eslint-disable-next-line promise/always-return - fetch(this.resolved, fetchOpts).then(res => { - res.body.on('error', - /* istanbul ignore next - exceedingly rare and hard to simulate */ - er => stream.emit('error', er) - ) - - res.body.on('integrity', i => { - this.integrity = i - stream.emit('integrity', i) - }) - - res.body.pipe(stream) - }).catch(er => stream.emit('error', er)) - - return stream - } - - [_headers] () { - return { - // npm will override this, but ensure that we always send *something* - 'user-agent': this.opts.userAgent || - `pacote/${pacoteVersion} node/${process.version}`, - ...(this.opts.headers || {}), - 'pacote-version': pacoteVersion, - 'pacote-req-type': 'tarball', - 'pacote-pkg-id': this.pkgid, - ...(this.integrity ? 
{ 'pacote-integrity': String(this.integrity) } - : {}), - ...(this.opts.headers || {}), - } - } - - get types () { - return ['remote'] - } - - // getting a packument and/or manifest is the same as with a file: spec. - // unpack the tarball stream, and then read from the package.json file. - packument () { - return FileFetcher.prototype.packument.apply(this) - } - - manifest () { - return FileFetcher.prototype.manifest.apply(this) - } -} -module.exports = RemoteFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js deleted file mode 100644 index 843fe5b600caf..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js +++ /dev/null @@ -1,15 +0,0 @@ -// add a sha to a git remote url spec -const addGitSha = (spec, sha) => { - if (spec.hosted) { - const h = spec.hosted - const opt = { noCommittish: true } - const base = h.https && h.auth ? h.https(opt) : h.shortcut(opt) - - return `${base}#${sha}` - } else { - // don't use new URL for this, because it doesn't handle scp urls - return spec.rawSpec.replace(/#.*$/, '') + `#${sha}` - } -} - -module.exports = addGitSha diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js deleted file mode 100644 index ac83b1793f199..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js +++ /dev/null @@ -1,15 +0,0 @@ -const os = require('os') -const { resolve } = require('path') - -module.exports = (fakePlatform = false) => { - const temp = os.tmpdir() - const uidOrPid = process.getuid ? process.getuid() : process.pid - const home = os.homedir() || resolve(temp, 'npm-' + uidOrPid) - const platform = fakePlatform || process.platform - const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm' - const cacheRoot = (platform === 'win32' && process.env.LOCALAPPDATA) || home - return { - cacache: resolve(cacheRoot, cacheExtra, '_cacache'), - tufcache: resolve(cacheRoot, cacheExtra, '_tuf'), - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js deleted file mode 100644 index 49a3f73f537ce..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js +++ /dev/null @@ -1,25 +0,0 @@ -// Function to determine whether a path is in the package.bin set. -// Used to prevent issues when people publish a package from a -// windows machine, and then install with --no-bin-links. -// -// Note: this is not possible in remote or file fetchers, since -// we don't have the manifest until AFTER we've unpacked. But the -// main use case is registry fetching with git a distant second, -// so that's an acceptable edge case to not handle. - -const binObj = (name, bin) => - typeof bin === 'string' ? { [name]: bin } : bin - -const hasBin = (pkg, path) => { - const bin = binObj(pkg.name, pkg.bin) - const p = path.replace(/^[^\\/]*\//, '') - for (const kv of Object.entries(bin)) { - if (kv[1] === p) { - return true - } - } - return false -} - -module.exports = (pkg, path) => - pkg && pkg.bin ? 
hasBin(pkg, path) : false diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js deleted file mode 100644 index a3005c255565f..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js +++ /dev/null @@ -1,14 +0,0 @@ -// run an npm command -const spawn = require('@npmcli/promise-spawn') - -module.exports = (npmBin, npmCommand, cwd, env, extra) => { - const isJS = npmBin.endsWith('.js') - const cmd = isJS ? process.execPath : npmBin - const args = (isJS ? [npmBin] : []).concat(npmCommand) - // when installing to run the `prepare` script for a git dep, we need - // to ensure that we don't run into a cycle of checking out packages - // in temp directories. this lets us link previously-seen repos that - // are also being prepared. - - return spawn(cmd, args, { cwd, env }, extra) -} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js deleted file mode 100644 index d070f0f7ba2d4..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js +++ /dev/null @@ -1,31 +0,0 @@ -const isPackageBin = require('./is-package-bin.js') - -const tarCreateOptions = manifest => ({ - cwd: manifest._resolved, - prefix: 'package/', - portable: true, - gzip: { - // forcing the level to 9 seems to avoid some - // platform specific optimizations that cause - // integrity mismatch errors due to differing - // end results after compression - level: 9, - }, - - // ensure that package bins are always executable - // Note that npm-packlist is already filtering out - // anything that is not a regular file, ignored by - // .npmignore or package.json "files", etc. - filter: (path, stat) => { - if (isPackageBin(manifest, path)) { - stat.mode |= 0o111 - } - return true - }, - - // Provide a specific date in the 1980s for the benefit of zip, - // which is confounded by files dated at the Unix epoch 0. 
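Putting the utilities above together (the is-package-bin helper and these tar options), a usage sketch of a reproducible pack step with node-tar; the packDir name and the manifest argument shape are assumptions for illustration:

const tar = require('tar')
const isPackageBin = require('./is-package-bin.js') // helper shown above

// pack an already-prepared directory reproducibly: portable mode, a pinned
// gzip level, a fixed mtime, and executable mode forced onto package bins
const packDir = (manifest, target) => tar.create({
  cwd: manifest._resolved,
  prefix: 'package/',
  portable: true,
  gzip: { level: 9 }, // avoid platform-specific compression differences
  mtime: new Date('1985-10-26T08:15:00.000Z'), // keep zip-friendly dates
  filter: (path, stat) => {
    if (isPackageBin(manifest, path)) {
      stat.mode |= 0o111 // bins must stay executable
    }
    return true
  },
  file: target,
}, ['.'])

// usage sketch:
// packDir({ _resolved: '/tmp/pkg', name: 'x', bin: { x: 'bin/x.js' } }, 'x.tgz')
//   .then(() => console.log('packed'))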
- mtime: new Date('1985-10-26T08:15:00.000Z'), -}) - -module.exports = tarCreateOptions diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js deleted file mode 100644 index c50cb6173b92e..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js +++ /dev/null @@ -1,10 +0,0 @@ -const removeTrailingSlashes = (input) => { - // in order to avoid regexp redos detection - let output = input - while (output.endsWith('/')) { - output = output.slice(0, -1) - } - return output -} - -module.exports = removeTrailingSlashes diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json deleted file mode 100644 index bc8d984704af5..0000000000000 --- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "name": "pacote", - "version": "15.2.0", - "description": "JavaScript package downloader", - "author": "GitHub Inc.", - "bin": { - "pacote": "lib/bin.js" - }, - "license": "ISC", - "main": "lib/index.js", - "scripts": { - "test": "tap", - "snap": "tap", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" - }, - "tap": { - "timeout": 300, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "devDependencies": { - "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0", - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", - "hosted-git-info": "^6.0.0", - "mutate-fs": "^2.1.1", - "nock": "^13.2.4", - "npm-registry-mock": "^1.3.2", - "tap": "^16.0.1" - }, - "files": [ - "bin/", - "lib/" - ], - "keywords": [ - "packages", - "npm", - "git" - ], - "dependencies": { - "@npmcli/git": "^4.0.0", - "@npmcli/installed-package-contents": "^2.0.1", - "@npmcli/promise-spawn": "^6.0.1", - "@npmcli/run-script": "^6.0.0", - "cacache": "^17.0.0", - "fs-minipass": "^3.0.0", - "minipass": "^5.0.0", - "npm-package-arg": "^10.0.0", - "npm-packlist": "^7.0.0", - "npm-pick-manifest": "^8.0.0", - "npm-registry-fetch": "^14.0.0", - "proc-log": "^3.0.0", - "promise-retry": "^2.0.1", - "read-package-json": "^6.0.0", - "read-package-json-fast": "^3.0.0", - "sigstore": "^1.3.0", - "ssri": "^10.0.0", - "tar": "^6.1.11" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/pacote.git" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.14.1", - "windowsCI": false, - "publish": "true" - } -} diff --git a/node_modules/@npmcli/metavuln-calculator/package.json b/node_modules/@npmcli/metavuln-calculator/package.json index 105254e168ee8..8db917ab9524e 100644 --- a/node_modules/@npmcli/metavuln-calculator/package.json +++ b/node_modules/@npmcli/metavuln-calculator/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/metavuln-calculator", - "version": "6.0.0", + "version": "6.0.1", "main": "lib/index.js", "files": [ "bin/", @@ -41,7 +41,7 @@ "dependencies": { "cacache": "^17.0.0", "json-parse-even-better-errors": "^3.0.0", - "pacote": "^15.0.0", + "pacote": "^16.0.0", "semver": "^7.3.5" }, "engines": { diff --git a/package-lock.json b/package-lock.json index 0cb2e813fea21..421762d3106f8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2460,100 +2460,19 @@ } }, "node_modules/@npmcli/metavuln-calculator": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-6.0.0.tgz", - "integrity": "sha512-h3zA2YSo7H3ZV1W4ZvlDTLaAbBwyOs6HEYhxrhl25Wtl49P7dLb8V2uFUb3dFZ8e4Ic+iF1cRMMWq9ATriYVqg==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-6.0.1.tgz", + "integrity": "sha512-S7Mgb2gizh3LK+VEMYbPfIwJNaEnZuFGwNBAGkXSjvBqkU8rx/y6L14dMZjAIgS4st2vgkWs1bWKHi8mWkl41Q==", "dependencies": { "cacache": "^17.0.0", "json-parse-even-better-errors": "^3.0.0", - "pacote": "^15.0.0", + "pacote": "^16.0.0", "semver": "^7.3.5" }, "engines": { "node": "^16.13.0 || >=18.0.0" } }, - "node_modules/@npmcli/metavuln-calculator/node_modules/make-fetch-happen": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz", - "integrity": "sha512-rLWS7GCSTcEujjVBs2YqG7Y4643u8ucvCJeSRqiLYhesrDuzeuFIk37xREzAsfQaqzl8b9rNCE4m6J8tvX4Q8w==", - "dependencies": { - "agentkeepalive": "^4.2.1", - "cacache": "^17.0.0", - "http-cache-semantics": "^4.1.1", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.0", - "is-lambda": "^1.0.1", - "lru-cache": "^7.7.1", - "minipass": "^5.0.0", - "minipass-fetch": "^3.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "promise-retry": "^2.0.1", - "socks-proxy-agent": "^7.0.0", - "ssri": "^10.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/metavuln-calculator/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/@npmcli/metavuln-calculator/node_modules/npm-registry-fetch": { - "version": "14.0.5", - "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-14.0.5.tgz", - "integrity": "sha512-kIDMIo4aBm6xg7jOttupWZamsZRkAqMqwqqbVXnUqstY5+tapvv6bkH/qMR76jdgV+YljEUCyWx3hRYMrJiAgA==", - "dependencies": { - "make-fetch-happen": "^11.0.0", - "minipass": "^5.0.0", - "minipass-fetch": "^3.0.0", - "minipass-json-stream": "^1.0.1", - "minizlib": "^2.1.2", - "npm-package-arg": "^10.0.0", - "proc-log": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/metavuln-calculator/node_modules/pacote": { - "version": "15.2.0", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-15.2.0.tgz", - "integrity": 
"sha512-rJVZeIwHTUta23sIZgEIM62WYwbmGbThdbnkt81ravBplQv+HjyroqnLRNH2+sLJHcGZmLRmhPwACqhfTcOmnA==", - "dependencies": { - "@npmcli/git": "^4.0.0", - "@npmcli/installed-package-contents": "^2.0.1", - "@npmcli/promise-spawn": "^6.0.1", - "@npmcli/run-script": "^6.0.0", - "cacache": "^17.0.0", - "fs-minipass": "^3.0.0", - "minipass": "^5.0.0", - "npm-package-arg": "^10.0.0", - "npm-packlist": "^7.0.0", - "npm-pick-manifest": "^8.0.0", - "npm-registry-fetch": "^14.0.0", - "proc-log": "^3.0.0", - "promise-retry": "^2.0.1", - "read-package-json": "^6.0.0", - "read-package-json-fast": "^3.0.0", - "sigstore": "^1.3.0", - "ssri": "^10.0.0", - "tar": "^6.1.11" - }, - "bin": { - "pacote": "lib/bin.js" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, "node_modules/@npmcli/mock-globals": { "resolved": "mock-globals", "link": true @@ -15815,7 +15734,7 @@ "@npmcli/fs": "^3.1.0", "@npmcli/installed-package-contents": "^2.0.2", "@npmcli/map-workspaces": "^3.0.2", - "@npmcli/metavuln-calculator": "^6.0.0", + "@npmcli/metavuln-calculator": "^6.0.1", "@npmcli/name-from-folder": "^2.0.0", "@npmcli/node-gyp": "^3.0.0", "@npmcli/package-json": "^4.0.0", diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index 975a91f030007..55409e4266d81 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -7,7 +7,7 @@ "@npmcli/fs": "^3.1.0", "@npmcli/installed-package-contents": "^2.0.2", "@npmcli/map-workspaces": "^3.0.2", - "@npmcli/metavuln-calculator": "^6.0.0", + "@npmcli/metavuln-calculator": "^6.0.1", "@npmcli/name-from-folder": "^2.0.0", "@npmcli/node-gyp": "^3.0.0", "@npmcli/package-json": "^4.0.0", From 2f8af5596831384e5a86bbaf3d564dc0ce93c41a Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Mon, 14 Aug 2023 13:15:20 -0700 Subject: [PATCH 27/68] deps: minipass-fetch@3.0.4 --- DEPENDENCIES.md | 2 + node_modules/.gitignore | 3 - .../node_modules/minipass/LICENSE | 15 - .../node_modules/minipass/index.js | 702 ------------------ .../node_modules/minipass/index.mjs | 702 ------------------ .../node_modules/minipass/package.json | 76 -- node_modules/minipass-fetch/package.json | 8 +- package-lock.json | 17 +- 8 files changed, 10 insertions(+), 1515 deletions(-) delete mode 100644 node_modules/minipass-fetch/node_modules/minipass/LICENSE delete mode 100644 node_modules/minipass-fetch/node_modules/minipass/index.js delete mode 100644 node_modules/minipass-fetch/node_modules/minipass/index.mjs delete mode 100644 node_modules/minipass-fetch/node_modules/minipass/package.json diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md index 324dbb190ca34..99ab5f3e2e992 100644 --- a/DEPENDENCIES.md +++ b/DEPENDENCIES.md @@ -106,6 +106,7 @@ graph LR; npm-->libnpmteam; npm-->libnpmversion; npm-->make-fetch-happen; + npm-->minipass-fetch; npm-->nopt; npm-->npm-audit-report; npm-->npm-install-checks; @@ -526,6 +527,7 @@ graph LR; npm-->licensee; npm-->make-fetch-happen; npm-->minimatch; + npm-->minipass-fetch; npm-->minipass-pipeline; npm-->minipass; npm-->ms; diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 0ea07f6b415c3..c0f87b39bc957 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -144,9 +144,6 @@ /minipass-collect/node_modules/* !/minipass-collect/node_modules/minipass !/minipass-fetch -!/minipass-fetch/node_modules/ -/minipass-fetch/node_modules/* -!/minipass-fetch/node_modules/minipass !/minipass-flush !/minipass-flush/node_modules/ /minipass-flush/node_modules/* diff --git 
a/node_modules/minipass-fetch/node_modules/minipass/LICENSE b/node_modules/minipass-fetch/node_modules/minipass/LICENSE deleted file mode 100644 index 97f8e32ed82e4..0000000000000 --- a/node_modules/minipass-fetch/node_modules/minipass/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/minipass-fetch/node_modules/minipass/index.js b/node_modules/minipass-fetch/node_modules/minipass/index.js deleted file mode 100644 index ed07c17acd97b..0000000000000 --- a/node_modules/minipass-fetch/node_modules/minipass/index.js +++ /dev/null @@ -1,702 +0,0 @@ -'use strict' -const proc = - typeof process === 'object' && process - ? process - : { - stdout: null, - stderr: null, - } -const EE = require('events') -const Stream = require('stream') -const stringdecoder = require('string_decoder') -const SD = stringdecoder.StringDecoder - -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const EMITTED_ERROR = Symbol('emittedError') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFER = Symbol('buffer') -const PIPES = Symbol('pipes') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed') -// internal event when stream has an error -const ERROR = Symbol('error') -const EMITDATA = Symbol('emitData') -const EMITEND = Symbol('emitEnd') -const EMITEND2 = Symbol('emitEnd2') -const ASYNC = Symbol('async') -const ABORT = Symbol('abort') -const ABORTED = Symbol('aborted') -const SIGNAL = Symbol('signal') - -const defer = fn => Promise.resolve().then(fn) - -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = - (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') -const ITERATOR = - (doIter && Symbol.iterator) || Symbol('iterator not implemented') - -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. 
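Before the deleted implementation continues below, one behavior is worth a concrete sketch: as the comment above says, "endish" events are re-emitted for listeners attached after the fact. Assuming the package's CJS entry point, this is observable as:

const { Minipass } = require('minipass')

const mp = new Minipass({ encoding: 'utf8' })
mp.on('data', c => console.log('data:', c)) // attaching a consumer starts the flow
mp.end('hello') // 'end' fires here, since a consumer is attached

// an 'end' listener added after the stream already ended still runs once
mp.on('end', () => console.log('late end listener still fires'))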
-const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' - -const isArrayBuffer = b => - b instanceof ArrayBuffer || - (typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0) - -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) - -class Pipe { - constructor(src, dest, opts) { - this.src = src - this.dest = dest - this.opts = opts - this.ondrain = () => src[RESUME]() - dest.on('drain', this.ondrain) - } - unpipe() { - this.dest.removeListener('drain', this.ondrain) - } - // istanbul ignore next - only here for the prototype - proxyErrors() {} - end() { - this.unpipe() - if (this.opts.end) this.dest.end() - } -} - -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors) - super.unpipe() - } - constructor(src, dest, opts) { - super(src, dest, opts) - this.proxyErrors = er => dest.emit('error', er) - src.on('error', this.proxyErrors) - } -} - -class Minipass extends Stream { - constructor(options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this[PIPES] = [] - this[BUFFER] = [] - this[OBJECTMODE] = (options && options.objectMode) || false - if (this[OBJECTMODE]) this[ENCODING] = null - else this[ENCODING] = (options && options.encoding) || null - if (this[ENCODING] === 'buffer') this[ENCODING] = null - this[ASYNC] = (options && !!options.async) || false - this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this[EMITTED_ERROR] = null - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) - } - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) - } - this[SIGNAL] = options && options.signal - this[ABORTED] = false - if (this[SIGNAL]) { - this[SIGNAL].addEventListener('abort', () => this[ABORT]()) - if (this[SIGNAL].aborted) { - this[ABORT]() - } - } - } - - get bufferLength() { - return this[BUFFERLENGTH] - } - - get encoding() { - return this[ENCODING] - } - set encoding(enc) { - if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') - - if ( - this[ENCODING] && - enc !== this[ENCODING] && - ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) - ) - throw new Error('cannot change encoding') - - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? 
new SD(enc) : null - if (this[BUFFER].length) - this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) - } - - this[ENCODING] = enc - } - - setEncoding(enc) { - this.encoding = enc - } - - get objectMode() { - return this[OBJECTMODE] - } - set objectMode(om) { - this[OBJECTMODE] = this[OBJECTMODE] || !!om - } - - get ['async']() { - return this[ASYNC] - } - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a - } - - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true - this.emit('abort', this[SIGNAL].reason) - this.destroy(this[SIGNAL].reason) - } - - get aborted() { - return this[ABORTED] - } - set aborted(_) {} - - write(chunk, encoding, cb) { - if (this[ABORTED]) return false - if (this[EOF]) throw new Error('write after end') - - if (this[DESTROYED]) { - this.emit( - 'error', - Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - ) - ) - return true - } - - if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') - - if (!encoding) encoding = 'utf8' - - const fn = this[ASYNC] ? defer : f => f() - - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } - - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - /* istanbul ignore if - maybe impossible? 
*/ - if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) - - if (this.flowing) this.emit('data', chunk) - else this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - - if (cb) fn(cb) - - return this.flowing - } - - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - if (cb) fn(cb) - return this.flowing - } - - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if ( - typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed) - ) { - chunk = Buffer.from(chunk, encoding) - } - - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) - - // Note: flushing CAN potentially switch us into not-flowing mode - if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) - - if (this.flowing) this.emit('data', chunk) - else this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - - if (cb) fn(cb) - - return this.flowing - } - - read(n) { - if (this[DESTROYED]) return null - - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { - this[MAYBE_EMIT_END]() - return null - } - - if (this[OBJECTMODE]) n = null - - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] - else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] - } - - const ret = this[READ](n || null, this[BUFFER][0]) - this[MAYBE_EMIT_END]() - return ret - } - - [READ](n, chunk) { - if (n === chunk.length || n === null) this[BUFFERSHIFT]() - else { - this[BUFFER][0] = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } - - this.emit('data', chunk) - - if (!this[BUFFER].length && !this[EOF]) this.emit('drain') - - return chunk - } - - end(chunk, encoding, cb) { - if (typeof chunk === 'function') (cb = chunk), (chunk = null) - if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') - if (chunk) this.write(chunk, encoding) - if (cb) this.once('end', cb) - this[EOF] = true - this.writable = false - - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
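A short usage sketch of the write-only pattern the comment above alludes to: nothing needs to be flowing when end() is called, and concat() later drains the buffer and resolves once 'end' fires (the main wrapper is illustrative):

const { Minipass } = require('minipass')

const main = async () => {
  const mp = new Minipass({ encoding: 'utf8' })
  mp.write('foo ')
  mp.end('bar') // no listeners yet: chunks just sit in the buffer
  const all = await mp.concat() // attaching a consumer starts the flow
  console.log(all) // 'foo bar'
}
main()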
- if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() - return this - } - - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) return - - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this[BUFFER].length) this[FLUSH]() - else if (this[EOF]) this[MAYBE_EMIT_END]() - else this.emit('drain') - } - - resume() { - return this[RESUME]() - } - - pause() { - this[FLOWING] = false - this[PAUSED] = true - } - - get destroyed() { - return this[DESTROYED] - } - - get flowing() { - return this[FLOWING] - } - - get paused() { - return this[PAUSED] - } - - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 - else this[BUFFERLENGTH] += chunk.length - this[BUFFER].push(chunk) - } - - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 - else this[BUFFERLENGTH] -= this[BUFFER][0].length - return this[BUFFER].shift() - } - - [FLUSH](noDrain) { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) - - if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') - } - - [FLUSHCHUNK](chunk) { - this.emit('data', chunk) - return this.flowing - } - - pipe(dest, opts) { - if (this[DESTROYED]) return - - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === proc.stdout || dest === proc.stderr) opts.end = false - else opts.end = opts.end !== false - opts.proxyErrors = !!opts.proxyErrors - - // piping an ended stream ends immediately - if (ended) { - if (opts.end) dest.end() - } else { - this[PIPES].push( - !opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts) - ) - if (this[ASYNC]) defer(() => this[RESUME]()) - else this[RESUME]() - } - - return dest - } - - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest) - if (p) { - this[PIPES].splice(this[PIPES].indexOf(p), 1) - p.unpipe() - } - } - - addListener(ev, fn) { - return this.on(ev, fn) - } - - on(ev, fn) { - const ret = super.on(ev, fn) - if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) - super.emit('readable') - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } else if (ev === 'error' && this[EMITTED_ERROR]) { - if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) - else fn.call(this, this[EMITTED_ERROR]) - } - return ret - } - - get emittedEnd() { - return this[EMITTED_END] - } - - [MAYBE_EMIT_END]() { - if ( - !this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF] - ) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) this.emit('close') - this[EMITTING_END] = false - } - } - - emit(ev, data, ...extra) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? 
defer(() => this[EMITDATA](data)) - : this[EMITDATA](data) - } else if (ev === 'end') { - return this[EMITEND]() - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) return - const ret = super.emit('close') - this.removeAllListeners('close') - return ret - } else if (ev === 'error') { - this[EMITTED_ERROR] = data - super.emit(ERROR, data) - const ret = - !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'resume') { - const ret = super.emit('resume') - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev) - this.removeAllListeners(ev) - return ret - } - - // Some other unknown event - const ret = super.emit(ev, data, ...extra) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) this.pause() - } - const ret = super.emit('data', data) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITEND]() { - if (this[EMITTED_END]) return - - this[EMITTED_END] = true - this.readable = false - if (this[ASYNC]) defer(() => this[EMITEND2]()) - else this[EMITEND2]() - } - - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end() - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data) - } - super.emit('data', data) - } - } - - for (const p of this[PIPES]) { - p.end() - } - const ret = super.emit('end') - this.removeAllListeners('end') - return ret - } - - // const all = await stream.collect() - collect() { - const buf = [] - if (!this[OBJECTMODE]) buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) buf.dataLength += c.length - }) - return p.then(() => buf) - } - - // const data = await stream.concat() - concat() { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] - ? 
buf.join('') - : Buffer.concat(buf, buf.dataLength) - ) - } - - // stream.promise().then(() => done, er => emitted error) - promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('error', er => reject(er)) - this.on('end', () => resolve()) - }) - } - - // for await (let chunk of stream) - [ASYNCITERATOR]() { - let stopped = false - const stop = () => { - this.pause() - stopped = true - return Promise.resolve({ done: true }) - } - const next = () => { - if (stopped) return stop() - const res = this.read() - if (res !== null) return Promise.resolve({ done: false, value: res }) - - if (this[EOF]) return stop() - - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - this.removeListener(DESTROYED, ondestroy) - stop() - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.removeListener(DESTROYED, ondestroy) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - this.removeListener(DESTROYED, ondestroy) - stop() - resolve({ done: true }) - } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } - - return { - next, - throw: stop, - return: stop, - [ASYNCITERATOR]() { - return this - }, - } - } - - // for (let chunk of stream) - [ITERATOR]() { - let stopped = false - const stop = () => { - this.pause() - this.removeListener(ERROR, stop) - this.removeListener(DESTROYED, stop) - this.removeListener('end', stop) - stopped = true - return { done: true } - } - - const next = () => { - if (stopped) return stop() - const value = this.read() - return value === null ? stop() : { value } - } - this.once('end', stop) - this.once(ERROR, stop) - this.once(DESTROYED, stop) - - return { - next, - throw: stop, - return: stop, - [ITERATOR]() { - return this - }, - } - } - - destroy(er) { - if (this[DESTROYED]) { - if (er) this.emit('error', er) - else this.emit(DESTROYED) - return this - } - - this[DESTROYED] = true - - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0 - this[BUFFERLENGTH] = 0 - - if (typeof this.close === 'function' && !this[CLOSED]) this.close() - - if (er) this.emit('error', er) - // if no error to emit, still reject pending promises - else this.emit(DESTROYED) - - return this - } - - static isStream(s) { - return ( - !!s && - (s instanceof Minipass || - s instanceof Stream || - (s instanceof EE && - // readable - (typeof s.pipe === 'function' || - // writable - (typeof s.write === 'function' && typeof s.end === 'function')))) - ) - } -} - -exports.Minipass = Minipass diff --git a/node_modules/minipass-fetch/node_modules/minipass/index.mjs b/node_modules/minipass-fetch/node_modules/minipass/index.mjs deleted file mode 100644 index 6ef6cd8cf0703..0000000000000 --- a/node_modules/minipass-fetch/node_modules/minipass/index.mjs +++ /dev/null @@ -1,702 +0,0 @@ -'use strict' -const proc = - typeof process === 'object' && process - ? 
process - : { - stdout: null, - stderr: null, - } -import EE from 'events' -import Stream from 'stream' -import stringdecoder from 'string_decoder' -const SD = stringdecoder.StringDecoder - -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const EMITTED_ERROR = Symbol('emittedError') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFER = Symbol('buffer') -const PIPES = Symbol('pipes') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed') -// internal event when stream has an error -const ERROR = Symbol('error') -const EMITDATA = Symbol('emitData') -const EMITEND = Symbol('emitEnd') -const EMITEND2 = Symbol('emitEnd2') -const ASYNC = Symbol('async') -const ABORT = Symbol('abort') -const ABORTED = Symbol('aborted') -const SIGNAL = Symbol('signal') - -const defer = fn => Promise.resolve().then(fn) - -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = - (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') -const ITERATOR = - (doIter && Symbol.iterator) || Symbol('iterator not implemented') - -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. -const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' - -const isArrayBuffer = b => - b instanceof ArrayBuffer || - (typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0) - -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) - -class Pipe { - constructor(src, dest, opts) { - this.src = src - this.dest = dest - this.opts = opts - this.ondrain = () => src[RESUME]() - dest.on('drain', this.ondrain) - } - unpipe() { - this.dest.removeListener('drain', this.ondrain) - } - // istanbul ignore next - only here for the prototype - proxyErrors() {} - end() { - this.unpipe() - if (this.opts.end) this.dest.end() - } -} - -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors) - super.unpipe() - } - constructor(src, dest, opts) { - super(src, dest, opts) - this.proxyErrors = er => dest.emit('error', er) - src.on('error', this.proxyErrors) - } -} - -export class Minipass extends Stream { - constructor(options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this[PIPES] = [] - this[BUFFER] = [] - this[OBJECTMODE] = (options && options.objectMode) || false - if (this[OBJECTMODE]) this[ENCODING] = null - else this[ENCODING] = (options && options.encoding) || null - if (this[ENCODING] === 'buffer') this[ENCODING] = null - this[ASYNC] = (options && !!options.async) || false - this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this[EMITTED_ERROR] = null - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) - } - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) - } - this[SIGNAL] = options && options.signal - this[ABORTED] = false - if (this[SIGNAL]) { - this[SIGNAL].addEventListener('abort', () => this[ABORT]()) - if (this[SIGNAL].aborted) { - this[ABORT]() - } - } - } - - get bufferLength() { - return this[BUFFERLENGTH] - } - - get encoding() { - return this[ENCODING] - } - set encoding(enc) { - if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') - - if ( - this[ENCODING] && - enc !== this[ENCODING] && - ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) - ) - throw new Error('cannot change encoding') - - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? new SD(enc) : null - if (this[BUFFER].length) - this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) - } - - this[ENCODING] = enc - } - - setEncoding(enc) { - this.encoding = enc - } - - get objectMode() { - return this[OBJECTMODE] - } - set objectMode(om) { - this[OBJECTMODE] = this[OBJECTMODE] || !!om - } - - get ['async']() { - return this[ASYNC] - } - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a - } - - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true - this.emit('abort', this[SIGNAL].reason) - this.destroy(this[SIGNAL].reason) - } - - get aborted() { - return this[ABORTED] - } - set aborted(_) {} - - write(chunk, encoding, cb) { - if (this[ABORTED]) return false - if (this[EOF]) throw new Error('write after end') - - if (this[DESTROYED]) { - this.emit( - 'error', - Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - ) - ) - return true - } - - if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') - - if (!encoding) encoding = 'utf8' - - const fn = this[ASYNC] ? defer : f => f() - - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } - - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - /* istanbul ignore if - maybe impossible? 
*/ - if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) - - if (this.flowing) this.emit('data', chunk) - else this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - - if (cb) fn(cb) - - return this.flowing - } - - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - if (cb) fn(cb) - return this.flowing - } - - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if ( - typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed) - ) { - chunk = Buffer.from(chunk, encoding) - } - - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) - - // Note: flushing CAN potentially switch us into not-flowing mode - if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) - - if (this.flowing) this.emit('data', chunk) - else this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - - if (cb) fn(cb) - - return this.flowing - } - - read(n) { - if (this[DESTROYED]) return null - - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { - this[MAYBE_EMIT_END]() - return null - } - - if (this[OBJECTMODE]) n = null - - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] - else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] - } - - const ret = this[READ](n || null, this[BUFFER][0]) - this[MAYBE_EMIT_END]() - return ret - } - - [READ](n, chunk) { - if (n === chunk.length || n === null) this[BUFFERSHIFT]() - else { - this[BUFFER][0] = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } - - this.emit('data', chunk) - - if (!this[BUFFER].length && !this[EOF]) this.emit('drain') - - return chunk - } - - end(chunk, encoding, cb) { - if (typeof chunk === 'function') (cb = chunk), (chunk = null) - if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') - if (chunk) this.write(chunk, encoding) - if (cb) this.once('end', cb) - this[EOF] = true - this.writable = false - - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
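The same write-then-consume pattern also works through the async-iterator support (shown in the CJS copy earlier); a sketch against the ESM build being removed here, relying on top-level await:

import { Minipass } from 'minipass'

const mp = new Minipass({ encoding: 'utf8' })
mp.write('one\n')
mp.end('two\n')

// for await pauses the stream between chunks and stops cleanly at EOF
for await (const chunk of mp) {
  process.stdout.write(chunk)
}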
- if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() - return this - } - - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) return - - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this[BUFFER].length) this[FLUSH]() - else if (this[EOF]) this[MAYBE_EMIT_END]() - else this.emit('drain') - } - - resume() { - return this[RESUME]() - } - - pause() { - this[FLOWING] = false - this[PAUSED] = true - } - - get destroyed() { - return this[DESTROYED] - } - - get flowing() { - return this[FLOWING] - } - - get paused() { - return this[PAUSED] - } - - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 - else this[BUFFERLENGTH] += chunk.length - this[BUFFER].push(chunk) - } - - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 - else this[BUFFERLENGTH] -= this[BUFFER][0].length - return this[BUFFER].shift() - } - - [FLUSH](noDrain) { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) - - if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') - } - - [FLUSHCHUNK](chunk) { - this.emit('data', chunk) - return this.flowing - } - - pipe(dest, opts) { - if (this[DESTROYED]) return - - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === proc.stdout || dest === proc.stderr) opts.end = false - else opts.end = opts.end !== false - opts.proxyErrors = !!opts.proxyErrors - - // piping an ended stream ends immediately - if (ended) { - if (opts.end) dest.end() - } else { - this[PIPES].push( - !opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts) - ) - if (this[ASYNC]) defer(() => this[RESUME]()) - else this[RESUME]() - } - - return dest - } - - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest) - if (p) { - this[PIPES].splice(this[PIPES].indexOf(p), 1) - p.unpipe() - } - } - - addListener(ev, fn) { - return this.on(ev, fn) - } - - on(ev, fn) { - const ret = super.on(ev, fn) - if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) - super.emit('readable') - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } else if (ev === 'error' && this[EMITTED_ERROR]) { - if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) - else fn.call(this, this[EMITTED_ERROR]) - } - return ret - } - - get emittedEnd() { - return this[EMITTED_END] - } - - [MAYBE_EMIT_END]() { - if ( - !this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF] - ) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) this.emit('close') - this[EMITTING_END] = false - } - } - - emit(ev, data, ...extra) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? 
defer(() => this[EMITDATA](data)) - : this[EMITDATA](data) - } else if (ev === 'end') { - return this[EMITEND]() - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) return - const ret = super.emit('close') - this.removeAllListeners('close') - return ret - } else if (ev === 'error') { - this[EMITTED_ERROR] = data - super.emit(ERROR, data) - const ret = - !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'resume') { - const ret = super.emit('resume') - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev) - this.removeAllListeners(ev) - return ret - } - - // Some other unknown event - const ret = super.emit(ev, data, ...extra) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) this.pause() - } - const ret = super.emit('data', data) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITEND]() { - if (this[EMITTED_END]) return - - this[EMITTED_END] = true - this.readable = false - if (this[ASYNC]) defer(() => this[EMITEND2]()) - else this[EMITEND2]() - } - - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end() - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data) - } - super.emit('data', data) - } - } - - for (const p of this[PIPES]) { - p.end() - } - const ret = super.emit('end') - this.removeAllListeners('end') - return ret - } - - // const all = await stream.collect() - collect() { - const buf = [] - if (!this[OBJECTMODE]) buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) buf.dataLength += c.length - }) - return p.then(() => buf) - } - - // const data = await stream.concat() - concat() { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] - ? 
buf.join('') - : Buffer.concat(buf, buf.dataLength) - ) - } - - // stream.promise().then(() => done, er => emitted error) - promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('error', er => reject(er)) - this.on('end', () => resolve()) - }) - } - - // for await (let chunk of stream) - [ASYNCITERATOR]() { - let stopped = false - const stop = () => { - this.pause() - stopped = true - return Promise.resolve({ done: true }) - } - const next = () => { - if (stopped) return stop() - const res = this.read() - if (res !== null) return Promise.resolve({ done: false, value: res }) - - if (this[EOF]) return stop() - - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - this.removeListener(DESTROYED, ondestroy) - stop() - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.removeListener(DESTROYED, ondestroy) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - this.removeListener(DESTROYED, ondestroy) - stop() - resolve({ done: true }) - } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } - - return { - next, - throw: stop, - return: stop, - [ASYNCITERATOR]() { - return this - }, - } - } - - // for (let chunk of stream) - [ITERATOR]() { - let stopped = false - const stop = () => { - this.pause() - this.removeListener(ERROR, stop) - this.removeListener(DESTROYED, stop) - this.removeListener('end', stop) - stopped = true - return { done: true } - } - - const next = () => { - if (stopped) return stop() - const value = this.read() - return value === null ? 
stop() : { value } - } - this.once('end', stop) - this.once(ERROR, stop) - this.once(DESTROYED, stop) - - return { - next, - throw: stop, - return: stop, - [ITERATOR]() { - return this - }, - } - } - - destroy(er) { - if (this[DESTROYED]) { - if (er) this.emit('error', er) - else this.emit(DESTROYED) - return this - } - - this[DESTROYED] = true - - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0 - this[BUFFERLENGTH] = 0 - - if (typeof this.close === 'function' && !this[CLOSED]) this.close() - - if (er) this.emit('error', er) - // if no error to emit, still reject pending promises - else this.emit(DESTROYED) - - return this - } - - static isStream(s) { - return ( - !!s && - (s instanceof Minipass || - s instanceof Stream || - (s instanceof EE && - // readable - (typeof s.pipe === 'function' || - // writable - (typeof s.write === 'function' && typeof s.end === 'function')))) - ) - } -} - - diff --git a/node_modules/minipass-fetch/node_modules/minipass/package.json b/node_modules/minipass-fetch/node_modules/minipass/package.json deleted file mode 100644 index 0e20e988047f2..0000000000000 --- a/node_modules/minipass-fetch/node_modules/minipass/package.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "name": "minipass", - "version": "5.0.0", - "description": "minimal implementation of a PassThrough stream", - "main": "./index.js", - "module": "./index.mjs", - "types": "./index.d.ts", - "exports": { - ".": { - "import": { - "types": "./index.d.ts", - "default": "./index.mjs" - }, - "require": { - "types": "./index.d.ts", - "default": "./index.js" - } - }, - "./package.json": "./package.json" - }, - "devDependencies": { - "@types/node": "^17.0.41", - "end-of-stream": "^1.4.0", - "node-abort-controller": "^3.1.1", - "prettier": "^2.6.2", - "tap": "^16.2.0", - "through2": "^2.0.3", - "ts-node": "^10.8.1", - "typedoc": "^0.23.24", - "typescript": "^4.7.3" - }, - "scripts": { - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "prepare": "node ./scripts/transpile-to-esm.js", - "snap": "tap", - "test": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --follow-tags", - "typedoc": "typedoc ./index.d.ts", - "format": "prettier --write . --loglevel warn" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/isaacs/minipass.git" - }, - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", - "license": "ISC", - "files": [ - "index.d.ts", - "index.js", - "index.mjs" - ], - "tap": { - "check-coverage": true - }, - "engines": { - "node": ">=8" - }, - "prettier": { - "semi": false, - "printWidth": 80, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - } -} diff --git a/node_modules/minipass-fetch/package.json b/node_modules/minipass-fetch/package.json index 78024317d8be4..581275ba27d4f 100644 --- a/node_modules/minipass-fetch/package.json +++ b/node_modules/minipass-fetch/package.json @@ -1,6 +1,6 @@ { "name": "minipass-fetch", - "version": "3.0.3", + "version": "3.0.4", "description": "An implementation of window.fetch in Node.js using Minipass streams", "license": "MIT", "main": "lib/index.js", @@ -24,7 +24,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.18.0", "@ungap/url-search-params": "^0.2.2", "abort-controller": "^3.0.0", "abortcontroller-polyfill": "~1.7.3", @@ -36,7 +36,7 @@ "tap": "^16.0.0" }, "dependencies": { - "minipass": "^5.0.0", + "minipass": "^7.0.3", "minipass-sized": "^1.0.3", "minizlib": "^2.1.2" }, @@ -63,7 +63,7 @@ "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.14.1", + "version": "4.18.0", "publish": "true" } } diff --git a/package-lock.json b/package-lock.json index 421762d3106f8..e0ef51b02bc00 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9017,12 +9017,12 @@ } }, "node_modules/minipass-fetch": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.3.tgz", - "integrity": "sha512-n5ITsTkDqYkYJZjcRWzZt9qnZKCT7nKCosJhHoj7S7zD+BP4jVbWs+odsniw5TA3E0sLomhTKOKjF86wf11PuQ==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.4.tgz", + "integrity": "sha512-jHAqnA728uUpIaFm7NWsCnqKT6UqZz7GcI/bDpPATuwYyKwJwW0remxSCxUlKiEty+eopHGa3oc8WxgQ1FFJqg==", "inBundle": true, "dependencies": { - "minipass": "^5.0.0", + "minipass": "^7.0.3", "minipass-sized": "^1.0.3", "minizlib": "^2.1.2" }, @@ -9033,15 +9033,6 @@ "encoding": "^0.1.13" } }, - "node_modules/minipass-fetch/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "inBundle": true, - "engines": { - "node": ">=8" - } - }, "node_modules/minipass-flush": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", From 5d776c8db6ff28567c5da397d19aa8b8450d4dce Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Tue, 15 Aug 2023 12:08:21 -0700 Subject: [PATCH 28/68] deps: @npmcli/git@5.0.0 --- DEPENDENCIES.md | 2 - node_modules/.gitignore | 13 + node_modules/@npmcli/git/lib/revs.js | 4 +- .../git/node_modules/lru-cache/LICENSE | 15 + .../node_modules/lru-cache/dist/cjs/index.js | 1404 +++++++++++++++++ .../lru-cache/dist/cjs/index.min.js | 2 + .../lru-cache/dist/cjs/package.json | 3 + .../node_modules/lru-cache/dist/mjs/index.js | 1400 ++++++++++++++++ .../lru-cache/dist/mjs/index.min.js | 2 + .../lru-cache/dist/mjs/package.json | 3 + .../git/node_modules/lru-cache/package.json | 108 ++ node_modules/@npmcli/git/package.json | 18 +- 
.../node_modules/@npmcli/git/LICENSE | 15 + .../node_modules/@npmcli/git/lib/clone.js | 172 ++ .../node_modules/@npmcli/git/lib/errors.js | 36 + .../node_modules/@npmcli/git/lib/find.js | 15 + .../node_modules/@npmcli/git/lib/index.js | 9 + .../node_modules/@npmcli/git/lib/is-clean.js | 6 + .../node_modules/@npmcli/git/lib/is.js | 6 + .../@npmcli/git/lib/lines-to-revs.js | 147 ++ .../@npmcli/git/lib/make-error.js | 33 + .../node_modules/@npmcli/git/lib/opts.js | 12 + .../node_modules/@npmcli/git/lib/revs.js | 28 + .../node_modules/@npmcli/git/lib/spawn.js | 44 + .../node_modules/@npmcli/git/lib/utils.js | 3 + .../node_modules/@npmcli/git/lib/which.js | 18 + .../node_modules/@npmcli/git/package.json | 57 + .../pacote/node_modules/@npmcli/git/LICENSE | 15 + .../node_modules/@npmcli/git/lib/clone.js | 172 ++ .../node_modules/@npmcli/git/lib/errors.js | 36 + .../node_modules/@npmcli/git/lib/find.js | 15 + .../node_modules/@npmcli/git/lib/index.js | 9 + .../node_modules/@npmcli/git/lib/is-clean.js | 6 + .../pacote/node_modules/@npmcli/git/lib/is.js | 6 + .../@npmcli/git/lib/lines-to-revs.js | 147 ++ .../@npmcli/git/lib/make-error.js | 33 + .../node_modules/@npmcli/git/lib/opts.js | 12 + .../node_modules/@npmcli/git/lib/revs.js | 28 + .../node_modules/@npmcli/git/lib/spawn.js | 44 + .../node_modules/@npmcli/git/lib/utils.js | 3 + .../node_modules/@npmcli/git/lib/which.js | 18 + .../node_modules/@npmcli/git/package.json | 57 + package-lock.json | 80 +- package.json | 2 +- workspaces/libnpmversion/package.json | 2 +- 45 files changed, 4240 insertions(+), 20 deletions(-) create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/LICENSE create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.js create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.min.js create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/package.json create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.js create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.min.js create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/package.json create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js 
create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json create mode 100644 node_modules/pacote/node_modules/@npmcli/git/LICENSE create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/clone.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/errors.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/find.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/index.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/is.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/opts.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/revs.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/utils.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/which.js create mode 100644 node_modules/pacote/node_modules/@npmcli/git/package.json diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md index 99ab5f3e2e992..324dbb190ca34 100644 --- a/DEPENDENCIES.md +++ b/DEPENDENCIES.md @@ -106,7 +106,6 @@ graph LR; npm-->libnpmteam; npm-->libnpmversion; npm-->make-fetch-happen; - npm-->minipass-fetch; npm-->nopt; npm-->npm-audit-report; npm-->npm-install-checks; @@ -527,7 +526,6 @@ graph LR; npm-->licensee; npm-->make-fetch-happen; npm-->minimatch; - npm-->minipass-fetch; npm-->minipass-pipeline; npm-->minipass; npm-->ms; diff --git a/node_modules/.gitignore b/node_modules/.gitignore index c0f87b39bc957..24f1780ead0e5 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -22,12 +22,20 @@ !/@npmcli/disparity-colors !/@npmcli/fs !/@npmcli/git +!/@npmcli/git/node_modules/ +/@npmcli/git/node_modules/* +!/@npmcli/git/node_modules/lru-cache !/@npmcli/installed-package-contents !/@npmcli/map-workspaces !/@npmcli/metavuln-calculator !/@npmcli/name-from-folder !/@npmcli/node-gyp !/@npmcli/package-json +!/@npmcli/package-json/node_modules/ +/@npmcli/package-json/node_modules/* +!/@npmcli/package-json/node_modules/@npmcli/ +/@npmcli/package-json/node_modules/@npmcli/* +!/@npmcli/package-json/node_modules/@npmcli/git !/@npmcli/promise-spawn !/@npmcli/query !/@npmcli/run-script @@ -201,6 +209,11 @@ !/once !/p-map !/pacote +!/pacote/node_modules/ +/pacote/node_modules/* +!/pacote/node_modules/@npmcli/ +/pacote/node_modules/@npmcli/* +!/pacote/node_modules/@npmcli/git !/parse-conflict-json !/path-is-absolute !/path-key diff --git a/node_modules/@npmcli/git/lib/revs.js b/node_modules/@npmcli/git/lib/revs.js index ee72370d5b7ec..ca14837de1b87 100644 --- a/node_modules/@npmcli/git/lib/revs.js +++ b/node_modules/@npmcli/git/lib/revs.js @@ -1,8 +1,8 @@ const pinflight = require('promise-inflight') const spawn = require('./spawn.js') -const LRU = require('lru-cache') +const { LRUCache } = require('lru-cache') -const revsCache = new LRU({ +const revsCache = new LRUCache({ max: 100, ttl: 5 * 60 * 1000, }) diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/LICENSE b/node_modules/@npmcli/git/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000000000..f785757cd63f8 --- /dev/null +++ b/node_modules/@npmcli/git/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2010-2023 
Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.js new file mode 100644 index 0000000000000..02d76ec800a92 --- /dev/null +++ b/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.js @@ -0,0 +1,1404 @@ +"use strict"; +/** + * @module LRUCache + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.LRUCache = void 0; +const perf = typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date; +const warned = new Set(); +/* c8 ignore start */ +const PROCESS = (typeof process === 'object' && !!process ? process : {}); +/* c8 ignore start */ +const emitWarning = (msg, type, code, fn) => { + typeof PROCESS.emitWarning === 'function' + ? PROCESS.emitWarning(msg, type, code, fn) + : console.error(`[${code}] ${type}: ${msg}`); +}; +let AC = globalThis.AbortController; +let AS = globalThis.AbortSignal; +/* c8 ignore start */ +if (typeof AC === 'undefined') { + //@ts-ignore + AS = class AbortSignal { + onabort; + _onabort = []; + reason; + aborted = false; + addEventListener(_, fn) { + this._onabort.push(fn); + } + }; + //@ts-ignore + AC = class AbortController { + constructor() { + warnACPolyfill(); + } + signal = new AS(); + abort(reason) { + if (this.signal.aborted) + return; + //@ts-ignore + this.signal.reason = reason; + //@ts-ignore + this.signal.aborted = true; + //@ts-ignore + for (const fn of this.signal._onabort) { + fn(reason); + } + this.signal.onabort?.(reason); + } + }; + let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; + const warnACPolyfill = () => { + if (!printACPolyfillWarning) + return; + printACPolyfillWarning = false; + emitWarning('AbortController is not defined. If using lru-cache in ' + + 'node 14, load an AbortController polyfill from the ' + + '`node-abort-controller` package. A minimal polyfill is ' + + 'provided for use by LRUCache.fetch(), but it should not be ' + + 'relied upon in other contexts (eg, passing it to other APIs that ' + + 'use AbortController/AbortSignal might have undesirable effects). ' + + 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); + }; +} +/* c8 ignore stop */ +const shouldWarn = (code) => !warned.has(code); +const TYPE = Symbol('type'); +const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); +/* c8 ignore start */ +// This is a little bit ridiculous, tbh. +// The maximum array length is 2^32-1 or thereabouts on most JS impls. +// And well before that point, you're caching the entire world, I mean, +// that's ~32GB of just integers for the next/prev links, plus whatever +// else to hold that many keys and values. 
Just filling the memory with +// zeroes at init time is brutal when you get that big. +// But why not be complete? +// Maybe in the future, these limits will have expanded. +const getUintArray = (max) => !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null; +/* c8 ignore stop */ +class ZeroArray extends Array { + constructor(size) { + super(size); + this.fill(0); + } +} +class Stack { + heap; + length; + // private constructor + static #constructing = false; + static create(max) { + const HeapCls = getUintArray(max); + if (!HeapCls) + return []; + Stack.#constructing = true; + const s = new Stack(max, HeapCls); + Stack.#constructing = false; + return s; + } + constructor(max, HeapCls) { + /* c8 ignore start */ + if (!Stack.#constructing) { + throw new TypeError('instantiate Stack using Stack.create(n)'); + } + /* c8 ignore stop */ + this.heap = new HeapCls(max); + this.length = 0; + } + push(n) { + this.heap[this.length++] = n; + } + pop() { + return this.heap[--this.length]; + } +} +/** + * Default export, the thing you're using this module to get. + * + * All properties from the options object (with the exception of + * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as + * normal public members. (`max` and `maxBase` are read-only getters.) + * Changing any of these will alter the defaults for subsequent method calls, + * but is otherwise safe. + */ +class LRUCache { + // properties coming in from the options of these, only max and maxSize + // really *need* to be protected. The rest can be modified, as they just + // set defaults for various methods. + #max; + #maxSize; + #dispose; + #disposeAfter; + #fetchMethod; + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort; + // computed properties + #size; + #calculatedSize; + #keyMap; + #keyList; + #valList; + #next; + #prev; + #head; + #tail; + #free; + #disposed; + #sizes; + #starts; + #ttls; + #hasDispose; + #hasFetchMethod; + #hasDisposeAfter; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! 
+ * + * @internal + */ + static unsafeExposeInternals(c) { + return { + // properties + starts: c.#starts, + ttls: c.#ttls, + sizes: c.#sizes, + keyMap: c.#keyMap, + keyList: c.#keyList, + valList: c.#valList, + next: c.#next, + prev: c.#prev, + get head() { + return c.#head; + }, + get tail() { + return c.#tail; + }, + free: c.#free, + // methods + isBackgroundFetch: (p) => c.#isBackgroundFetch(p), + backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), + moveToTail: (index) => c.#moveToTail(index), + indexes: (options) => c.#indexes(options), + rindexes: (options) => c.#rindexes(options), + isStale: (index) => c.#isStale(index), + }; + } + // Protected read-only members + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max() { + return this.#max; + } + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize() { + return this.#maxSize; + } + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize() { + return this.#calculatedSize; + } + /** + * The number of items stored in the cache (read-only) + */ + get size() { + return this.#size; + } + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod() { + return this.#fetchMethod; + } + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose() { + return this.#dispose; + } + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter() { + return this.#disposeAfter; + } + constructor(options) { + const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer'); + } + const UintArray = max ? 
getUintArray(max) : Array; + if (!UintArray) { + throw new Error('invalid max value: ' + max); + } + this.#max = max; + this.#maxSize = maxSize; + this.maxEntrySize = maxEntrySize || this.#maxSize; + this.sizeCalculation = sizeCalculation; + if (this.sizeCalculation) { + if (!this.#maxSize && !this.maxEntrySize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function'); + } + } + if (fetchMethod !== undefined && + typeof fetchMethod !== 'function') { + throw new TypeError('fetchMethod must be a function if specified'); + } + this.#fetchMethod = fetchMethod; + this.#hasFetchMethod = !!fetchMethod; + this.#keyMap = new Map(); + this.#keyList = new Array(max).fill(undefined); + this.#valList = new Array(max).fill(undefined); + this.#next = new UintArray(max); + this.#prev = new UintArray(max); + this.#head = 0; + this.#tail = 0; + this.#free = Stack.create(max); + this.#size = 0; + this.#calculatedSize = 0; + if (typeof dispose === 'function') { + this.#dispose = dispose; + } + if (typeof disposeAfter === 'function') { + this.#disposeAfter = disposeAfter; + this.#disposed = []; + } + else { + this.#disposeAfter = undefined; + this.#disposed = undefined; + } + this.#hasDispose = !!this.#dispose; + this.#hasDisposeAfter = !!this.#disposeAfter; + this.noDisposeOnSet = !!noDisposeOnSet; + this.noUpdateTTL = !!noUpdateTTL; + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; + this.ignoreFetchAbort = !!ignoreFetchAbort; + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.#maxSize !== 0) { + if (!isPosInt(this.#maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified'); + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError('maxEntrySize must be a positive integer if specified'); + } + this.#initializeSizeTracking(); + } + this.allowStale = !!allowStale; + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; + this.updateAgeOnGet = !!updateAgeOnGet; + this.updateAgeOnHas = !!updateAgeOnHas; + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1; + this.ttlAutopurge = !!ttlAutopurge; + this.ttl = ttl || 0; + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified'); + } + this.#initializeTTLTracking(); + } + // do not allow completely unbounded caches + if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { + throw new TypeError('At least one of max, maxSize, or ttl is required'); + } + if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { + const code = 'LRU_CACHE_UNBOUNDED'; + if (shouldWarn(code)) { + warned.add(code); + const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.'; + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); + } + } + } + /** + * Return the remaining TTL time for a given entry key + */ + getRemainingTTL(key) { + return this.#keyMap.has(key) ? Infinity : 0; + } + #initializeTTLTracking() { + const ttls = new ZeroArray(this.#max); + const starts = new ZeroArray(this.#max); + this.#ttls = ttls; + this.#starts = starts; + this.#setItemTTL = (index, ttl, start = perf.now()) => { + starts[index] = ttl !== 0 ? 
start : 0; + ttls[index] = ttl; + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.#isStale(index)) { + this.delete(this.#keyList[index]); + } + }, ttl + 1); + // unref() not supported on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + }; + this.#updateItemAge = index => { + starts[index] = ttls[index] !== 0 ? perf.now() : 0; + }; + this.#statusTTL = (status, index) => { + if (ttls[index]) { + const ttl = ttls[index]; + const start = starts[index]; + status.ttl = ttl; + status.start = start; + status.now = cachedNow || getNow(); + const age = status.now - start; + status.remainingTTL = ttl - age; + } + }; + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0; + const getNow = () => { + const n = perf.now(); + if (this.ttlResolution > 0) { + cachedNow = n; + const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); + // not available on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + return n; + }; + this.getRemainingTTL = key => { + const index = this.#keyMap.get(key); + if (index === undefined) { + return 0; + } + const ttl = ttls[index]; + const start = starts[index]; + if (ttl === 0 || start === 0) { + return Infinity; + } + const age = (cachedNow || getNow()) - start; + return ttl - age; + }; + this.#isStale = index => { + return (ttls[index] !== 0 && + starts[index] !== 0 && + (cachedNow || getNow()) - starts[index] > ttls[index]); + }; + } + // conditionally set private methods related to TTL + #updateItemAge = () => { }; + #statusTTL = () => { }; + #setItemTTL = () => { }; + /* c8 ignore stop */ + #isStale = () => false; + #initializeSizeTracking() { + const sizes = new ZeroArray(this.#max); + this.#calculatedSize = 0; + this.#sizes = sizes; + this.#removeItemSize = index => { + this.#calculatedSize -= sizes[index]; + sizes[index] = 0; + }; + this.#requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.#isBackgroundFetch(v)) { + return 0; + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function'); + } + size = sizeCalculation(v, k); + if (!isPosInt(size)) { + throw new TypeError('sizeCalculation return invalid (expect positive integer)'); + } + } + else { + throw new TypeError('invalid size value (must be positive integer). 
' + + 'When maxSize or maxEntrySize is used, sizeCalculation ' + + 'or size must be set.'); + } + } + return size; + }; + this.#addItemSize = (index, size, status) => { + sizes[index] = size; + if (this.#maxSize) { + const maxSize = this.#maxSize - sizes[index]; + while (this.#calculatedSize > maxSize) { + this.#evict(true); + } + } + this.#calculatedSize += sizes[index]; + if (status) { + status.entrySize = size; + status.totalCalculatedSize = this.#calculatedSize; + } + }; + } + #removeItemSize = _i => { }; + #addItemSize = (_i, _s, _st) => { }; + #requireSize = (_k, _v, size, sizeCalculation) => { + if (size || sizeCalculation) { + throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); + } + return 0; + }; + *#indexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#tail; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#head) { + break; + } + else { + i = this.#prev[i]; + } + } + } + } + *#rindexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#head; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#tail) { + break; + } + else { + i = this.#next[i]; + } + } + } + } + #isValidIndex(index) { + return (index !== undefined && + this.#keyMap.get(this.#keyList[index]) === index); + } + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + *entries() { + for (const i of this.#indexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + *rentries() { + for (const i of this.#rindexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + *keys() { + for (const i of this.#indexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + *rkeys() { + for (const i of this.#rindexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + *values() { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. 
+ */ + *rvalues() { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator]() { + return this.entries(); + } + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to Array.find(). fn is called as fn(value, key, cache). + */ + find(fn, getOptions = {}) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + if (fn(value, this.#keyList[i], this)) { + return this.get(this.#keyList[i], getOptions); + } + } + } + /** + * Call the supplied function on each item in the cache, in order from + * most recently used to least recently used. fn is called as + * fn(value, key, cache). Does not update age or recency of use. + * Does not iterate over stale values. + */ + forEach(fn, thisp = this) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. (ie, less recently used items are iterated over first.) + */ + rforEach(fn, thisp = this) { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale() { + let deleted = false; + for (const i of this.#rindexes({ allowStale: true })) { + if (this.#isStale(i)) { + this.delete(this.#keyList[i]); + deleted = true; + } + } + return deleted; + } + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to cache.load() + */ + dump() { + const arr = []; + for (const i of this.#indexes({ allowStale: true })) { + const key = this.#keyList[i]; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined || key === undefined) + continue; + const entry = { value }; + if (this.#ttls && this.#starts) { + entry.ttl = this.#ttls[i]; + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.#starts[i]; + entry.start = Math.floor(Date.now() - age); + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + arr.unshift([key, entry]); + } + return arr; + } + /** + * Reset the cache and load in the items in entries in the order listed. + * Note that the shape of the resulting cache may be different if the + * same options are not used in both caches. + */ + load(arr) { + this.clear(); + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset, so that + // we get the intended remaining TTL, no matter how long it's + // been on ice. + // + // it's ok for this to be a bit slow, it's a rare operation.
+ const age = Date.now() - entry.start; + entry.start = perf.now() - age; + } + this.set(key, entry.value, entry); + } + } + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + */ + set(k, v, setOptions = {}) { + if (v === undefined) { + this.delete(k); + return this; + } + const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; + let { noUpdateTTL = this.noUpdateTTL } = setOptions; + const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss'; + status.maxEntrySizeExceeded = true; + } + // have to delete, in case something is there already. + this.delete(k); + return this; + } + let index = this.#size === 0 ? undefined : this.#keyMap.get(k); + if (index === undefined) { + // addition + index = (this.#size === 0 + ? this.#tail + : this.#free.length !== 0 + ? this.#free.pop() + : this.#size === this.#max + ? this.#evict(false) + : this.#size); + this.#keyList[index] = k; + this.#valList[index] = v; + this.#keyMap.set(k, index); + this.#next[this.#tail] = index; + this.#prev[index] = this.#tail; + this.#tail = index; + this.#size++; + this.#addItemSize(index, size, status); + if (status) + status.set = 'add'; + noUpdateTTL = false; + } + else { + // update + this.#moveToTail(index); + const oldVal = this.#valList[index]; + if (v !== oldVal) { + if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } + } + else if (!noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(oldVal, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([oldVal, k, 'set']); + } + } + this.#removeItemSize(index); + this.#addItemSize(index, size, status); + this.#valList[index] = v; + if (status) { + status.set = 'replace'; + const oldValue = oldVal && this.#isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal; + if (oldValue !== undefined) + status.oldValue = oldValue; + } + } + else if (status) { + status.set = 'update'; + } + } + if (ttl !== 0 && !this.#ttls) { + this.#initializeTTLTracking(); + } + if (this.#ttls) { + if (!noUpdateTTL) { + this.#setItemTTL(index, ttl, start); + } + if (status) + this.#statusTTL(status, index); + } + if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return this; + } + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. 
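+ * Any queued disposeAfter callbacks are flushed before returning.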
+ */ + pop() { + try { + while (this.#size) { + const val = this.#valList[this.#head]; + this.#evict(true); + if (this.#isBackgroundFetch(val)) { + if (val.__staleWhileFetching) { + return val.__staleWhileFetching; + } + } + else if (val !== undefined) { + return val; + } + } + } + finally { + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } + } + #evict(free) { + const head = this.#head; + const k = this.#keyList[head]; + const v = this.#valList[head]; + if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'evict'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'evict']); + } + } + this.#removeItemSize(head); + // if we aren't about to use the index, then null these out + if (free) { + this.#keyList[head] = undefined; + this.#valList[head] = undefined; + this.#free.push(head); + } + if (this.#size === 1) { + this.#head = this.#tail = 0; + this.#free.length = 0; + } + else { + this.#head = this.#next[head]; + } + this.#keyMap.delete(k); + this.#size--; + return head; + } + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k, hasOptions = {}) { + const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v) && + v.__staleWhileFetching === undefined) { + return false; + } + if (!this.#isStale(index)) { + if (updateAgeOnHas) { + this.#updateItemAge(index); + } + if (status) { + status.has = 'hit'; + this.#statusTTL(status, index); + } + return true; + } + else if (status) { + status.has = 'stale'; + this.#statusTTL(status, index); + } + } + else if (status) { + status.has = 'miss'; + } + return false; + } + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k, peekOptions = {}) { + const { allowStale = this.allowStale } = peekOptions; + const index = this.#keyMap.get(k); + if (index !== undefined && + (allowStale || !this.#isStale(index))) { + const v = this.#valList[index]; + // either stale and allowed, or forcing a refresh of non-stale value + return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + } + } + #backgroundFetch(k, index, options, context) { + const v = index === undefined ? undefined : this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + return v; + } + const ac = new AC(); + const { signal } = options; + // when/if our AC signals, then stop listening to theirs. 
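+ // (the { signal: ac.signal } option below unregisters this listener
+ // automatically once our own controller aborts)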
+ signal?.addEventListener('abort', () => ac.abort(signal.reason), { + signal: ac.signal, + }); + const fetchOpts = { + signal: ac.signal, + options, + context, + }; + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal; + const ignoreAbort = options.ignoreFetchAbort && v !== undefined; + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true; + options.status.fetchError = ac.signal.reason; + if (ignoreAbort) + options.status.fetchAbortIgnored = true; + } + else { + options.status.fetchResolved = true; + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason); + } + // either we didn't abort, and are still here, or we did, and ignored + const bf = p; + if (this.#valList[index] === p) { + if (v === undefined) { + if (bf.__staleWhileFetching) { + this.#valList[index] = bf.__staleWhileFetching; + } + else { + this.delete(k); + } + } + else { + if (options.status) + options.status.fetchUpdated = true; + this.set(k, v, fetchOpts.options); + } + } + return v; + }; + const eb = (er) => { + if (options.status) { + options.status.fetchRejected = true; + options.status.fetchError = er; + } + return fetchFail(er); + }; + const fetchFail = (er) => { + const { aborted } = ac.signal; + const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; + const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; + const noDelete = allowStale || options.noDeleteOnFetchRejection; + const bf = p; + if (this.#valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || bf.__staleWhileFetching === undefined; + if (del) { + this.delete(k); + } + else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.#valList[index] = bf.__staleWhileFetching; + } + } + if (allowStale) { + if (options.status && bf.__staleWhileFetching !== undefined) { + options.status.returnedStale = true; + } + return bf.__staleWhileFetching; + } + else if (bf.__returned === bf) { + throw er; + } + }; + const pcall = (res, rej) => { + const fmp = this.#fetchMethod?.(k, v, fetchOpts); + if (fmp && fmp instanceof Promise) { + fmp.then(v => res(v === undefined ? undefined : v), rej); + } + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if (!options.ignoreFetchAbort || + options.allowStaleOnFetchAbort) { + res(undefined); + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true); + } + } + }); + }; + if (options.status) + options.status.fetchDispatched = true; + const p = new Promise(pcall).then(cb, eb); + const bf = Object.assign(p, { + __abortController: ac, + __staleWhileFetching: v, + __returned: undefined, + }); + if (index === undefined) { + // internal, don't expose status. 
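+ // store the pending promise as the cached value so that concurrent
+ // fetch() calls for the same key share a single in-flight request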
+ this.set(k, bf, { ...fetchOpts.options, status: undefined }); + index = this.#keyMap.get(k); + } + else { + this.#valList[index] = bf; + } + return bf; + } + #isBackgroundFetch(p) { + if (!this.#hasFetchMethod) + return false; + const b = p; + return (!!b && + b instanceof Promise && + b.hasOwnProperty('__staleWhileFetching') && + b.__abortController instanceof AC); + } + async fetch(k, fetchOptions = {}) { + const { + // get options + allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; + if (!this.#hasFetchMethod) { + if (status) + status.fetch = 'get'; + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }); + } + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + }; + let index = this.#keyMap.get(k); + if (index === undefined) { + if (status) + status.fetch = 'miss'; + const p = this.#backgroundFetch(k, index, options, context); + return (p.__returned = p); + } + else { + // in cache, maybe already fetching + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + const stale = allowStale && v.__staleWhileFetching !== undefined; + if (status) { + status.fetch = 'inflight'; + if (stale) + status.returnedStale = true; + } + return stale ? v.__staleWhileFetching : (v.__returned = v); + } + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.#isStale(index); + if (!forceRefresh && !isStale) { + if (status) + status.fetch = 'hit'; + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + if (status) + this.#statusTTL(status, index); + return v; + } + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.#backgroundFetch(k, index, options, context); + const hasStale = p.__staleWhileFetching !== undefined; + const staleVal = hasStale && allowStale; + if (status) { + status.fetch = isStale ? 'stale' : 'refresh'; + if (staleVal && isStale) + status.returnedStale = true; + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p); + } + } + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. + * + * If the key is not found, get() will return `undefined`. 
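+ * Stale entries also return `undefined` unless
+ * {@link LRUCache.OptionsBase.allowStale} is set.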
+ */ + get(k, getOptions = {}) { + const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const value = this.#valList[index]; + const fetching = this.#isBackgroundFetch(value); + if (status) + this.#statusTTL(status, index); + if (this.#isStale(index)) { + if (status) + status.get = 'stale'; + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k); + } + if (status && allowStale) + status.returnedStale = true; + return allowStale ? value : undefined; + } + else { + if (status && + allowStale && + value.__staleWhileFetching !== undefined) { + status.returnedStale = true; + } + return allowStale ? value.__staleWhileFetching : undefined; + } + } + else { + if (status) + status.get = 'hit'; + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching; + } + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + return value; + } + } + else if (status) { + status.get = 'miss'; + } + } + #connect(p, n) { + this.#prev[n] = p; + this.#next[p] = n; + } + #moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.#tail) { + if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#connect(this.#prev[index], this.#next[index]); + } + this.#connect(this.#tail, index); + this.#tail = index; + } + } + /** + * Deletes a key out of the cache. + * Returns true if the key was deleted, false otherwise. + */ + delete(k) { + let deleted = false; + if (this.#size !== 0) { + const index = this.#keyMap.get(k); + if (index !== undefined) { + deleted = true; + if (this.#size === 1) { + this.clear(); + } + else { + this.#removeItemSize(index); + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + this.#keyMap.delete(k); + this.#keyList[index] = undefined; + this.#valList[index] = undefined; + if (index === this.#tail) { + this.#tail = this.#prev[index]; + } + else if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#next[this.#prev[index]] = this.#next[index]; + this.#prev[this.#next[index]] = this.#prev[index]; + } + this.#size--; + this.#free.push(index); + } + } + } + if (this.#hasDisposeAfter && this.#disposed?.length) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return deleted; + } + /** + * Clear the cache entirely, throwing away all values. 
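+ * Settled entries are passed to the dispose handlers with the reason
+ * 'delete'; in-flight background fetches are aborted instead.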
+ */ + clear() { + for (const index of this.#rindexes({ allowStale: true })) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else { + const k = this.#keyList[index]; + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + } + this.#keyMap.clear(); + this.#valList.fill(undefined); + this.#keyList.fill(undefined); + if (this.#ttls && this.#starts) { + this.#ttls.fill(0); + this.#starts.fill(0); + } + if (this.#sizes) { + this.#sizes.fill(0); + } + this.#head = 0; + this.#tail = 0; + this.#free.length = 0; + this.#calculatedSize = 0; + this.#size = 0; + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } +} +exports.LRUCache = LRUCache; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.min.js new file mode 100644 index 0000000000000..8d34a03041d25 --- /dev/null +++ b/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.min.js @@ -0,0 +1,2 @@ +"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C; +//# sourceMappingURL=index.min.js.map diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/package.json b/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.js new file mode 100644 index 0000000000000..23b9754ad6c76 --- /dev/null +++ b/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.js @@ -0,0 +1,1400 @@ +/** + * @module LRUCache + */ +const perf = typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date; +const warned = new Set(); +/* c8 ignore start */ +const PROCESS = (typeof process === 'object' && !!process ? process : {}); +/* c8 ignore start */ +const emitWarning = (msg, type, code, fn) => { + typeof PROCESS.emitWarning === 'function' + ? PROCESS.emitWarning(msg, type, code, fn) + : console.error(`[${code}] ${type}: ${msg}`); +}; +let AC = globalThis.AbortController; +let AS = globalThis.AbortSignal; +/* c8 ignore start */ +if (typeof AC === 'undefined') { + //@ts-ignore + AS = class AbortSignal { + onabort; + _onabort = []; + reason; + aborted = false; + addEventListener(_, fn) { + this._onabort.push(fn); + } + }; + //@ts-ignore + AC = class AbortController { + constructor() { + warnACPolyfill(); + } + signal = new AS(); + abort(reason) { + if (this.signal.aborted) + return; + //@ts-ignore + this.signal.reason = reason; + //@ts-ignore + this.signal.aborted = true; + //@ts-ignore + for (const fn of this.signal._onabort) { + fn(reason); + } + this.signal.onabort?.(reason); + } + }; + let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; + const warnACPolyfill = () => { + if (!printACPolyfillWarning) + return; + printACPolyfillWarning = false; + emitWarning('AbortController is not defined. If using lru-cache in ' + + 'node 14, load an AbortController polyfill from the ' + + '`node-abort-controller` package. A minimal polyfill is ' + + 'provided for use by LRUCache.fetch(), but it should not be ' + + 'relied upon in other contexts (eg, passing it to other APIs that ' + + 'use AbortController/AbortSignal might have undesirable effects). 
' + + 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); + }; +} +/* c8 ignore stop */ +const shouldWarn = (code) => !warned.has(code); +const TYPE = Symbol('type'); +const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); +/* c8 ignore start */ +// This is a little bit ridiculous, tbh. +// The maximum array length is 2^32-1 or thereabouts on most JS impls. +// And well before that point, you're caching the entire world, I mean, +// that's ~32GB of just integers for the next/prev links, plus whatever +// else to hold that many keys and values. Just filling the memory with +// zeroes at init time is brutal when you get that big. +// But why not be complete? +// Maybe in the future, these limits will have expanded. +const getUintArray = (max) => !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null; +/* c8 ignore stop */ +class ZeroArray extends Array { + constructor(size) { + super(size); + this.fill(0); + } +} +class Stack { + heap; + length; + // private constructor + static #constructing = false; + static create(max) { + const HeapCls = getUintArray(max); + if (!HeapCls) + return []; + Stack.#constructing = true; + const s = new Stack(max, HeapCls); + Stack.#constructing = false; + return s; + } + constructor(max, HeapCls) { + /* c8 ignore start */ + if (!Stack.#constructing) { + throw new TypeError('instantiate Stack using Stack.create(n)'); + } + /* c8 ignore stop */ + this.heap = new HeapCls(max); + this.length = 0; + } + push(n) { + this.heap[this.length++] = n; + } + pop() { + return this.heap[--this.length]; + } +} +/** + * Default export, the thing you're using this module to get. + * + * All properties from the options object (with the exception of + * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as + * normal public members. (`max` and `maxBase` are read-only getters.) + * Changing any of these will alter the defaults for subsequent method calls, + * but is otherwise safe. + */ +export class LRUCache { + // properties coming in from the options of these, only max and maxSize + // really *need* to be protected. The rest can be modified, as they just + // set defaults for various methods. 
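+ // A construction sketch (illustrative; the option values below are
+ // assumptions, not defaults). At least one of `max`, `maxSize`, or
+ // `ttl` is required, or the constructor throws:
+ //
+ //   const cache = new LRUCache({
+ //     max: 500,         // hard cap on entry count
+ //     ttl: 60_000,      // entries become stale after 60 seconds
+ //     allowStale: true, // get()/fetch() may serve stale values
+ //     // `loadRemote` is a hypothetical async loader
+ //     fetchMethod: async (key) => loadRemote(key),
+ //   })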
+ #max; + #maxSize; + #dispose; + #disposeAfter; + #fetchMethod; + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort; + // computed properties + #size; + #calculatedSize; + #keyMap; + #keyList; + #valList; + #next; + #prev; + #head; + #tail; + #free; + #disposed; + #sizes; + #starts; + #ttls; + #hasDispose; + #hasFetchMethod; + #hasDisposeAfter; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! + * + * @internal + */ + static unsafeExposeInternals(c) { + return { + // properties + starts: c.#starts, + ttls: c.#ttls, + sizes: c.#sizes, + keyMap: c.#keyMap, + keyList: c.#keyList, + valList: c.#valList, + next: c.#next, + prev: c.#prev, + get head() { + return c.#head; + }, + get tail() { + return c.#tail; + }, + free: c.#free, + // methods + isBackgroundFetch: (p) => c.#isBackgroundFetch(p), + backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), + moveToTail: (index) => c.#moveToTail(index), + indexes: (options) => c.#indexes(options), + rindexes: (options) => c.#rindexes(options), + isStale: (index) => c.#isStale(index), + }; + } + // Protected read-only members + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max() { + return this.#max; + } + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize() { + return this.#maxSize; + } + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize() { + return this.#calculatedSize; + } + /** + * The number of items stored in the cache (read-only) + */ + get size() { + return this.#size; + } + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod() { + return this.#fetchMethod; + } + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose() { + return this.#dispose; + } + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter() { + return this.#disposeAfter; + } + constructor(options) { + const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, 
allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer'); + } + const UintArray = max ? getUintArray(max) : Array; + if (!UintArray) { + throw new Error('invalid max value: ' + max); + } + this.#max = max; + this.#maxSize = maxSize; + this.maxEntrySize = maxEntrySize || this.#maxSize; + this.sizeCalculation = sizeCalculation; + if (this.sizeCalculation) { + if (!this.#maxSize && !this.maxEntrySize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function'); + } + } + if (fetchMethod !== undefined && + typeof fetchMethod !== 'function') { + throw new TypeError('fetchMethod must be a function if specified'); + } + this.#fetchMethod = fetchMethod; + this.#hasFetchMethod = !!fetchMethod; + this.#keyMap = new Map(); + this.#keyList = new Array(max).fill(undefined); + this.#valList = new Array(max).fill(undefined); + this.#next = new UintArray(max); + this.#prev = new UintArray(max); + this.#head = 0; + this.#tail = 0; + this.#free = Stack.create(max); + this.#size = 0; + this.#calculatedSize = 0; + if (typeof dispose === 'function') { + this.#dispose = dispose; + } + if (typeof disposeAfter === 'function') { + this.#disposeAfter = disposeAfter; + this.#disposed = []; + } + else { + this.#disposeAfter = undefined; + this.#disposed = undefined; + } + this.#hasDispose = !!this.#dispose; + this.#hasDisposeAfter = !!this.#disposeAfter; + this.noDisposeOnSet = !!noDisposeOnSet; + this.noUpdateTTL = !!noUpdateTTL; + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; + this.ignoreFetchAbort = !!ignoreFetchAbort; + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.#maxSize !== 0) { + if (!isPosInt(this.#maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified'); + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError('maxEntrySize must be a positive integer if specified'); + } + this.#initializeSizeTracking(); + } + this.allowStale = !!allowStale; + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; + this.updateAgeOnGet = !!updateAgeOnGet; + this.updateAgeOnHas = !!updateAgeOnHas; + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1; + this.ttlAutopurge = !!ttlAutopurge; + this.ttl = ttl || 0; + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified'); + } + this.#initializeTTLTracking(); + } + // do not allow completely unbounded caches + if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { + throw new TypeError('At least one of max, maxSize, or ttl is required'); + } + if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { + const code = 'LRU_CACHE_UNBOUNDED'; + if (shouldWarn(code)) { + warned.add(code); + const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.'; + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); + } + } + } + /** + * Return the remaining TTL time for a given entry key + */ + getRemainingTTL(key) { + return this.#keyMap.has(key) ? 
Infinity : 0; + } + #initializeTTLTracking() { + const ttls = new ZeroArray(this.#max); + const starts = new ZeroArray(this.#max); + this.#ttls = ttls; + this.#starts = starts; + this.#setItemTTL = (index, ttl, start = perf.now()) => { + starts[index] = ttl !== 0 ? start : 0; + ttls[index] = ttl; + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.#isStale(index)) { + this.delete(this.#keyList[index]); + } + }, ttl + 1); + // unref() not supported on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + }; + this.#updateItemAge = index => { + starts[index] = ttls[index] !== 0 ? perf.now() : 0; + }; + this.#statusTTL = (status, index) => { + if (ttls[index]) { + const ttl = ttls[index]; + const start = starts[index]; + status.ttl = ttl; + status.start = start; + status.now = cachedNow || getNow(); + const age = status.now - start; + status.remainingTTL = ttl - age; + } + }; + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0; + const getNow = () => { + const n = perf.now(); + if (this.ttlResolution > 0) { + cachedNow = n; + const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); + // not available on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + return n; + }; + this.getRemainingTTL = key => { + const index = this.#keyMap.get(key); + if (index === undefined) { + return 0; + } + const ttl = ttls[index]; + const start = starts[index]; + if (ttl === 0 || start === 0) { + return Infinity; + } + const age = (cachedNow || getNow()) - start; + return ttl - age; + }; + this.#isStale = index => { + return (ttls[index] !== 0 && + starts[index] !== 0 && + (cachedNow || getNow()) - starts[index] > ttls[index]); + }; + } + // conditionally set private methods related to TTL + #updateItemAge = () => { }; + #statusTTL = () => { }; + #setItemTTL = () => { }; + /* c8 ignore stop */ + #isStale = () => false; + #initializeSizeTracking() { + const sizes = new ZeroArray(this.#max); + this.#calculatedSize = 0; + this.#sizes = sizes; + this.#removeItemSize = index => { + this.#calculatedSize -= sizes[index]; + sizes[index] = 0; + }; + this.#requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.#isBackgroundFetch(v)) { + return 0; + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function'); + } + size = sizeCalculation(v, k); + if (!isPosInt(size)) { + throw new TypeError('sizeCalculation return invalid (expect positive integer)'); + } + } + else { + throw new TypeError('invalid size value (must be positive integer). 
' + + 'When maxSize or maxEntrySize is used, sizeCalculation ' + + 'or size must be set.'); + } + } + return size; + }; + this.#addItemSize = (index, size, status) => { + sizes[index] = size; + if (this.#maxSize) { + const maxSize = this.#maxSize - sizes[index]; + while (this.#calculatedSize > maxSize) { + this.#evict(true); + } + } + this.#calculatedSize += sizes[index]; + if (status) { + status.entrySize = size; + status.totalCalculatedSize = this.#calculatedSize; + } + }; + } + #removeItemSize = _i => { }; + #addItemSize = (_i, _s, _st) => { }; + #requireSize = (_k, _v, size, sizeCalculation) => { + if (size || sizeCalculation) { + throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); + } + return 0; + }; + *#indexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#tail; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#head) { + break; + } + else { + i = this.#prev[i]; + } + } + } + } + *#rindexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#head; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#tail) { + break; + } + else { + i = this.#next[i]; + } + } + } + } + #isValidIndex(index) { + return (index !== undefined && + this.#keyMap.get(this.#keyList[index]) === index); + } + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + *entries() { + for (const i of this.#indexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + *rentries() { + for (const i of this.#rindexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + *keys() { + for (const i of this.#indexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + *rkeys() { + for (const i of this.#rindexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + *values() { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. 
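+ *
+ * Sketch (illustrative): `[...cache.values()]` lists values most
+ * recently used first, while `[...cache.rvalues()]` lists the same
+ * values least recently used first.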
+ */ + *rvalues() { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator]() { + return this.entries(); + } + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to Array.find(). fn is called as fn(value, key, cache). + */ + find(fn, getOptions = {}) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + if (fn(value, this.#keyList[i], this)) { + return this.get(this.#keyList[i], getOptions); + } + } + } + /** + * Call the supplied function on each item in the cache, in order from + * most recently used to least recently used. fn is called as + * fn(value, key, cache). Does not update age or recenty of use. + * Does not iterate over stale values. + */ + forEach(fn, thisp = this) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. (ie, less recently used items are iterated over first.) + */ + rforEach(fn, thisp = this) { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale() { + let deleted = false; + for (const i of this.#rindexes({ allowStale: true })) { + if (this.#isStale(i)) { + this.delete(this.#keyList[i]); + deleted = true; + } + } + return deleted; + } + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to cache.load() + */ + dump() { + const arr = []; + for (const i of this.#indexes({ allowStale: true })) { + const key = this.#keyList[i]; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined || key === undefined) + continue; + const entry = { value }; + if (this.#ttls && this.#starts) { + entry.ttl = this.#ttls[i]; + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.#starts[i]; + entry.start = Math.floor(Date.now() - age); + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + arr.unshift([key, entry]); + } + return arr; + } + /** + * Reset the cache and load in the items in entries in the order listed. + * Note that the shape of the resulting cache may be different if the + * same options are not used in both caches. + */ + load(arr) { + this.clear(); + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset, so that + // we get the intended remaining TTL, no matter how long it's + // been on ice. + // + // it's ok for this to be a bit slow, it's a rare operation. 
+ const age = Date.now() - entry.start; + entry.start = perf.now() - age; + } + this.set(key, entry.value, entry); + } + } + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + */ + set(k, v, setOptions = {}) { + if (v === undefined) { + this.delete(k); + return this; + } + const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; + let { noUpdateTTL = this.noUpdateTTL } = setOptions; + const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss'; + status.maxEntrySizeExceeded = true; + } + // have to delete, in case something is there already. + this.delete(k); + return this; + } + let index = this.#size === 0 ? undefined : this.#keyMap.get(k); + if (index === undefined) { + // addition + index = (this.#size === 0 + ? this.#tail + : this.#free.length !== 0 + ? this.#free.pop() + : this.#size === this.#max + ? this.#evict(false) + : this.#size); + this.#keyList[index] = k; + this.#valList[index] = v; + this.#keyMap.set(k, index); + this.#next[this.#tail] = index; + this.#prev[index] = this.#tail; + this.#tail = index; + this.#size++; + this.#addItemSize(index, size, status); + if (status) + status.set = 'add'; + noUpdateTTL = false; + } + else { + // update + this.#moveToTail(index); + const oldVal = this.#valList[index]; + if (v !== oldVal) { + if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } + } + else if (!noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(oldVal, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([oldVal, k, 'set']); + } + } + this.#removeItemSize(index); + this.#addItemSize(index, size, status); + this.#valList[index] = v; + if (status) { + status.set = 'replace'; + const oldValue = oldVal && this.#isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal; + if (oldValue !== undefined) + status.oldValue = oldValue; + } + } + else if (status) { + status.set = 'update'; + } + } + if (ttl !== 0 && !this.#ttls) { + this.#initializeTTLTracking(); + } + if (this.#ttls) { + if (!noUpdateTTL) { + this.#setItemTTL(index, ttl, start); + } + if (status) + this.#statusTTL(status, index); + } + if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return this; + } + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. 
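+ *
+ * A drain sketch (illustrative):
+ *
+ *     let v
+ *     while ((v = cache.pop()) !== undefined) {
+ *       // `v` was the least recently used value; it has been evicted
+ *     }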
+ */ + pop() { + try { + while (this.#size) { + const val = this.#valList[this.#head]; + this.#evict(true); + if (this.#isBackgroundFetch(val)) { + if (val.__staleWhileFetching) { + return val.__staleWhileFetching; + } + } + else if (val !== undefined) { + return val; + } + } + } + finally { + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } + } + #evict(free) { + const head = this.#head; + const k = this.#keyList[head]; + const v = this.#valList[head]; + if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'evict'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'evict']); + } + } + this.#removeItemSize(head); + // if we aren't about to use the index, then null these out + if (free) { + this.#keyList[head] = undefined; + this.#valList[head] = undefined; + this.#free.push(head); + } + if (this.#size === 1) { + this.#head = this.#tail = 0; + this.#free.length = 0; + } + else { + this.#head = this.#next[head]; + } + this.#keyMap.delete(k); + this.#size--; + return head; + } + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k, hasOptions = {}) { + const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v) && + v.__staleWhileFetching === undefined) { + return false; + } + if (!this.#isStale(index)) { + if (updateAgeOnHas) { + this.#updateItemAge(index); + } + if (status) { + status.has = 'hit'; + this.#statusTTL(status, index); + } + return true; + } + else if (status) { + status.has = 'stale'; + this.#statusTTL(status, index); + } + } + else if (status) { + status.has = 'miss'; + } + return false; + } + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k, peekOptions = {}) { + const { allowStale = this.allowStale } = peekOptions; + const index = this.#keyMap.get(k); + if (index !== undefined && + (allowStale || !this.#isStale(index))) { + const v = this.#valList[index]; + // either stale and allowed, or forcing a refresh of non-stale value + return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + } + } + #backgroundFetch(k, index, options, context) { + const v = index === undefined ? undefined : this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + return v; + } + const ac = new AC(); + const { signal } = options; + // when/if our AC signals, then stop listening to theirs. 
+ signal?.addEventListener('abort', () => ac.abort(signal.reason), { + signal: ac.signal, + }); + const fetchOpts = { + signal: ac.signal, + options, + context, + }; + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal; + const ignoreAbort = options.ignoreFetchAbort && v !== undefined; + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true; + options.status.fetchError = ac.signal.reason; + if (ignoreAbort) + options.status.fetchAbortIgnored = true; + } + else { + options.status.fetchResolved = true; + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason); + } + // either we didn't abort, and are still here, or we did, and ignored + const bf = p; + if (this.#valList[index] === p) { + if (v === undefined) { + if (bf.__staleWhileFetching) { + this.#valList[index] = bf.__staleWhileFetching; + } + else { + this.delete(k); + } + } + else { + if (options.status) + options.status.fetchUpdated = true; + this.set(k, v, fetchOpts.options); + } + } + return v; + }; + const eb = (er) => { + if (options.status) { + options.status.fetchRejected = true; + options.status.fetchError = er; + } + return fetchFail(er); + }; + const fetchFail = (er) => { + const { aborted } = ac.signal; + const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; + const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; + const noDelete = allowStale || options.noDeleteOnFetchRejection; + const bf = p; + if (this.#valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || bf.__staleWhileFetching === undefined; + if (del) { + this.delete(k); + } + else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.#valList[index] = bf.__staleWhileFetching; + } + } + if (allowStale) { + if (options.status && bf.__staleWhileFetching !== undefined) { + options.status.returnedStale = true; + } + return bf.__staleWhileFetching; + } + else if (bf.__returned === bf) { + throw er; + } + }; + const pcall = (res, rej) => { + const fmp = this.#fetchMethod?.(k, v, fetchOpts); + if (fmp && fmp instanceof Promise) { + fmp.then(v => res(v === undefined ? undefined : v), rej); + } + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if (!options.ignoreFetchAbort || + options.allowStaleOnFetchAbort) { + res(undefined); + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true); + } + } + }); + }; + if (options.status) + options.status.fetchDispatched = true; + const p = new Promise(pcall).then(cb, eb); + const bf = Object.assign(p, { + __abortController: ac, + __staleWhileFetching: v, + __returned: undefined, + }); + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, bf, { ...fetchOpts.options, status: undefined }); + index = this.#keyMap.get(k); + } + else { + this.#valList[index] = bf; + } + return bf; + } + #isBackgroundFetch(p) { + if (!this.#hasFetchMethod) + return false; + const b = p; + return (!!b && + b instanceof Promise && + b.hasOwnProperty('__staleWhileFetching') && + b.__abortController instanceof AC); + } + async fetch(k, fetchOptions = {}) { + const { + // get options + allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; + if (!this.#hasFetchMethod) { + if (status) + status.fetch = 'get'; + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }); + } + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + }; + let index = this.#keyMap.get(k); + if (index === undefined) { + if (status) + status.fetch = 'miss'; + const p = this.#backgroundFetch(k, index, options, context); + return (p.__returned = p); + } + else { + // in cache, maybe already fetching + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + const stale = allowStale && v.__staleWhileFetching !== undefined; + if (status) { + status.fetch = 'inflight'; + if (stale) + status.returnedStale = true; + } + return stale ? v.__staleWhileFetching : (v.__returned = v); + } + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.#isStale(index); + if (!forceRefresh && !isStale) { + if (status) + status.fetch = 'hit'; + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + if (status) + this.#statusTTL(status, index); + return v; + } + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.#backgroundFetch(k, index, options, context); + const hasStale = p.__staleWhileFetching !== undefined; + const staleVal = hasStale && allowStale; + if (status) { + status.fetch = isStale ? 'stale' : 'refresh'; + if (staleVal && isStale) + status.returnedStale = true; + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p); + } + } + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. + * + * If the key is not found, get() will return `undefined`. 
+ */ + get(k, getOptions = {}) { + const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const value = this.#valList[index]; + const fetching = this.#isBackgroundFetch(value); + if (status) + this.#statusTTL(status, index); + if (this.#isStale(index)) { + if (status) + status.get = 'stale'; + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k); + } + if (status && allowStale) + status.returnedStale = true; + return allowStale ? value : undefined; + } + else { + if (status && + allowStale && + value.__staleWhileFetching !== undefined) { + status.returnedStale = true; + } + return allowStale ? value.__staleWhileFetching : undefined; + } + } + else { + if (status) + status.get = 'hit'; + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching; + } + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + return value; + } + } + else if (status) { + status.get = 'miss'; + } + } + #connect(p, n) { + this.#prev[n] = p; + this.#next[p] = n; + } + #moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.#tail) { + if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#connect(this.#prev[index], this.#next[index]); + } + this.#connect(this.#tail, index); + this.#tail = index; + } + } + /** + * Deletes a key out of the cache. + * Returns true if the key was deleted, false otherwise. + */ + delete(k) { + let deleted = false; + if (this.#size !== 0) { + const index = this.#keyMap.get(k); + if (index !== undefined) { + deleted = true; + if (this.#size === 1) { + this.clear(); + } + else { + this.#removeItemSize(index); + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + this.#keyMap.delete(k); + this.#keyList[index] = undefined; + this.#valList[index] = undefined; + if (index === this.#tail) { + this.#tail = this.#prev[index]; + } + else if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#next[this.#prev[index]] = this.#next[index]; + this.#prev[this.#next[index]] = this.#prev[index]; + } + this.#size--; + this.#free.push(index); + } + } + } + if (this.#hasDisposeAfter && this.#disposed?.length) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return deleted; + } + /** + * Clear the cache entirely, throwing away all values. 
+ */ + clear() { + for (const index of this.#rindexes({ allowStale: true })) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else { + const k = this.#keyList[index]; + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + } + this.#keyMap.clear(); + this.#valList.fill(undefined); + this.#keyList.fill(undefined); + if (this.#ttls && this.#starts) { + this.#ttls.fill(0); + this.#starts.fill(0); + } + if (this.#sizes) { + this.#sizes.fill(0); + } + this.#head = 0; + this.#tail = 0; + this.#free.length = 0; + this.#calculatedSize = 0; + this.#size = 0; + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.min.js new file mode 100644 index 0000000000000..5a16b3940d6df --- /dev/null +++ b/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.min.js @@ -0,0 +1,2 @@ +var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache}; +//# sourceMappingURL=index.min.js.map diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/package.json b/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/package.json b/node_modules/@npmcli/git/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000..bae4a04839d1f --- /dev/null +++ b/node_modules/@npmcli/git/node_modules/lru-cache/package.json @@ -0,0 +1,108 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "10.0.1", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "sideEffects": false, + "scripts": { + "build": "npm run prepare", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", + "postprepare": "bash fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write .", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", + "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", + "prebenchmark": "npm run prepare", + "benchmark": "make -C benchmark", + "preprofile": "npm run prepare", + "profile": "make -C benchmark profile" + }, + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "exports": { + "./min": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.min.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.min.js" + } + }, + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + } + }, + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", + "@types/node": "^20.2.5", + "@types/tap": "^15.0.6", + "benchmark": "^2.1.4", + "c8": "^7.11.2", + "clock-mock": "^1.0.6", + "esbuild": "^0.17.11", + "eslint-config-prettier": "^8.5.0", + "marked": "^4.2.12", + "mkdirp": "^2.1.5", + "prettier": "^2.6.2", + "size-limit": "^7.0.8", + "tap": "^16.3.4", + "ts-node": "^10.9.1", + "tslib": "^2.4.0", + "typedoc": "^0.24.6", + "typescript": 
"^5.0.4" + }, + "license": "ISC", + "files": [ + "dist" + ], + "engines": { + "node": "14 || >=16.14" + }, + "prettier": { + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--expose-gc", + "-r", + "ts-node/register" + ], + "ts": false + }, + "size-limit": [ + { + "path": "./dist/mjs/index.js" + } + ] +} diff --git a/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/git/package.json index eeba1c0415788..a7e7f09d12c47 100644 --- a/node_modules/@npmcli/git/package.json +++ b/node_modules/@npmcli/git/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/git", - "version": "4.1.0", + "version": "5.0.0", "main": "lib/index.js", "files": [ "bin/", @@ -31,14 +31,14 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.15.1", + "@npmcli/template-oss": "4.18.0", "npm-package-arg": "^10.0.0", "slash": "^3.0.0", "tap": "^16.0.1" }, "dependencies": { "@npmcli/promise-spawn": "^6.0.0", - "lru-cache": "^7.4.4", + "lru-cache": "^10.0.1", "npm-pick-manifest": "^8.0.0", "proc-log": "^3.0.0", "promise-inflight": "^1.0.1", @@ -47,11 +47,17 @@ "which": "^3.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.15.1", - "publish": true + "version": "4.18.0", + "publish": true, + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] } } diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE new file mode 100644 index 0000000000000..8f90f96f4c6c5 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, +OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js new file mode 100644 index 0000000000000..e25a4d1426821 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js @@ -0,0 +1,172 @@ +// The goal here is to minimize both git workload and +// the number of refs we download over the network. +// +// Every method ends up with the checked out working dir +// at the specified ref, and resolves with the git sha. + +// Only certain whitelisted hosts get shallow cloning. +// Many hosts (including GHE) don't always support it. +// A failed shallow fetch takes a LOT longer than a full +// fetch in most cases, so we skip it entirely. 
+// Set opts.gitShallow = true/false to force this behavior +// one way or the other. +const shallowHosts = new Set([ + 'github.com', + 'gist.github.com', + 'gitlab.com', + 'bitbucket.com', + 'bitbucket.org', +]) +// we have to use url.parse until we add the same shim that hosted-git-info has +// to handle scp:// urls +const { parse } = require('url') // eslint-disable-line node/no-deprecated-api +const path = require('path') + +const getRevs = require('./revs.js') +const spawn = require('./spawn.js') +const { isWindows } = require('./utils.js') + +const pickManifest = require('npm-pick-manifest') +const fs = require('fs/promises') + +module.exports = (repo, ref = 'HEAD', target = null, opts = {}) => + getRevs(repo, opts).then(revs => clone( + repo, + revs, + ref, + resolveRef(revs, ref, opts), + target || defaultTarget(repo, opts.cwd), + opts + )) + +const maybeShallow = (repo, opts) => { + if (opts.gitShallow === false || opts.gitShallow) { + return opts.gitShallow + } + return shallowHosts.has(parse(repo).host) +} + +const defaultTarget = (repo, /* istanbul ignore next */ cwd = process.cwd()) => + path.resolve(cwd, path.basename(repo.replace(/[/\\]?\.git$/, ''))) + +const clone = (repo, revs, ref, revDoc, target, opts) => { + if (!revDoc) { + return unresolved(repo, ref, target, opts) + } + if (revDoc.sha === revs.refs.HEAD.sha) { + return plain(repo, revDoc, target, opts) + } + if (revDoc.type === 'tag' || revDoc.type === 'branch') { + return branch(repo, revDoc, target, opts) + } + return other(repo, revDoc, target, opts) +} + +const resolveRef = (revs, ref, opts) => { + const { spec = {} } = opts + ref = spec.gitCommittish || ref + /* istanbul ignore next - will fail anyway, can't pull */ + if (!revs) { + return null + } + if (spec.gitRange) { + return pickManifest(revs, spec.gitRange, opts) + } + if (!ref) { + return revs.refs.HEAD + } + if (revs.refs[ref]) { + return revs.refs[ref] + } + if (revs.shas[ref]) { + return revs.refs[revs.shas[ref][0]] + } + return null +} + +// pull request or some other kind of advertised ref +const other = (repo, revDoc, target, opts) => { + const shallow = maybeShallow(repo, opts) + + const fetchOrigin = ['fetch', 'origin', revDoc.rawRef] + .concat(shallow ? ['--depth=1'] : []) + + const git = (args) => spawn(args, { ...opts, cwd: target }) + return fs.mkdir(target, { recursive: true }) + .then(() => git(['init'])) + .then(() => isWindows(opts) + ? git(['config', '--local', '--add', 'core.longpaths', 'true']) + : null) + .then(() => git(['remote', 'add', 'origin', repo])) + .then(() => git(fetchOrigin)) + .then(() => git(['checkout', revDoc.sha])) + .then(() => updateSubmodules(target, opts)) + .then(() => revDoc.sha) +} + +// tag or branches. use -b +const branch = (repo, revDoc, target, opts) => { + const args = [ + 'clone', + '-b', + revDoc.ref, + repo, + target, + '--recurse-submodules', + ] + if (maybeShallow(repo, opts)) { + args.push('--depth=1') + } + if (isWindows(opts)) { + args.push('--config', 'core.longpaths=true') + } + return spawn(args, opts).then(() => revDoc.sha) +} + +// just the head. 
clone it +const plain = (repo, revDoc, target, opts) => { + const args = [ + 'clone', + repo, + target, + '--recurse-submodules', + ] + if (maybeShallow(repo, opts)) { + args.push('--depth=1') + } + if (isWindows(opts)) { + args.push('--config', 'core.longpaths=true') + } + return spawn(args, opts).then(() => revDoc.sha) +} + +const updateSubmodules = async (target, opts) => { + const hasSubmodules = await fs.stat(`${target}/.gitmodules`) + .then(() => true) + .catch(() => false) + if (!hasSubmodules) { + return null + } + return spawn([ + 'submodule', + 'update', + '-q', + '--init', + '--recursive', + ], { ...opts, cwd: target }) +} + +const unresolved = (repo, ref, target, opts) => { + // can't do this one shallowly, because the ref isn't advertised + // but we can avoid checking out the working dir twice, at least + const lp = isWindows(opts) ? ['--config', 'core.longpaths=true'] : [] + const cloneArgs = ['clone', '--mirror', '-q', repo, target + '/.git'] + const git = (args) => spawn(args, { ...opts, cwd: target }) + return fs.mkdir(target, { recursive: true }) + .then(() => git(cloneArgs.concat(lp))) + .then(() => git(['init'])) + .then(() => git(['checkout', ref])) + .then(() => updateSubmodules(target, opts)) + .then(() => git(['rev-parse', '--revs-only', 'HEAD'])) + .then(({ stdout }) => stdout.trim()) +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js new file mode 100644 index 0000000000000..7aeac4762866f --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js @@ -0,0 +1,36 @@ + +const maxRetry = 3 + +class GitError extends Error { + shouldRetry () { + return false + } +} + +class GitConnectionError extends GitError { + constructor (message) { + super('A git connection error occurred') + } + + shouldRetry (number) { + return number < maxRetry + } +} + +class GitPathspecError extends GitError { + constructor (message) { + super('The git reference could not be found') + } +} + +class GitUnknownError extends GitError { + constructor (message) { + super('An unknown git error occurred') + } +} + +module.exports = { + GitConnectionError, + GitPathspecError, + GitUnknownError, +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js new file mode 100644 index 0000000000000..34bd310b88e5d --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js @@ -0,0 +1,15 @@ +const is = require('./is.js') +const { dirname } = require('path') + +module.exports = async ({ cwd = process.cwd(), root } = {}) => { + while (true) { + if (await is({ cwd })) { + return cwd + } + const next = dirname(cwd) + if (cwd === root || cwd === next) { + return null + } + cwd = next + } +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js new file mode 100644 index 0000000000000..10a65f782e6da --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js @@ -0,0 +1,9 @@ +module.exports = { + clone: require('./clone.js'), + revs: require('./revs.js'), + spawn: require('./spawn.js'), + is: require('./is.js'), + find: require('./find.js'), + isClean: require('./is-clean.js'), + errors: require('./errors.js'), +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js 
b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js new file mode 100644 index 0000000000000..182373be94193 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js @@ -0,0 +1,6 @@ +const spawn = require('./spawn.js') + +module.exports = (opts = {}) => + spawn(['status', '--porcelain=v1', '-uno'], opts) + .then(res => !res.stdout.trim().split(/\r?\n+/) + .map(l => l.trim()).filter(l => l).length) diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js new file mode 100644 index 0000000000000..e2542f2157727 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js @@ -0,0 +1,6 @@ +// not an airtight indicator, but a good gut-check to even bother trying +const { promisify } = require('util') +const fs = require('fs') +const stat = promisify(fs.stat) +module.exports = ({ cwd = process.cwd() } = {}) => + stat(cwd + '/.git').then(() => true, () => false) diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js new file mode 100644 index 0000000000000..6bd7e7a4c1531 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js @@ -0,0 +1,147 @@ +// turn an array of lines from `git ls-remote` into a thing +// vaguely resembling a packument, where docs are a resolved ref + +const semver = require('semver') + +module.exports = lines => finish(lines.reduce(linesToRevsReducer, { + versions: {}, + 'dist-tags': {}, + refs: {}, + shas: {}, +})) + +const finish = revs => distTags(shaList(peelTags(revs))) + +// We can check out shallow clones on specific SHAs if we have a ref +const shaList = revs => { + Object.keys(revs.refs).forEach(ref => { + const doc = revs.refs[ref] + if (!revs.shas[doc.sha]) { + revs.shas[doc.sha] = [ref] + } else { + revs.shas[doc.sha].push(ref) + } + }) + return revs +} + +// Replace any tags with their ^{} counterparts, if those exist +const peelTags = revs => { + Object.keys(revs.refs).filter(ref => ref.endsWith('^{}')).forEach(ref => { + const peeled = revs.refs[ref] + const unpeeled = revs.refs[ref.replace(/\^\{\}$/, '')] + if (unpeeled) { + unpeeled.sha = peeled.sha + delete revs.refs[ref] + } + }) + return revs +} + +const distTags = revs => { + // not entirely sure what situations would result in an + // ichabod repo, but best to be careful in Sleepy Hollow anyway + const HEAD = revs.refs.HEAD || /* istanbul ignore next */ {} + const versions = Object.keys(revs.versions) + versions.forEach(v => { + // simulate a dist-tags with latest pointing at the + // 'latest' branch if one exists and is a version, + // or HEAD if not. + const ver = revs.versions[v] + if (revs.refs.latest && ver.sha === revs.refs.latest.sha) { + revs['dist-tags'].latest = v + } else if (ver.sha === HEAD.sha) { + revs['dist-tags'].HEAD = v + if (!revs.refs.latest) { + revs['dist-tags'].latest = v + } + } + }) + return revs +} + +const refType = ref => { + if (ref.startsWith('refs/tags/')) { + return 'tag' + } + if (ref.startsWith('refs/heads/')) { + return 'branch' + } + if (ref.startsWith('refs/pull/')) { + return 'pull' + } + if (ref === 'HEAD') { + return 'head' + } + // Could be anything, ignore for now + /* istanbul ignore next */ + return 'other' +} + +// return the doc, or null if we should ignore it. 
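// A worked sketch (not part of the vendored file) of what this module's
// export produces for representative `git ls-remote` lines, assuming
// `linesToRevs` names the reducer pipeline defined in this file:
const revs = linesToRevs([
  'aaaa000000000000000000000000000000000001\tHEAD',
  'aaaa000000000000000000000000000000000001\trefs/heads/main',
  'bbbb000000000000000000000000000000000002\trefs/tags/v1.2.3',
  'aaaa000000000000000000000000000000000001\trefs/tags/v1.2.3^{}',
])
// peelTags: refs['v1.2.3'] now carries the peeled commit sha, and the
// ^{} entries are gone
revs.refs['v1.2.3'].sha // 'aaaa000000000000000000000000000000000001'
// the release-style tag parses as a semver version, and since its peeled
// sha matches HEAD, distTags simulates both dist-tags
revs['dist-tags'] // { HEAD: '1.2.3', latest: '1.2.3' }
// shaList: every surviving ref is also indexed by sha for later lookups
revs.shas['aaaa000000000000000000000000000000000001'].includes('main') // true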
+const lineToRevDoc = line => { + const split = line.trim().split(/\s+/, 2) + if (split.length < 2) { + return null + } + + const sha = split[0].trim() + const rawRef = split[1].trim() + const type = refType(rawRef) + + if (type === 'tag') { + // refs/tags/foo^{} is the 'peeled tag', ie the commit + // that is tagged by refs/tags/foo they resolve to the same + // content, just different objects in git's data structure. + // But, we care about the thing the tag POINTS to, not the tag + // object itself, so we only look at the peeled tag refs, and + // ignore the pointer. + // For now, though, we have to save both, because some tags + // don't have peels, if they were not annotated. + const ref = rawRef.slice('refs/tags/'.length) + return { sha, ref, rawRef, type } + } + + if (type === 'branch') { + const ref = rawRef.slice('refs/heads/'.length) + return { sha, ref, rawRef, type } + } + + if (type === 'pull') { + // NB: merged pull requests installable with #pull/123/merge + // for the merged pr, or #pull/123 for the PR head + const ref = rawRef.slice('refs/'.length).replace(/\/head$/, '') + return { sha, ref, rawRef, type } + } + + if (type === 'head') { + const ref = 'HEAD' + return { sha, ref, rawRef, type } + } + + // at this point, all we can do is leave the ref un-munged + return { sha, ref: rawRef, rawRef, type } +} + +const linesToRevsReducer = (revs, line) => { + const doc = lineToRevDoc(line) + + if (!doc) { + return revs + } + + revs.refs[doc.ref] = doc + revs.refs[doc.rawRef] = doc + + if (doc.type === 'tag') { + // try to pull a semver value out of tags like `release-v1.2.3` + // which is a pretty common pattern. + const match = !doc.ref.endsWith('^{}') && + doc.ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/) + if (match && semver.valid(match[1], true)) { + revs.versions[semver.clean(match[1], true)] = doc + } + } + + return revs +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js new file mode 100644 index 0000000000000..7540ec7c8b9f7 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js @@ -0,0 +1,33 @@ +const { + GitConnectionError, + GitPathspecError, + GitUnknownError, +} = require('./errors.js') + +const connectionErrorRe = new RegExp([ + 'remote error: Internal Server Error', + 'The remote end hung up unexpectedly', + 'Connection timed out', + 'Operation timed out', + 'Failed to connect to .* Timed out', + 'Connection reset by peer', + 'SSL_ERROR_SYSCALL', + 'The requested URL returned error: 503', +].join('|')) + +const missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/ + +function makeError (er) { + const message = er.stderr + let gitEr + if (connectionErrorRe.test(message)) { + gitEr = new GitConnectionError(message) + } else if (missingPathspecRe.test(message)) { + gitEr = new GitPathspecError(message) + } else { + gitEr = new GitUnknownError(message) + } + return Object.assign(gitEr, er) +} + +module.exports = makeError diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js new file mode 100644 index 0000000000000..3119af16e0cf1 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js @@ -0,0 +1,12 @@ +// Values we want to set if they're not already defined by the end user +// This defaults to accepting new ssh host key fingerprints +const gitEnv = { + 
GIT_ASKPASS: 'echo', + GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new', +} +module.exports = (opts = {}) => ({ + stdioString: true, + ...opts, + shell: false, + env: opts.env || { ...gitEnv, ...process.env }, +}) diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js new file mode 100644 index 0000000000000..ee72370d5b7ec --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js @@ -0,0 +1,28 @@ +const pinflight = require('promise-inflight') +const spawn = require('./spawn.js') +const LRU = require('lru-cache') + +const revsCache = new LRU({ + max: 100, + ttl: 5 * 60 * 1000, +}) + +const linesToRevs = require('./lines-to-revs.js') + +module.exports = async (repo, opts = {}) => { + if (!opts.noGitRevCache) { + const cached = revsCache.get(repo) + if (cached) { + return cached + } + } + + return pinflight(`ls-remote:${repo}`, () => + spawn(['ls-remote', repo], opts) + .then(({ stdout }) => linesToRevs(stdout.trim().split('\n'))) + .then(revs => { + revsCache.set(repo, revs) + return revs + }) + ) +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js new file mode 100644 index 0000000000000..7098d7b872942 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js @@ -0,0 +1,44 @@ +const spawn = require('@npmcli/promise-spawn') +const promiseRetry = require('promise-retry') +const log = require('proc-log') +const makeError = require('./make-error.js') +const whichGit = require('./which.js') +const makeOpts = require('./opts.js') + +module.exports = (gitArgs, opts = {}) => { + const gitPath = whichGit(opts) + + if (gitPath instanceof Error) { + return Promise.reject(gitPath) + } + + // undocumented option, mostly only here for tests + const args = opts.allowReplace || gitArgs[0] === '--no-replace-objects' + ? 
gitArgs + : ['--no-replace-objects', ...gitArgs] + + let retryOpts = opts.retry + if (retryOpts === null || retryOpts === undefined) { + retryOpts = { + retries: opts.fetchRetries || 2, + factor: opts.fetchRetryFactor || 10, + maxTimeout: opts.fetchRetryMaxtimeout || 60000, + minTimeout: opts.fetchRetryMintimeout || 1000, + } + } + return promiseRetry((retryFn, number) => { + if (number !== 1) { + log.silly('git', `Retrying git command: ${ + args.join(' ')} attempt # ${number}`) + } + + return spawn(gitPath, args, makeOpts(opts)) + .catch(er => { + const gitError = makeError(er) + if (!gitError.shouldRetry(number)) { + throw gitError + } + retryFn(gitError) + }) + }, retryOpts) +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js new file mode 100644 index 0000000000000..fcd9578a19597 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js @@ -0,0 +1,3 @@ +const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32' + +exports.isWindows = isWindows diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js new file mode 100644 index 0000000000000..dc2a1ad212166 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js @@ -0,0 +1,18 @@ +const which = require('which') + +let gitPath +try { + gitPath = which.sync('git') +} catch { + // ignore errors +} + +module.exports = (opts = {}) => { + if (opts.git) { + return opts.git + } + if (!gitPath || opts.git === false) { + return Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' }) + } + return gitPath +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json new file mode 100644 index 0000000000000..eeba1c0415788 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json @@ -0,0 +1,57 @@ +{ + "name": "@npmcli/git", + "version": "4.1.0", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "description": "a util for spawning git from npm CLI contexts", + "repository": { + "type": "git", + "url": "https://github.com/npm/git.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "lint": "eslint \"**/*.js\"", + "snap": "tap", + "test": "tap", + "posttest": "npm run lint", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "template-oss-apply": "template-oss-apply --force" + }, + "tap": { + "timeout": 600, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.15.1", + "npm-package-arg": "^10.0.0", + "slash": "^3.0.0", + "tap": "^16.0.1" + }, + "dependencies": { + "@npmcli/promise-spawn": "^6.0.0", + "lru-cache": "^7.4.4", + "npm-pick-manifest": "^8.0.0", + "proc-log": "^3.0.0", + "promise-inflight": "^1.0.1", + "promise-retry": "^2.0.1", + "semver": "^7.3.5", + "which": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.15.1", + "publish": true + } +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/LICENSE b/node_modules/pacote/node_modules/@npmcli/git/LICENSE new file mode 100644 index 0000000000000..8f90f96f4c6c5 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, +OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/clone.js b/node_modules/pacote/node_modules/@npmcli/git/lib/clone.js new file mode 100644 index 0000000000000..e25a4d1426821 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/clone.js @@ -0,0 +1,172 @@ +// The goal here is to minimize both git workload and +// the number of refs we download over the network. +// +// Every method ends up with the checked out working dir +// at the specified ref, and resolves with the git sha. + +// Only certain whitelisted hosts get shallow cloning. +// Many hosts (including GHE) don't always support it. +// A failed shallow fetch takes a LOT longer than a full +// fetch in most cases, so we skip it entirely. +// Set opts.gitShallow = true/false to force this behavior +// one way or the other. 
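// Sketch of the heuristic the comment above describes, as implemented by
// the maybeShallow() helper defined below (annotation, not diff content):
//   opts.gitShallow === true  -> always pass --depth=1
//   opts.gitShallow === false -> never pass it
//   unset                     -> only when parse(repo).host is in
//                                shallowHosts, so e.g. github.com gets a
//                                shallow fetch while a private GHE host
//                                falls back to a full one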
+const shallowHosts = new Set([ + 'github.com', + 'gist.github.com', + 'gitlab.com', + 'bitbucket.com', + 'bitbucket.org', +]) +// we have to use url.parse until we add the same shim that hosted-git-info has +// to handle scp:// urls +const { parse } = require('url') // eslint-disable-line node/no-deprecated-api +const path = require('path') + +const getRevs = require('./revs.js') +const spawn = require('./spawn.js') +const { isWindows } = require('./utils.js') + +const pickManifest = require('npm-pick-manifest') +const fs = require('fs/promises') + +module.exports = (repo, ref = 'HEAD', target = null, opts = {}) => + getRevs(repo, opts).then(revs => clone( + repo, + revs, + ref, + resolveRef(revs, ref, opts), + target || defaultTarget(repo, opts.cwd), + opts + )) + +const maybeShallow = (repo, opts) => { + if (opts.gitShallow === false || opts.gitShallow) { + return opts.gitShallow + } + return shallowHosts.has(parse(repo).host) +} + +const defaultTarget = (repo, /* istanbul ignore next */ cwd = process.cwd()) => + path.resolve(cwd, path.basename(repo.replace(/[/\\]?\.git$/, ''))) + +const clone = (repo, revs, ref, revDoc, target, opts) => { + if (!revDoc) { + return unresolved(repo, ref, target, opts) + } + if (revDoc.sha === revs.refs.HEAD.sha) { + return plain(repo, revDoc, target, opts) + } + if (revDoc.type === 'tag' || revDoc.type === 'branch') { + return branch(repo, revDoc, target, opts) + } + return other(repo, revDoc, target, opts) +} + +const resolveRef = (revs, ref, opts) => { + const { spec = {} } = opts + ref = spec.gitCommittish || ref + /* istanbul ignore next - will fail anyway, can't pull */ + if (!revs) { + return null + } + if (spec.gitRange) { + return pickManifest(revs, spec.gitRange, opts) + } + if (!ref) { + return revs.refs.HEAD + } + if (revs.refs[ref]) { + return revs.refs[ref] + } + if (revs.shas[ref]) { + return revs.refs[revs.shas[ref][0]] + } + return null +} + +// pull request or some other kind of advertised ref +const other = (repo, revDoc, target, opts) => { + const shallow = maybeShallow(repo, opts) + + const fetchOrigin = ['fetch', 'origin', revDoc.rawRef] + .concat(shallow ? ['--depth=1'] : []) + + const git = (args) => spawn(args, { ...opts, cwd: target }) + return fs.mkdir(target, { recursive: true }) + .then(() => git(['init'])) + .then(() => isWindows(opts) + ? git(['config', '--local', '--add', 'core.longpaths', 'true']) + : null) + .then(() => git(['remote', 'add', 'origin', repo])) + .then(() => git(fetchOrigin)) + .then(() => git(['checkout', revDoc.sha])) + .then(() => updateSubmodules(target, opts)) + .then(() => revDoc.sha) +} + +// tag or branches. use -b +const branch = (repo, revDoc, target, opts) => { + const args = [ + 'clone', + '-b', + revDoc.ref, + repo, + target, + '--recurse-submodules', + ] + if (maybeShallow(repo, opts)) { + args.push('--depth=1') + } + if (isWindows(opts)) { + args.push('--config', 'core.longpaths=true') + } + return spawn(args, opts).then(() => revDoc.sha) +} + +// just the head. 
clone it +const plain = (repo, revDoc, target, opts) => { + const args = [ + 'clone', + repo, + target, + '--recurse-submodules', + ] + if (maybeShallow(repo, opts)) { + args.push('--depth=1') + } + if (isWindows(opts)) { + args.push('--config', 'core.longpaths=true') + } + return spawn(args, opts).then(() => revDoc.sha) +} + +const updateSubmodules = async (target, opts) => { + const hasSubmodules = await fs.stat(`${target}/.gitmodules`) + .then(() => true) + .catch(() => false) + if (!hasSubmodules) { + return null + } + return spawn([ + 'submodule', + 'update', + '-q', + '--init', + '--recursive', + ], { ...opts, cwd: target }) +} + +const unresolved = (repo, ref, target, opts) => { + // can't do this one shallowly, because the ref isn't advertised + // but we can avoid checking out the working dir twice, at least + const lp = isWindows(opts) ? ['--config', 'core.longpaths=true'] : [] + const cloneArgs = ['clone', '--mirror', '-q', repo, target + '/.git'] + const git = (args) => spawn(args, { ...opts, cwd: target }) + return fs.mkdir(target, { recursive: true }) + .then(() => git(cloneArgs.concat(lp))) + .then(() => git(['init'])) + .then(() => git(['checkout', ref])) + .then(() => updateSubmodules(target, opts)) + .then(() => git(['rev-parse', '--revs-only', 'HEAD'])) + .then(({ stdout }) => stdout.trim()) +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/errors.js b/node_modules/pacote/node_modules/@npmcli/git/lib/errors.js new file mode 100644 index 0000000000000..7aeac4762866f --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/errors.js @@ -0,0 +1,36 @@ + +const maxRetry = 3 + +class GitError extends Error { + shouldRetry () { + return false + } +} + +class GitConnectionError extends GitError { + constructor (message) { + super('A git connection error occurred') + } + + shouldRetry (number) { + return number < maxRetry + } +} + +class GitPathspecError extends GitError { + constructor (message) { + super('The git reference could not be found') + } +} + +class GitUnknownError extends GitError { + constructor (message) { + super('An unknown git error occurred') + } +} + +module.exports = { + GitConnectionError, + GitPathspecError, + GitUnknownError, +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/find.js b/node_modules/pacote/node_modules/@npmcli/git/lib/find.js new file mode 100644 index 0000000000000..34bd310b88e5d --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/find.js @@ -0,0 +1,15 @@ +const is = require('./is.js') +const { dirname } = require('path') + +module.exports = async ({ cwd = process.cwd(), root } = {}) => { + while (true) { + if (await is({ cwd })) { + return cwd + } + const next = dirname(cwd) + if (cwd === root || cwd === next) { + return null + } + cwd = next + } +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/index.js b/node_modules/pacote/node_modules/@npmcli/git/lib/index.js new file mode 100644 index 0000000000000..10a65f782e6da --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/index.js @@ -0,0 +1,9 @@ +module.exports = { + clone: require('./clone.js'), + revs: require('./revs.js'), + spawn: require('./spawn.js'), + is: require('./is.js'), + find: require('./find.js'), + isClean: require('./is-clean.js'), + errors: require('./errors.js'), +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js b/node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js new file mode 100644 index 0000000000000..182373be94193 --- /dev/null +++ 
b/node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js @@ -0,0 +1,6 @@ +const spawn = require('./spawn.js') + +module.exports = (opts = {}) => + spawn(['status', '--porcelain=v1', '-uno'], opts) + .then(res => !res.stdout.trim().split(/\r?\n+/) + .map(l => l.trim()).filter(l => l).length) diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/is.js b/node_modules/pacote/node_modules/@npmcli/git/lib/is.js new file mode 100644 index 0000000000000..e2542f2157727 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/is.js @@ -0,0 +1,6 @@ +// not an airtight indicator, but a good gut-check to even bother trying +const { promisify } = require('util') +const fs = require('fs') +const stat = promisify(fs.stat) +module.exports = ({ cwd = process.cwd() } = {}) => + stat(cwd + '/.git').then(() => true, () => false) diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js b/node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js new file mode 100644 index 0000000000000..6bd7e7a4c1531 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js @@ -0,0 +1,147 @@ +// turn an array of lines from `git ls-remote` into a thing +// vaguely resembling a packument, where docs are a resolved ref + +const semver = require('semver') + +module.exports = lines => finish(lines.reduce(linesToRevsReducer, { + versions: {}, + 'dist-tags': {}, + refs: {}, + shas: {}, +})) + +const finish = revs => distTags(shaList(peelTags(revs))) + +// We can check out shallow clones on specific SHAs if we have a ref +const shaList = revs => { + Object.keys(revs.refs).forEach(ref => { + const doc = revs.refs[ref] + if (!revs.shas[doc.sha]) { + revs.shas[doc.sha] = [ref] + } else { + revs.shas[doc.sha].push(ref) + } + }) + return revs +} + +// Replace any tags with their ^{} counterparts, if those exist +const peelTags = revs => { + Object.keys(revs.refs).filter(ref => ref.endsWith('^{}')).forEach(ref => { + const peeled = revs.refs[ref] + const unpeeled = revs.refs[ref.replace(/\^\{\}$/, '')] + if (unpeeled) { + unpeeled.sha = peeled.sha + delete revs.refs[ref] + } + }) + return revs +} + +const distTags = revs => { + // not entirely sure what situations would result in an + // ichabod repo, but best to be careful in Sleepy Hollow anyway + const HEAD = revs.refs.HEAD || /* istanbul ignore next */ {} + const versions = Object.keys(revs.versions) + versions.forEach(v => { + // simulate a dist-tags with latest pointing at the + // 'latest' branch if one exists and is a version, + // or HEAD if not. + const ver = revs.versions[v] + if (revs.refs.latest && ver.sha === revs.refs.latest.sha) { + revs['dist-tags'].latest = v + } else if (ver.sha === HEAD.sha) { + revs['dist-tags'].HEAD = v + if (!revs.refs.latest) { + revs['dist-tags'].latest = v + } + } + }) + return revs +} + +const refType = ref => { + if (ref.startsWith('refs/tags/')) { + return 'tag' + } + if (ref.startsWith('refs/heads/')) { + return 'branch' + } + if (ref.startsWith('refs/pull/')) { + return 'pull' + } + if (ref === 'HEAD') { + return 'head' + } + // Could be anything, ignore for now + /* istanbul ignore next */ + return 'other' +} + +// return the doc, or null if we should ignore it. 
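// In table form (annotation, not diff content), refType() above maps:
//   'refs/tags/v1.0.0'   -> 'tag'
//   'refs/heads/main'    -> 'branch'
//   'refs/pull/123/head' -> 'pull'
//   'HEAD'               -> 'head'
//   anything else        -> 'other'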
+const lineToRevDoc = line => { + const split = line.trim().split(/\s+/, 2) + if (split.length < 2) { + return null + } + + const sha = split[0].trim() + const rawRef = split[1].trim() + const type = refType(rawRef) + + if (type === 'tag') { + // refs/tags/foo^{} is the 'peeled tag', ie the commit + // that is tagged by refs/tags/foo they resolve to the same + // content, just different objects in git's data structure. + // But, we care about the thing the tag POINTS to, not the tag + // object itself, so we only look at the peeled tag refs, and + // ignore the pointer. + // For now, though, we have to save both, because some tags + // don't have peels, if they were not annotated. + const ref = rawRef.slice('refs/tags/'.length) + return { sha, ref, rawRef, type } + } + + if (type === 'branch') { + const ref = rawRef.slice('refs/heads/'.length) + return { sha, ref, rawRef, type } + } + + if (type === 'pull') { + // NB: merged pull requests installable with #pull/123/merge + // for the merged pr, or #pull/123 for the PR head + const ref = rawRef.slice('refs/'.length).replace(/\/head$/, '') + return { sha, ref, rawRef, type } + } + + if (type === 'head') { + const ref = 'HEAD' + return { sha, ref, rawRef, type } + } + + // at this point, all we can do is leave the ref un-munged + return { sha, ref: rawRef, rawRef, type } +} + +const linesToRevsReducer = (revs, line) => { + const doc = lineToRevDoc(line) + + if (!doc) { + return revs + } + + revs.refs[doc.ref] = doc + revs.refs[doc.rawRef] = doc + + if (doc.type === 'tag') { + // try to pull a semver value out of tags like `release-v1.2.3` + // which is a pretty common pattern. + const match = !doc.ref.endsWith('^{}') && + doc.ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/) + if (match && semver.valid(match[1], true)) { + revs.versions[semver.clean(match[1], true)] = doc + } + } + + return revs +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js b/node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js new file mode 100644 index 0000000000000..7540ec7c8b9f7 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js @@ -0,0 +1,33 @@ +const { + GitConnectionError, + GitPathspecError, + GitUnknownError, +} = require('./errors.js') + +const connectionErrorRe = new RegExp([ + 'remote error: Internal Server Error', + 'The remote end hung up unexpectedly', + 'Connection timed out', + 'Operation timed out', + 'Failed to connect to .* Timed out', + 'Connection reset by peer', + 'SSL_ERROR_SYSCALL', + 'The requested URL returned error: 503', +].join('|')) + +const missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/ + +function makeError (er) { + const message = er.stderr + let gitEr + if (connectionErrorRe.test(message)) { + gitEr = new GitConnectionError(message) + } else if (missingPathspecRe.test(message)) { + gitEr = new GitPathspecError(message) + } else { + gitEr = new GitUnknownError(message) + } + return Object.assign(gitEr, er) +} + +module.exports = makeError diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/opts.js b/node_modules/pacote/node_modules/@npmcli/git/lib/opts.js new file mode 100644 index 0000000000000..3119af16e0cf1 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/opts.js @@ -0,0 +1,12 @@ +// Values we want to set if they're not already defined by the end user +// This defaults to accepting new ssh host key fingerprints +const gitEnv = { + GIT_ASKPASS: 'echo', + GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new', +} 
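// Because process.env is spread last in the factory below, these two
// defaults apply only when the ambient environment does not already
// define them, and an explicit opts.env bypasses them entirely. Sketch
// (with `makeOpts` as a stand-in name for this module's export):
//   makeOpts().env.GIT_ASKPASS            // 'echo' unless set outside
//   makeOpts({ env: {} }).env.GIT_ASKPASS // undefined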
+module.exports = (opts = {}) => ({ + stdioString: true, + ...opts, + shell: false, + env: opts.env || { ...gitEnv, ...process.env }, +}) diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/revs.js b/node_modules/pacote/node_modules/@npmcli/git/lib/revs.js new file mode 100644 index 0000000000000..ee72370d5b7ec --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/revs.js @@ -0,0 +1,28 @@ +const pinflight = require('promise-inflight') +const spawn = require('./spawn.js') +const LRU = require('lru-cache') + +const revsCache = new LRU({ + max: 100, + ttl: 5 * 60 * 1000, +}) + +const linesToRevs = require('./lines-to-revs.js') + +module.exports = async (repo, opts = {}) => { + if (!opts.noGitRevCache) { + const cached = revsCache.get(repo) + if (cached) { + return cached + } + } + + return pinflight(`ls-remote:${repo}`, () => + spawn(['ls-remote', repo], opts) + .then(({ stdout }) => linesToRevs(stdout.trim().split('\n'))) + .then(revs => { + revsCache.set(repo, revs) + return revs + }) + ) +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js b/node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js new file mode 100644 index 0000000000000..7098d7b872942 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js @@ -0,0 +1,44 @@ +const spawn = require('@npmcli/promise-spawn') +const promiseRetry = require('promise-retry') +const log = require('proc-log') +const makeError = require('./make-error.js') +const whichGit = require('./which.js') +const makeOpts = require('./opts.js') + +module.exports = (gitArgs, opts = {}) => { + const gitPath = whichGit(opts) + + if (gitPath instanceof Error) { + return Promise.reject(gitPath) + } + + // undocumented option, mostly only here for tests + const args = opts.allowReplace || gitArgs[0] === '--no-replace-objects' + ? 
gitArgs + : ['--no-replace-objects', ...gitArgs] + + let retryOpts = opts.retry + if (retryOpts === null || retryOpts === undefined) { + retryOpts = { + retries: opts.fetchRetries || 2, + factor: opts.fetchRetryFactor || 10, + maxTimeout: opts.fetchRetryMaxtimeout || 60000, + minTimeout: opts.fetchRetryMintimeout || 1000, + } + } + return promiseRetry((retryFn, number) => { + if (number !== 1) { + log.silly('git', `Retrying git command: ${ + args.join(' ')} attempt # ${number}`) + } + + return spawn(gitPath, args, makeOpts(opts)) + .catch(er => { + const gitError = makeError(er) + if (!gitError.shouldRetry(number)) { + throw gitError + } + retryFn(gitError) + }) + }, retryOpts) +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/utils.js b/node_modules/pacote/node_modules/@npmcli/git/lib/utils.js new file mode 100644 index 0000000000000..fcd9578a19597 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/utils.js @@ -0,0 +1,3 @@ +const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32' + +exports.isWindows = isWindows diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/which.js b/node_modules/pacote/node_modules/@npmcli/git/lib/which.js new file mode 100644 index 0000000000000..dc2a1ad212166 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/lib/which.js @@ -0,0 +1,18 @@ +const which = require('which') + +let gitPath +try { + gitPath = which.sync('git') +} catch { + // ignore errors +} + +module.exports = (opts = {}) => { + if (opts.git) { + return opts.git + } + if (!gitPath || opts.git === false) { + return Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' }) + } + return gitPath +} diff --git a/node_modules/pacote/node_modules/@npmcli/git/package.json b/node_modules/pacote/node_modules/@npmcli/git/package.json new file mode 100644 index 0000000000000..eeba1c0415788 --- /dev/null +++ b/node_modules/pacote/node_modules/@npmcli/git/package.json @@ -0,0 +1,57 @@ +{ + "name": "@npmcli/git", + "version": "4.1.0", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "description": "a util for spawning git from npm CLI contexts", + "repository": { + "type": "git", + "url": "https://github.com/npm/git.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "lint": "eslint \"**/*.js\"", + "snap": "tap", + "test": "tap", + "posttest": "npm run lint", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "template-oss-apply": "template-oss-apply --force" + }, + "tap": { + "timeout": 600, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.15.1", + "npm-package-arg": "^10.0.0", + "slash": "^3.0.0", + "tap": "^16.0.1" + }, + "dependencies": { + "@npmcli/promise-spawn": "^6.0.0", + "lru-cache": "^7.4.4", + "npm-pick-manifest": "^8.0.0", + "proc-log": "^3.0.0", + "promise-inflight": "^1.0.1", + "promise-retry": "^2.0.1", + "semver": "^7.3.5", + "which": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.15.1", + "publish": true + } +} diff --git a/package-lock.json b/package-lock.json index e0ef51b02bc00..9991f5b6b8abd 100644 --- a/package-lock.json +++ b/package-lock.json @@ -162,7 +162,7 @@ "devDependencies": { "@npmcli/docs": "^1.0.0", "@npmcli/eslint-config": "^4.0.2", - "@npmcli/git": "^4.1.0", + "@npmcli/git": "^5.0.0", "@npmcli/mock-globals": "^1.0.0", "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.18.0", @@ -2410,13 +2410,12 @@ } }, "node_modules/@npmcli/git": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-4.1.0.tgz", - "integrity": "sha512-9hwoB3gStVfa0N31ymBmrX+GuDGdVA/QWShZVqE0HK2Af+7QGGrCTbZia/SW0ImUTjTne7SP91qxDmtXvDHRPQ==", - "inBundle": true, + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.0.tgz", + "integrity": "sha512-KzDPpE5oGu2x3ZHUMacrIPqmvgV48TBqNJzNQTszkOqNwtIjlLoZ+4Gxa268EgPE6UcEzunmZdyY9hLoNClXhQ==", "dependencies": { "@npmcli/promise-spawn": "^6.0.0", - "lru-cache": "^7.4.4", + "lru-cache": "^10.0.1", "npm-pick-manifest": "^8.0.0", "proc-log": "^3.0.0", "promise-inflight": "^1.0.1", @@ -2425,7 +2424,15 @@ "which": "^3.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/git/node_modules/lru-cache": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz", + "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==", + "engines": { + "node": "14 || >=16.14" } }, "node_modules/@npmcli/installed-package-contents": { @@ -2517,6 +2524,25 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/@npmcli/package-json/node_modules/@npmcli/git": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-4.1.0.tgz", + "integrity": "sha512-9hwoB3gStVfa0N31ymBmrX+GuDGdVA/QWShZVqE0HK2Af+7QGGrCTbZia/SW0ImUTjTne7SP91qxDmtXvDHRPQ==", + "inBundle": true, + "dependencies": { + "@npmcli/promise-spawn": "^6.0.0", + "lru-cache": "^7.4.4", + "npm-pick-manifest": "^8.0.0", + "proc-log": "^3.0.0", + "promise-inflight": "^1.0.1", + "promise-retry": "^2.0.1", + "semver": "^7.3.5", + "which": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/@npmcli/promise-spawn": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-6.0.2.tgz", @@ -2602,6 +2628,25 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/@npmcli/template-oss/node_modules/@npmcli/git": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-4.1.0.tgz", + "integrity": "sha512-9hwoB3gStVfa0N31ymBmrX+GuDGdVA/QWShZVqE0HK2Af+7QGGrCTbZia/SW0ImUTjTne7SP91qxDmtXvDHRPQ==", + "dev": true, + "dependencies": { + "@npmcli/promise-spawn": "^6.0.0", + "lru-cache": "^7.4.4", + "npm-pick-manifest": "^8.0.0", + "proc-log": "^3.0.0", + "promise-inflight": "^1.0.1", + "promise-retry": "^2.0.1", + "semver": "^7.3.5", + "which": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/@octokit/auth-token": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.3.tgz", @@ -10221,6 +10266,25 @@ "node": "^16.13.0 || >=18.0.0" } }, + "node_modules/pacote/node_modules/@npmcli/git": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-4.1.0.tgz", + "integrity": 
"sha512-9hwoB3gStVfa0N31ymBmrX+GuDGdVA/QWShZVqE0HK2Af+7QGGrCTbZia/SW0ImUTjTne7SP91qxDmtXvDHRPQ==", + "inBundle": true, + "dependencies": { + "@npmcli/promise-spawn": "^6.0.0", + "lru-cache": "^7.4.4", + "npm-pick-manifest": "^8.0.0", + "proc-log": "^3.0.0", + "promise-inflight": "^1.0.1", + "promise-retry": "^2.0.1", + "semver": "^7.3.5", + "which": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -15999,7 +16063,7 @@ "version": "4.0.2", "license": "ISC", "dependencies": { - "@npmcli/git": "^4.0.1", + "@npmcli/git": "^5.0.0", "@npmcli/run-script": "^6.0.0", "json-parse-even-better-errors": "^3.0.0", "proc-log": "^3.0.0", diff --git a/package.json b/package.json index 946a3c8a4cafc..8a88cb1cc8dbc 100644 --- a/package.json +++ b/package.json @@ -193,7 +193,7 @@ "devDependencies": { "@npmcli/docs": "^1.0.0", "@npmcli/eslint-config": "^4.0.2", - "@npmcli/git": "^4.1.0", + "@npmcli/git": "^5.0.0", "@npmcli/mock-globals": "^1.0.0", "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.18.0", diff --git a/workspaces/libnpmversion/package.json b/workspaces/libnpmversion/package.json index 469f9c2bc00d6..2eadbc079a142 100644 --- a/workspaces/libnpmversion/package.json +++ b/workspaces/libnpmversion/package.json @@ -37,7 +37,7 @@ "tap": "^16.3.4" }, "dependencies": { - "@npmcli/git": "^4.0.1", + "@npmcli/git": "^5.0.0", "@npmcli/run-script": "^6.0.0", "json-parse-even-better-errors": "^3.0.0", "proc-log": "^3.0.0", From 9389a81ebf59d827c3774551b47934d251e56193 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Tue, 15 Aug 2023 12:09:04 -0700 Subject: [PATCH 29/68] deps: @npmcli/package-json@5.0.0 --- node_modules/.gitignore | 6 +- .../node_modules/@npmcli/git/LICENSE | 15 - .../node_modules/@npmcli/git/lib/clone.js | 172 -- .../node_modules/@npmcli/git/lib/errors.js | 36 - .../node_modules/@npmcli/git/lib/find.js | 15 - .../node_modules/@npmcli/git/lib/index.js | 9 - .../node_modules/@npmcli/git/lib/is-clean.js | 6 - .../node_modules/@npmcli/git/lib/is.js | 6 - .../@npmcli/git/lib/lines-to-revs.js | 147 -- .../@npmcli/git/lib/make-error.js | 33 - .../node_modules/@npmcli/git/lib/opts.js | 12 - .../node_modules/@npmcli/git/lib/revs.js | 28 - .../node_modules/@npmcli/git/lib/spawn.js | 44 - .../node_modules/@npmcli/git/lib/utils.js | 3 - .../node_modules/@npmcli/git/lib/which.js | 18 - .../node_modules/hosted-git-info/LICENSE | 13 + .../hosted-git-info/lib/from-url.js | 122 ++ .../node_modules/hosted-git-info/lib/hosts.js | 228 +++ .../node_modules/hosted-git-info/lib/index.js | 179 +++ .../hosted-git-info/lib/parse-url.js | 78 + .../node_modules/hosted-git-info/package.json | 66 + .../node_modules/lru-cache/LICENSE | 15 + .../node_modules/lru-cache/dist/cjs/index.js | 1404 +++++++++++++++++ .../lru-cache/dist/cjs/index.min.js | 2 + .../lru-cache/dist/cjs/package.json | 3 + .../node_modules/lru-cache/dist/mjs/index.js | 1400 ++++++++++++++++ .../lru-cache/dist/mjs/index.min.js | 2 + .../lru-cache/dist/mjs/package.json | 3 + .../node_modules/lru-cache/package.json | 108 ++ .../normalize-package-data/LICENSE | 15 + .../lib/extract_description.js | 24 + .../normalize-package-data/lib/fixer.js | 475 ++++++ .../lib/make_warning.js | 22 + .../normalize-package-data/lib/normalize.js | 48 + .../normalize-package-data/lib/safe_format.js | 11 + .../normalize-package-data/lib/typos.json | 25 + .../lib/warning_messages.json | 30 + 
.../package.json | 77 +- .../@npmcli/package-json/package.json | 22 +- package-lock.json | 79 +- package.json | 2 +- workspaces/arborist/package.json | 2 +- 42 files changed, 4391 insertions(+), 614 deletions(-) delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js create mode 100644 node_modules/@npmcli/package-json/node_modules/hosted-git-info/LICENSE create mode 100644 node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/from-url.js create mode 100644 node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/hosts.js create mode 100644 node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/parse-url.js create mode 100644 node_modules/@npmcli/package-json/node_modules/hosted-git-info/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/LICENSE create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/index.min.js create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/index.min.js create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/normalize-package-data/LICENSE create mode 100644 node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/extract_description.js create mode 100644 node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/fixer.js create mode 100644 node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/make_warning.js create mode 100644 node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/normalize.js create mode 100644 node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/safe_format.js create mode 100644 
node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/typos.json create mode 100644 node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/warning_messages.json rename node_modules/@npmcli/package-json/node_modules/{@npmcli/git => normalize-package-data}/package.json (50%) diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 24f1780ead0e5..55611b7a54162 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -33,9 +33,9 @@ !/@npmcli/package-json !/@npmcli/package-json/node_modules/ /@npmcli/package-json/node_modules/* -!/@npmcli/package-json/node_modules/@npmcli/ -/@npmcli/package-json/node_modules/@npmcli/* -!/@npmcli/package-json/node_modules/@npmcli/git +!/@npmcli/package-json/node_modules/hosted-git-info +!/@npmcli/package-json/node_modules/lru-cache +!/@npmcli/package-json/node_modules/normalize-package-data !/@npmcli/promise-spawn !/@npmcli/query !/@npmcli/run-script diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE deleted file mode 100644 index 8f90f96f4c6c5..0000000000000 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, -OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, -DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS -ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS -SOFTWARE. diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js deleted file mode 100644 index e25a4d1426821..0000000000000 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js +++ /dev/null @@ -1,172 +0,0 @@ -// The goal here is to minimize both git workload and -// the number of refs we download over the network. -// -// Every method ends up with the checked out working dir -// at the specified ref, and resolves with the git sha. - -// Only certain whitelisted hosts get shallow cloning. -// Many hosts (including GHE) don't always support it. -// A failed shallow fetch takes a LOT longer than a full -// fetch in most cases, so we skip it entirely. -// Set opts.gitShallow = true/false to force this behavior -// one way or the other. 
-const shallowHosts = new Set([ - 'github.com', - 'gist.github.com', - 'gitlab.com', - 'bitbucket.com', - 'bitbucket.org', -]) -// we have to use url.parse until we add the same shim that hosted-git-info has -// to handle scp:// urls -const { parse } = require('url') // eslint-disable-line node/no-deprecated-api -const path = require('path') - -const getRevs = require('./revs.js') -const spawn = require('./spawn.js') -const { isWindows } = require('./utils.js') - -const pickManifest = require('npm-pick-manifest') -const fs = require('fs/promises') - -module.exports = (repo, ref = 'HEAD', target = null, opts = {}) => - getRevs(repo, opts).then(revs => clone( - repo, - revs, - ref, - resolveRef(revs, ref, opts), - target || defaultTarget(repo, opts.cwd), - opts - )) - -const maybeShallow = (repo, opts) => { - if (opts.gitShallow === false || opts.gitShallow) { - return opts.gitShallow - } - return shallowHosts.has(parse(repo).host) -} - -const defaultTarget = (repo, /* istanbul ignore next */ cwd = process.cwd()) => - path.resolve(cwd, path.basename(repo.replace(/[/\\]?\.git$/, ''))) - -const clone = (repo, revs, ref, revDoc, target, opts) => { - if (!revDoc) { - return unresolved(repo, ref, target, opts) - } - if (revDoc.sha === revs.refs.HEAD.sha) { - return plain(repo, revDoc, target, opts) - } - if (revDoc.type === 'tag' || revDoc.type === 'branch') { - return branch(repo, revDoc, target, opts) - } - return other(repo, revDoc, target, opts) -} - -const resolveRef = (revs, ref, opts) => { - const { spec = {} } = opts - ref = spec.gitCommittish || ref - /* istanbul ignore next - will fail anyway, can't pull */ - if (!revs) { - return null - } - if (spec.gitRange) { - return pickManifest(revs, spec.gitRange, opts) - } - if (!ref) { - return revs.refs.HEAD - } - if (revs.refs[ref]) { - return revs.refs[ref] - } - if (revs.shas[ref]) { - return revs.refs[revs.shas[ref][0]] - } - return null -} - -// pull request or some other kind of advertised ref -const other = (repo, revDoc, target, opts) => { - const shallow = maybeShallow(repo, opts) - - const fetchOrigin = ['fetch', 'origin', revDoc.rawRef] - .concat(shallow ? ['--depth=1'] : []) - - const git = (args) => spawn(args, { ...opts, cwd: target }) - return fs.mkdir(target, { recursive: true }) - .then(() => git(['init'])) - .then(() => isWindows(opts) - ? git(['config', '--local', '--add', 'core.longpaths', 'true']) - : null) - .then(() => git(['remote', 'add', 'origin', repo])) - .then(() => git(fetchOrigin)) - .then(() => git(['checkout', revDoc.sha])) - .then(() => updateSubmodules(target, opts)) - .then(() => revDoc.sha) -} - -// tag or branches. use -b -const branch = (repo, revDoc, target, opts) => { - const args = [ - 'clone', - '-b', - revDoc.ref, - repo, - target, - '--recurse-submodules', - ] - if (maybeShallow(repo, opts)) { - args.push('--depth=1') - } - if (isWindows(opts)) { - args.push('--config', 'core.longpaths=true') - } - return spawn(args, opts).then(() => revDoc.sha) -} - -// just the head. 
clone it -const plain = (repo, revDoc, target, opts) => { - const args = [ - 'clone', - repo, - target, - '--recurse-submodules', - ] - if (maybeShallow(repo, opts)) { - args.push('--depth=1') - } - if (isWindows(opts)) { - args.push('--config', 'core.longpaths=true') - } - return spawn(args, opts).then(() => revDoc.sha) -} - -const updateSubmodules = async (target, opts) => { - const hasSubmodules = await fs.stat(`${target}/.gitmodules`) - .then(() => true) - .catch(() => false) - if (!hasSubmodules) { - return null - } - return spawn([ - 'submodule', - 'update', - '-q', - '--init', - '--recursive', - ], { ...opts, cwd: target }) -} - -const unresolved = (repo, ref, target, opts) => { - // can't do this one shallowly, because the ref isn't advertised - // but we can avoid checking out the working dir twice, at least - const lp = isWindows(opts) ? ['--config', 'core.longpaths=true'] : [] - const cloneArgs = ['clone', '--mirror', '-q', repo, target + '/.git'] - const git = (args) => spawn(args, { ...opts, cwd: target }) - return fs.mkdir(target, { recursive: true }) - .then(() => git(cloneArgs.concat(lp))) - .then(() => git(['init'])) - .then(() => git(['checkout', ref])) - .then(() => updateSubmodules(target, opts)) - .then(() => git(['rev-parse', '--revs-only', 'HEAD'])) - .then(({ stdout }) => stdout.trim()) -} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js deleted file mode 100644 index 7aeac4762866f..0000000000000 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js +++ /dev/null @@ -1,36 +0,0 @@ - -const maxRetry = 3 - -class GitError extends Error { - shouldRetry () { - return false - } -} - -class GitConnectionError extends GitError { - constructor (message) { - super('A git connection error occurred') - } - - shouldRetry (number) { - return number < maxRetry - } -} - -class GitPathspecError extends GitError { - constructor (message) { - super('The git reference could not be found') - } -} - -class GitUnknownError extends GitError { - constructor (message) { - super('An unknown git error occurred') - } -} - -module.exports = { - GitConnectionError, - GitPathspecError, - GitUnknownError, -} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js deleted file mode 100644 index 34bd310b88e5d..0000000000000 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js +++ /dev/null @@ -1,15 +0,0 @@ -const is = require('./is.js') -const { dirname } = require('path') - -module.exports = async ({ cwd = process.cwd(), root } = {}) => { - while (true) { - if (await is({ cwd })) { - return cwd - } - const next = dirname(cwd) - if (cwd === root || cwd === next) { - return null - } - cwd = next - } -} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js deleted file mode 100644 index 10a65f782e6da..0000000000000 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js +++ /dev/null @@ -1,9 +0,0 @@ -module.exports = { - clone: require('./clone.js'), - revs: require('./revs.js'), - spawn: require('./spawn.js'), - is: require('./is.js'), - find: require('./find.js'), - isClean: require('./is-clean.js'), - errors: require('./errors.js'), -} diff --git 
a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js deleted file mode 100644 index 182373be94193..0000000000000 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js +++ /dev/null @@ -1,6 +0,0 @@ -const spawn = require('./spawn.js') - -module.exports = (opts = {}) => - spawn(['status', '--porcelain=v1', '-uno'], opts) - .then(res => !res.stdout.trim().split(/\r?\n+/) - .map(l => l.trim()).filter(l => l).length) diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js deleted file mode 100644 index e2542f2157727..0000000000000 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js +++ /dev/null @@ -1,6 +0,0 @@ -// not an airtight indicator, but a good gut-check to even bother trying -const { promisify } = require('util') -const fs = require('fs') -const stat = promisify(fs.stat) -module.exports = ({ cwd = process.cwd() } = {}) => - stat(cwd + '/.git').then(() => true, () => false) diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js deleted file mode 100644 index 6bd7e7a4c1531..0000000000000 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js +++ /dev/null @@ -1,147 +0,0 @@ -// turn an array of lines from `git ls-remote` into a thing -// vaguely resembling a packument, where docs are a resolved ref - -const semver = require('semver') - -module.exports = lines => finish(lines.reduce(linesToRevsReducer, { - versions: {}, - 'dist-tags': {}, - refs: {}, - shas: {}, -})) - -const finish = revs => distTags(shaList(peelTags(revs))) - -// We can check out shallow clones on specific SHAs if we have a ref -const shaList = revs => { - Object.keys(revs.refs).forEach(ref => { - const doc = revs.refs[ref] - if (!revs.shas[doc.sha]) { - revs.shas[doc.sha] = [ref] - } else { - revs.shas[doc.sha].push(ref) - } - }) - return revs -} - -// Replace any tags with their ^{} counterparts, if those exist -const peelTags = revs => { - Object.keys(revs.refs).filter(ref => ref.endsWith('^{}')).forEach(ref => { - const peeled = revs.refs[ref] - const unpeeled = revs.refs[ref.replace(/\^\{\}$/, '')] - if (unpeeled) { - unpeeled.sha = peeled.sha - delete revs.refs[ref] - } - }) - return revs -} - -const distTags = revs => { - // not entirely sure what situations would result in an - // ichabod repo, but best to be careful in Sleepy Hollow anyway - const HEAD = revs.refs.HEAD || /* istanbul ignore next */ {} - const versions = Object.keys(revs.versions) - versions.forEach(v => { - // simulate a dist-tags with latest pointing at the - // 'latest' branch if one exists and is a version, - // or HEAD if not. 
- const ver = revs.versions[v] - if (revs.refs.latest && ver.sha === revs.refs.latest.sha) { - revs['dist-tags'].latest = v - } else if (ver.sha === HEAD.sha) { - revs['dist-tags'].HEAD = v - if (!revs.refs.latest) { - revs['dist-tags'].latest = v - } - } - }) - return revs -} - -const refType = ref => { - if (ref.startsWith('refs/tags/')) { - return 'tag' - } - if (ref.startsWith('refs/heads/')) { - return 'branch' - } - if (ref.startsWith('refs/pull/')) { - return 'pull' - } - if (ref === 'HEAD') { - return 'head' - } - // Could be anything, ignore for now - /* istanbul ignore next */ - return 'other' -} - -// return the doc, or null if we should ignore it. -const lineToRevDoc = line => { - const split = line.trim().split(/\s+/, 2) - if (split.length < 2) { - return null - } - - const sha = split[0].trim() - const rawRef = split[1].trim() - const type = refType(rawRef) - - if (type === 'tag') { - // refs/tags/foo^{} is the 'peeled tag', ie the commit - // that is tagged by refs/tags/foo they resolve to the same - // content, just different objects in git's data structure. - // But, we care about the thing the tag POINTS to, not the tag - // object itself, so we only look at the peeled tag refs, and - // ignore the pointer. - // For now, though, we have to save both, because some tags - // don't have peels, if they were not annotated. - const ref = rawRef.slice('refs/tags/'.length) - return { sha, ref, rawRef, type } - } - - if (type === 'branch') { - const ref = rawRef.slice('refs/heads/'.length) - return { sha, ref, rawRef, type } - } - - if (type === 'pull') { - // NB: merged pull requests installable with #pull/123/merge - // for the merged pr, or #pull/123 for the PR head - const ref = rawRef.slice('refs/'.length).replace(/\/head$/, '') - return { sha, ref, rawRef, type } - } - - if (type === 'head') { - const ref = 'HEAD' - return { sha, ref, rawRef, type } - } - - // at this point, all we can do is leave the ref un-munged - return { sha, ref: rawRef, rawRef, type } -} - -const linesToRevsReducer = (revs, line) => { - const doc = lineToRevDoc(line) - - if (!doc) { - return revs - } - - revs.refs[doc.ref] = doc - revs.refs[doc.rawRef] = doc - - if (doc.type === 'tag') { - // try to pull a semver value out of tags like `release-v1.2.3` - // which is a pretty common pattern. 
- const match = !doc.ref.endsWith('^{}') && - doc.ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/) - if (match && semver.valid(match[1], true)) { - revs.versions[semver.clean(match[1], true)] = doc - } - } - - return revs -} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js deleted file mode 100644 index 7540ec7c8b9f7..0000000000000 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js +++ /dev/null @@ -1,33 +0,0 @@ -const { - GitConnectionError, - GitPathspecError, - GitUnknownError, -} = require('./errors.js') - -const connectionErrorRe = new RegExp([ - 'remote error: Internal Server Error', - 'The remote end hung up unexpectedly', - 'Connection timed out', - 'Operation timed out', - 'Failed to connect to .* Timed out', - 'Connection reset by peer', - 'SSL_ERROR_SYSCALL', - 'The requested URL returned error: 503', -].join('|')) - -const missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/ - -function makeError (er) { - const message = er.stderr - let gitEr - if (connectionErrorRe.test(message)) { - gitEr = new GitConnectionError(message) - } else if (missingPathspecRe.test(message)) { - gitEr = new GitPathspecError(message) - } else { - gitEr = new GitUnknownError(message) - } - return Object.assign(gitEr, er) -} - -module.exports = makeError diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js deleted file mode 100644 index 3119af16e0cf1..0000000000000 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js +++ /dev/null @@ -1,12 +0,0 @@ -// Values we want to set if they're not already defined by the end user -// This defaults to accepting new ssh host key fingerprints -const gitEnv = { - GIT_ASKPASS: 'echo', - GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new', -} -module.exports = (opts = {}) => ({ - stdioString: true, - ...opts, - shell: false, - env: opts.env || { ...gitEnv, ...process.env }, -}) diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js deleted file mode 100644 index ee72370d5b7ec..0000000000000 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js +++ /dev/null @@ -1,28 +0,0 @@ -const pinflight = require('promise-inflight') -const spawn = require('./spawn.js') -const LRU = require('lru-cache') - -const revsCache = new LRU({ - max: 100, - ttl: 5 * 60 * 1000, -}) - -const linesToRevs = require('./lines-to-revs.js') - -module.exports = async (repo, opts = {}) => { - if (!opts.noGitRevCache) { - const cached = revsCache.get(repo) - if (cached) { - return cached - } - } - - return pinflight(`ls-remote:${repo}`, () => - spawn(['ls-remote', repo], opts) - .then(({ stdout }) => linesToRevs(stdout.trim().split('\n'))) - .then(revs => { - revsCache.set(repo, revs) - return revs - }) - ) -} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js deleted file mode 100644 index 7098d7b872942..0000000000000 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js +++ /dev/null @@ -1,44 +0,0 @@ -const spawn = require('@npmcli/promise-spawn') -const promiseRetry = require('promise-retry') -const log = require('proc-log') -const makeError = 
require('./make-error.js') -const whichGit = require('./which.js') -const makeOpts = require('./opts.js') - -module.exports = (gitArgs, opts = {}) => { - const gitPath = whichGit(opts) - - if (gitPath instanceof Error) { - return Promise.reject(gitPath) - } - - // undocumented option, mostly only here for tests - const args = opts.allowReplace || gitArgs[0] === '--no-replace-objects' - ? gitArgs - : ['--no-replace-objects', ...gitArgs] - - let retryOpts = opts.retry - if (retryOpts === null || retryOpts === undefined) { - retryOpts = { - retries: opts.fetchRetries || 2, - factor: opts.fetchRetryFactor || 10, - maxTimeout: opts.fetchRetryMaxtimeout || 60000, - minTimeout: opts.fetchRetryMintimeout || 1000, - } - } - return promiseRetry((retryFn, number) => { - if (number !== 1) { - log.silly('git', `Retrying git command: ${ - args.join(' ')} attempt # ${number}`) - } - - return spawn(gitPath, args, makeOpts(opts)) - .catch(er => { - const gitError = makeError(er) - if (!gitError.shouldRetry(number)) { - throw gitError - } - retryFn(gitError) - }) - }, retryOpts) -} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js deleted file mode 100644 index fcd9578a19597..0000000000000 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js +++ /dev/null @@ -1,3 +0,0 @@ -const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32' - -exports.isWindows = isWindows diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js deleted file mode 100644 index dc2a1ad212166..0000000000000 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js +++ /dev/null @@ -1,18 +0,0 @@ -const which = require('which') - -let gitPath -try { - gitPath = which.sync('git') -} catch { - // ignore errors -} - -module.exports = (opts = {}) => { - if (opts.git) { - return opts.git - } - if (!gitPath || opts.git === false) { - return Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' }) - } - return gitPath -} diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/LICENSE b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/LICENSE new file mode 100644 index 0000000000000..45055763dc838 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
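

The retry handling in lib/spawn.js above maps npm's fetch-retry settings onto promise-retry, so the transient failures matched by make-error.js (connection resets, timeouts, 503s) are retried while pathspec and unknown errors fail fast. A minimal sketch of the resulting backoff schedule, assuming the defaults shown in the hunk and the delay formula of the `retry` package that promise-retry wraps (delay(n) = min(minTimeout * factor^n, maxTimeout), randomization off):

    // Sketch only: the wait times spawn.js ends up with when the caller
    // passes no retry config. The formula below is the one used by
    // promise-retry's underlying `retry` package, not code from the patch.
    const retryOpts = {
      retries: 2,        // opts.fetchRetries default
      factor: 10,        // opts.fetchRetryFactor default
      minTimeout: 1000,  // opts.fetchRetryMintimeout default
      maxTimeout: 60000, // opts.fetchRetryMaxtimeout default
    }

    for (let n = 0; n < retryOpts.retries; n++) {
      const delay = Math.min(retryOpts.minTimeout * retryOpts.factor ** n, retryOpts.maxTimeout)
      console.log(`retry ${n + 1} fires after ~${delay}ms`)
    }
    // retry 1 fires after ~1000ms
    // retry 2 fires after ~10000ms

Only GitConnectionError reports shouldRetry() as true, and errors.js caps it at maxRetry = 3, so a flaky `git ls-remote` is attempted at most three times before the error propagates.
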
diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/from-url.js b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/from-url.js new file mode 100644 index 0000000000000..efc1247d59d12 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/from-url.js @@ -0,0 +1,122 @@ +'use strict' + +const parseUrl = require('./parse-url') + +// look for github shorthand inputs, such as npm/cli +const isGitHubShorthand = (arg) => { + // it cannot contain whitespace before the first # + // it cannot start with a / because that's probably an absolute file path + // but it must include a slash since repos are username/repository + // it cannot start with a . because that's probably a relative file path + // it cannot start with an @ because that's a scoped package if it passes the other tests + // it cannot contain a : before a # because that tells us that there's a protocol + // a second / may not exist before a # + const firstHash = arg.indexOf('#') + const firstSlash = arg.indexOf('/') + const secondSlash = arg.indexOf('/', firstSlash + 1) + const firstColon = arg.indexOf(':') + const firstSpace = /\s/.exec(arg) + const firstAt = arg.indexOf('@') + + const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash) + const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash) + const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash) + const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash) + const hasSlash = firstSlash > 0 + // if a # is found, what we really want to know is that the character + // immediately before # is not a / + const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/') + const doesNotStartWithDot = !arg.startsWith('.') + + return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash && + doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash && + secondSlashOnlyAfterHash +} + +module.exports = (giturl, opts, { gitHosts, protocols }) => { + if (!giturl) { + return + } + + const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl + const parsed = parseUrl(correctedUrl, protocols) + if (!parsed) { + return + } + + const gitHostShortcut = gitHosts.byShortcut[parsed.protocol] + const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.') + ? parsed.hostname.slice(4) + : parsed.hostname] + const gitHostName = gitHostShortcut || gitHostDomain + if (!gitHostName) { + return + } + + const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain] + let auth = null + if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) { + auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}` + } + + let committish = null + let user = null + let project = null + let defaultRepresentation = null + + try { + if (gitHostShortcut) { + let pathname = parsed.pathname.startsWith('/') ? 
parsed.pathname.slice(1) : parsed.pathname + const firstAt = pathname.indexOf('@') + // we ignore auth for shortcuts, so just trim it out + if (firstAt > -1) { + pathname = pathname.slice(firstAt + 1) + } + + const lastSlash = pathname.lastIndexOf('/') + if (lastSlash > -1) { + user = decodeURIComponent(pathname.slice(0, lastSlash)) + // we want nulls only, never empty strings + if (!user) { + user = null + } + project = decodeURIComponent(pathname.slice(lastSlash + 1)) + } else { + project = decodeURIComponent(pathname) + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (parsed.hash) { + committish = decodeURIComponent(parsed.hash.slice(1)) + } + + defaultRepresentation = 'shortcut' + } else { + if (!gitHostInfo.protocols.includes(parsed.protocol)) { + return + } + + const segments = gitHostInfo.extract(parsed) + if (!segments) { + return + } + + user = segments.user && decodeURIComponent(segments.user) + project = decodeURIComponent(segments.project) + committish = decodeURIComponent(segments.committish) + defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1) + } + } catch (err) { + /* istanbul ignore else */ + if (err instanceof URIError) { + return + } else { + throw err + } + } + + return [gitHostName, user, auth, project, committish, defaultRepresentation, opts] +} diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/hosts.js b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/hosts.js new file mode 100644 index 0000000000000..013712b7842c8 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/hosts.js @@ -0,0 +1,228 @@ +/* eslint-disable max-len */ + +'use strict' + +const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : '' +const maybeEncode = (arg) => arg ? 
encodeURIComponent(arg) : '' +const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-') + +const defaults = { + sshtemplate: ({ domain, user, project, committish }) => + `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, user, project, committish }) => + `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + edittemplate: ({ domain, user, project, committish, editpath, path }) => + `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`, + browsetemplate: ({ domain, user, project, committish, treepath }) => + `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`, + browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => + `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) => + `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + docstemplate: ({ domain, user, project, treepath, committish }) => + `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, + httpstemplate: ({ auth, domain, user, project, committish }) => + `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ domain, user, project, committish, path }) => + `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`, + shortcuttemplate: ({ type, user, project, committish }) => + `${type}:${user}/${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ user, project, committish }) => + `${user}/${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, user, project }) => + `https://${domain}/${user}/${project}/issues`, + hashformat: formatHashFragment, +} + +const hosts = {} +hosts.github = { + // First two are insecure and generally shouldn't be used any more, but + // they are still supported. 
+ protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'github.com', + treepath: 'tree', + blobpath: 'blob', + editpath: 'edit', + filetemplate: ({ auth, user, project, committish, path }) => + `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`, + gittemplate: ({ auth, domain, user, project, committish }) => + `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`, + extract: (url) => { + let [, user, project, type, committish] = url.pathname.split('/', 5) + if (type && type !== 'tree') { + return + } + + if (!type) { + committish = url.hash.slice(1) + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish } + }, +} + +hosts.bitbucket = { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'bitbucket.org', + treepath: 'src', + blobpath: 'src', + editpath: '?mode=edit', + edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) => + `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (['get'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +} + +hosts.gitlab = { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gitlab.com', + treepath: 'tree', + blobpath: 'tree', + editpath: '-/edit', + httpstemplate: ({ auth, domain, user, project, committish }) => + `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`, + extract: (url) => { + const path = url.pathname.slice(1) + if (path.includes('/-/') || path.includes('/archive.tar.gz')) { + return + } + + const segments = path.split('/') + let project = segments.pop() + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + const user = segments.join('/') + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +} + +hosts.gist = { + protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gist.github.com', + editpath: 'edit', + sshtemplate: ({ domain, project, committish }) => + `git@${domain}:${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, project, committish }) => + `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`, + edittemplate: ({ domain, user, project, committish, editpath }) => + `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`, + browsetemplate: ({ domain, project, committish }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + browsetreetemplate: ({ domain, project, committish, path, 
hashformat }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, + browseblobtemplate: ({ domain, project, committish, path, hashformat }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, + docstemplate: ({ domain, project, committish }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + httpstemplate: ({ domain, project, committish }) => + `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ user, project, committish, path }) => + `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`, + shortcuttemplate: ({ type, project, committish }) => + `${type}:${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ project, committish }) => + `${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, project }) => + `https://${domain}/${project}`, + gittemplate: ({ domain, project, committish }) => + `git://${domain}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ project, committish }) => + `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (aux === 'raw') { + return + } + + if (!project) { + if (!user) { + return + } + + project = user + user = null + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + return { user, project, committish: url.hash.slice(1) } + }, + hashformat: function (fragment) { + return fragment && 'file-' + formatHashFragment(fragment) + }, +} + +hosts.sourcehut = { + protocols: ['git+ssh:', 'https:'], + domain: 'git.sr.ht', + treepath: 'tree', + blobpath: 'tree', + filetemplate: ({ domain, user, project, committish, path }) => + `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`, + httpstemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`, + bugstemplate: ({ user, project }) => + `https://todo.sr.ht/${user}/${project}`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + + // tarball url + if (['archive'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +} + +for (const [name, host] of Object.entries(hosts)) { + hosts[name] = Object.assign({}, defaults, host) +} + +module.exports = hosts diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/index.js b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/index.js new file mode 100644 index 0000000000000..0c9d0b08c866b --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/index.js @@ -0,0 +1,179 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const hosts = require('./hosts.js') +const fromUrl = require('./from-url.js') +const parseUrl = require('./parse-url.js') + +const cache = new LRUCache({ max: 1000 }) + +class GitHost { + constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) { + Object.assign(this, 
GitHost.#gitHosts[type], { + type, + user, + auth, + project, + committish, + default: defaultRepresentation, + opts, + }) + } + + static #gitHosts = { byShortcut: {}, byDomain: {} } + static #protocols = { + 'git+ssh:': { name: 'sshurl' }, + 'ssh:': { name: 'sshurl' }, + 'git+https:': { name: 'https', auth: true }, + 'git:': { auth: true }, + 'http:': { auth: true }, + 'https:': { auth: true }, + 'git+http:': { auth: true }, + } + + static addHost (name, host) { + GitHost.#gitHosts[name] = host + GitHost.#gitHosts.byDomain[host.domain] = name + GitHost.#gitHosts.byShortcut[`${name}:`] = name + GitHost.#protocols[`${name}:`] = { name } + } + + static fromUrl (giturl, opts) { + if (typeof giturl !== 'string') { + return + } + + const key = giturl + JSON.stringify(opts || {}) + + if (!cache.has(key)) { + const hostArgs = fromUrl(giturl, opts, { + gitHosts: GitHost.#gitHosts, + protocols: GitHost.#protocols, + }) + cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined) + } + + return cache.get(key) + } + + static parseUrl (url) { + return parseUrl(url) + } + + #fill (template, opts) { + if (typeof template !== 'function') { + return null + } + + const options = { ...this, ...this.opts, ...opts } + + // the path should always be set so we don't end up with 'undefined' in urls + if (!options.path) { + options.path = '' + } + + // template functions will insert the leading slash themselves + if (options.path.startsWith('/')) { + options.path = options.path.slice(1) + } + + if (options.noCommittish) { + options.committish = null + } + + const result = template(options) + return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result + } + + hash () { + return this.committish ? `#${this.committish}` : '' + } + + ssh (opts) { + return this.#fill(this.sshtemplate, opts) + } + + sshurl (opts) { + return this.#fill(this.sshurltemplate, opts) + } + + browse (path, ...args) { + // not a string, treat path as opts + if (typeof path !== 'string') { + return this.#fill(this.browsetemplate, path) + } + + if (typeof args[0] !== 'string') { + return this.#fill(this.browsetreetemplate, { ...args[0], path }) + } + + return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path }) + } + + // If the path is known to be a file, then browseFile should be used. For some hosts + // the url is the same as browse, but for others like GitHub a file can use both `/tree/` + // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/` + // path will redirect to a specific commit. Using the `/blob/` path avoids this and + // does not redirect to a different commit. 
+ browseFile (path, ...args) { + if (typeof args[0] !== 'string') { + return this.#fill(this.browseblobtemplate, { ...args[0], path }) + } + + return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path }) + } + + docs (opts) { + return this.#fill(this.docstemplate, opts) + } + + bugs (opts) { + return this.#fill(this.bugstemplate, opts) + } + + https (opts) { + return this.#fill(this.httpstemplate, opts) + } + + git (opts) { + return this.#fill(this.gittemplate, opts) + } + + shortcut (opts) { + return this.#fill(this.shortcuttemplate, opts) + } + + path (opts) { + return this.#fill(this.pathtemplate, opts) + } + + tarball (opts) { + return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false }) + } + + file (path, opts) { + return this.#fill(this.filetemplate, { ...opts, path }) + } + + edit (path, opts) { + return this.#fill(this.edittemplate, { ...opts, path }) + } + + getDefaultRepresentation () { + return this.default + } + + toString (opts) { + if (this.default && typeof this[this.default] === 'function') { + return this[this.default](opts) + } + + return this.sshurl(opts) + } +} + +for (const [name, host] of Object.entries(hosts)) { + GitHost.addHost(name, host) +} + +module.exports = GitHost diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/parse-url.js new file mode 100644 index 0000000000000..7d5489c008ab4 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/parse-url.js @@ -0,0 +1,78 @@ +const url = require('url') + +const lastIndexOfBefore = (str, char, beforeChar) => { + const startPosition = str.indexOf(beforeChar) + return str.lastIndexOf(char, startPosition > -1 ? 
startPosition : Infinity) +} + +const safeUrl = (u) => { + try { + return new url.URL(u) + } catch { + // this fn should never throw + } +} + +// accepts input like git:github.com:user/repo and inserts the // after the first : +const correctProtocol = (arg, protocols) => { + const firstColon = arg.indexOf(':') + const proto = arg.slice(0, firstColon + 1) + if (Object.prototype.hasOwnProperty.call(protocols, proto)) { + return arg + } + + const firstAt = arg.indexOf('@') + if (firstAt > -1) { + if (firstAt > firstColon) { + return `git+ssh://${arg}` + } else { + return arg + } + } + + const doubleSlash = arg.indexOf('//') + if (doubleSlash === firstColon + 1) { + return arg + } + + return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}` +} + +// attempt to correct an scp style url so that it will parse with `new URL()` +const correctUrl = (giturl) => { + // ignore @ that come after the first hash since the denotes the start + // of a committish which can contain @ characters + const firstAt = lastIndexOfBefore(giturl, '@', '#') + // ignore colons that come after the hash since that could include colons such as: + // git@github.com:user/package-2#semver:^1.0.0 + const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#') + + if (lastColonBeforeHash > firstAt) { + // the last : comes after the first @ (or there is no @) + // like it would in: + // proto://hostname.com:user/repo + // username@hostname.com:user/repo + // :password@hostname.com:user/repo + // username:password@hostname.com:user/repo + // proto://username@hostname.com:user/repo + // proto://:password@hostname.com:user/repo + // proto://username:password@hostname.com:user/repo + // then we replace the last : with a / to create a valid path + giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1) + } + + if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) { + // we have no : at all + // as it would be in: + // username@hostname.com/user/repo + // then we prepend a protocol + giturl = `git+ssh://${giturl}` + } + + return giturl +} + +module.exports = (giturl, protocols) => { + const withProtocol = protocols ? 
correctProtocol(giturl, protocols) : giturl + return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol)) +} diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/package.json b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/package.json new file mode 100644 index 0000000000000..262a6c20fcf00 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/package.json @@ -0,0 +1,66 @@ +{ + "name": "hosted-git-info", + "version": "7.0.0", + "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab", + "main": "./lib/index.js", + "repository": { + "type": "git", + "url": "https://github.com/npm/hosted-git-info.git" + }, + "keywords": [ + "git", + "github", + "bitbucket", + "gitlab" + ], + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/hosted-git-info/issues" + }, + "homepage": "https://github.com/npm/hosted-git-info", + "scripts": { + "posttest": "npm run lint", + "snap": "tap", + "test": "tap", + "test:coverage": "tap --coverage-report=html", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "template-oss-apply": "template-oss-apply --force" + }, + "dependencies": { + "lru-cache": "^10.0.1" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.18.0", + "tap": "^16.0.1" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "tap": { + "color": 1, + "coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.18.0", + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] + } +} diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/LICENSE b/node_modules/@npmcli/package-json/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000000000..f785757cd63f8 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
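

Taken together, the hosted-git-info files above split URL handling into three stages: lib/parse-url.js coerces scp-style and protocol-less strings into something `new URL()` accepts, lib/from-url.js decides which known host (and which default representation) the parsed URL belongs to, and lib/hosts.js supplies the per-host templates and extract() logic. A short usage sketch, assuming the package resolves to the hosted-git-info@7.0.0 vendored here:

    const GitHost = require('hosted-git-info')

    // scp-style input: correctUrl() in parse-url.js rewrites the
    // host:path colon to a slash and prepends git+ssh://, then the
    // github extract() in hosts.js recovers user/project/committish.
    const info = GitHost.fromUrl('git@github.com:npm/cli.git#semver:^9.0.0')
    console.log(info.type)               // 'github'
    console.log(info.user, info.project) // 'npm' 'cli'
    console.log(info.committish)         // 'semver:^9.0.0'

    // Shorthand input: isGitHubShorthand() in from-url.js upgrades
    // 'npm/cli' to 'github:npm/cli' before parsing.
    console.log(GitHost.fromUrl('npm/cli').https())
    // => 'git+https://github.com/npm/cli.git'

The lru-cache sources vendored alongside are here for the same reason as the hosted-git-info bump: lib/index.js above destructures the v10 named export (`const { LRUCache } = require('lru-cache')`), whereas @npmcli/git@4's revs.js used the v7-era `const LRU = require('lru-cache')` import of the class itself.
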
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/index.js b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/index.js new file mode 100644 index 0000000000000..02d76ec800a92 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/index.js @@ -0,0 +1,1404 @@ +"use strict"; +/** + * @module LRUCache + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.LRUCache = void 0; +const perf = typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date; +const warned = new Set(); +/* c8 ignore start */ +const PROCESS = (typeof process === 'object' && !!process ? process : {}); +/* c8 ignore start */ +const emitWarning = (msg, type, code, fn) => { + typeof PROCESS.emitWarning === 'function' + ? PROCESS.emitWarning(msg, type, code, fn) + : console.error(`[${code}] ${type}: ${msg}`); +}; +let AC = globalThis.AbortController; +let AS = globalThis.AbortSignal; +/* c8 ignore start */ +if (typeof AC === 'undefined') { + //@ts-ignore + AS = class AbortSignal { + onabort; + _onabort = []; + reason; + aborted = false; + addEventListener(_, fn) { + this._onabort.push(fn); + } + }; + //@ts-ignore + AC = class AbortController { + constructor() { + warnACPolyfill(); + } + signal = new AS(); + abort(reason) { + if (this.signal.aborted) + return; + //@ts-ignore + this.signal.reason = reason; + //@ts-ignore + this.signal.aborted = true; + //@ts-ignore + for (const fn of this.signal._onabort) { + fn(reason); + } + this.signal.onabort?.(reason); + } + }; + let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; + const warnACPolyfill = () => { + if (!printACPolyfillWarning) + return; + printACPolyfillWarning = false; + emitWarning('AbortController is not defined. If using lru-cache in ' + + 'node 14, load an AbortController polyfill from the ' + + '`node-abort-controller` package. A minimal polyfill is ' + + 'provided for use by LRUCache.fetch(), but it should not be ' + + 'relied upon in other contexts (eg, passing it to other APIs that ' + + 'use AbortController/AbortSignal might have undesirable effects). ' + + 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); + }; +} +/* c8 ignore stop */ +const shouldWarn = (code) => !warned.has(code); +const TYPE = Symbol('type'); +const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); +/* c8 ignore start */ +// This is a little bit ridiculous, tbh. +// The maximum array length is 2^32-1 or thereabouts on most JS impls. +// And well before that point, you're caching the entire world, I mean, +// that's ~32GB of just integers for the next/prev links, plus whatever +// else to hold that many keys and values. Just filling the memory with +// zeroes at init time is brutal when you get that big. +// But why not be complete? +// Maybe in the future, these limits will have expanded. +const getUintArray = (max) => !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? 
ZeroArray + : null; +/* c8 ignore stop */ +class ZeroArray extends Array { + constructor(size) { + super(size); + this.fill(0); + } +} +class Stack { + heap; + length; + // private constructor + static #constructing = false; + static create(max) { + const HeapCls = getUintArray(max); + if (!HeapCls) + return []; + Stack.#constructing = true; + const s = new Stack(max, HeapCls); + Stack.#constructing = false; + return s; + } + constructor(max, HeapCls) { + /* c8 ignore start */ + if (!Stack.#constructing) { + throw new TypeError('instantiate Stack using Stack.create(n)'); + } + /* c8 ignore stop */ + this.heap = new HeapCls(max); + this.length = 0; + } + push(n) { + this.heap[this.length++] = n; + } + pop() { + return this.heap[--this.length]; + } +} +/** + * Default export, the thing you're using this module to get. + * + * All properties from the options object (with the exception of + * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as + * normal public members. (`max` and `maxBase` are read-only getters.) + * Changing any of these will alter the defaults for subsequent method calls, + * but is otherwise safe. + */ +class LRUCache { + // properties coming in from the options of these, only max and maxSize + // really *need* to be protected. The rest can be modified, as they just + // set defaults for various methods. + #max; + #maxSize; + #dispose; + #disposeAfter; + #fetchMethod; + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort; + // computed properties + #size; + #calculatedSize; + #keyMap; + #keyList; + #valList; + #next; + #prev; + #head; + #tail; + #free; + #disposed; + #sizes; + #starts; + #ttls; + #hasDispose; + #hasFetchMethod; + #hasDisposeAfter; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! 
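+     *
+     * (head and tail are exposed through getters because they are plain
+     * numbers, copied by value; the arrays are handed out as live references.)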
+ * + * @internal + */ + static unsafeExposeInternals(c) { + return { + // properties + starts: c.#starts, + ttls: c.#ttls, + sizes: c.#sizes, + keyMap: c.#keyMap, + keyList: c.#keyList, + valList: c.#valList, + next: c.#next, + prev: c.#prev, + get head() { + return c.#head; + }, + get tail() { + return c.#tail; + }, + free: c.#free, + // methods + isBackgroundFetch: (p) => c.#isBackgroundFetch(p), + backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), + moveToTail: (index) => c.#moveToTail(index), + indexes: (options) => c.#indexes(options), + rindexes: (options) => c.#rindexes(options), + isStale: (index) => c.#isStale(index), + }; + } + // Protected read-only members + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max() { + return this.#max; + } + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize() { + return this.#maxSize; + } + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize() { + return this.#calculatedSize; + } + /** + * The number of items stored in the cache (read-only) + */ + get size() { + return this.#size; + } + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod() { + return this.#fetchMethod; + } + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose() { + return this.#dispose; + } + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter() { + return this.#disposeAfter; + } + constructor(options) { + const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer'); + } + const UintArray = max ? 
getUintArray(max) : Array; + if (!UintArray) { + throw new Error('invalid max value: ' + max); + } + this.#max = max; + this.#maxSize = maxSize; + this.maxEntrySize = maxEntrySize || this.#maxSize; + this.sizeCalculation = sizeCalculation; + if (this.sizeCalculation) { + if (!this.#maxSize && !this.maxEntrySize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function'); + } + } + if (fetchMethod !== undefined && + typeof fetchMethod !== 'function') { + throw new TypeError('fetchMethod must be a function if specified'); + } + this.#fetchMethod = fetchMethod; + this.#hasFetchMethod = !!fetchMethod; + this.#keyMap = new Map(); + this.#keyList = new Array(max).fill(undefined); + this.#valList = new Array(max).fill(undefined); + this.#next = new UintArray(max); + this.#prev = new UintArray(max); + this.#head = 0; + this.#tail = 0; + this.#free = Stack.create(max); + this.#size = 0; + this.#calculatedSize = 0; + if (typeof dispose === 'function') { + this.#dispose = dispose; + } + if (typeof disposeAfter === 'function') { + this.#disposeAfter = disposeAfter; + this.#disposed = []; + } + else { + this.#disposeAfter = undefined; + this.#disposed = undefined; + } + this.#hasDispose = !!this.#dispose; + this.#hasDisposeAfter = !!this.#disposeAfter; + this.noDisposeOnSet = !!noDisposeOnSet; + this.noUpdateTTL = !!noUpdateTTL; + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; + this.ignoreFetchAbort = !!ignoreFetchAbort; + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.#maxSize !== 0) { + if (!isPosInt(this.#maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified'); + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError('maxEntrySize must be a positive integer if specified'); + } + this.#initializeSizeTracking(); + } + this.allowStale = !!allowStale; + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; + this.updateAgeOnGet = !!updateAgeOnGet; + this.updateAgeOnHas = !!updateAgeOnHas; + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1; + this.ttlAutopurge = !!ttlAutopurge; + this.ttl = ttl || 0; + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified'); + } + this.#initializeTTLTracking(); + } + // do not allow completely unbounded caches + if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { + throw new TypeError('At least one of max, maxSize, or ttl is required'); + } + if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { + const code = 'LRU_CACHE_UNBOUNDED'; + if (shouldWarn(code)) { + warned.add(code); + const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.'; + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); + } + } + } + /** + * Return the remaining TTL time for a given entry key + */ + getRemainingTTL(key) { + return this.#keyMap.has(key) ? Infinity : 0; + } + #initializeTTLTracking() { + const ttls = new ZeroArray(this.#max); + const starts = new ZeroArray(this.#max); + this.#ttls = ttls; + this.#starts = starts; + this.#setItemTTL = (index, ttl, start = perf.now()) => { + starts[index] = ttl !== 0 ? 
start : 0; + ttls[index] = ttl; + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.#isStale(index)) { + this.delete(this.#keyList[index]); + } + }, ttl + 1); + // unref() not supported on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + }; + this.#updateItemAge = index => { + starts[index] = ttls[index] !== 0 ? perf.now() : 0; + }; + this.#statusTTL = (status, index) => { + if (ttls[index]) { + const ttl = ttls[index]; + const start = starts[index]; + status.ttl = ttl; + status.start = start; + status.now = cachedNow || getNow(); + const age = status.now - start; + status.remainingTTL = ttl - age; + } + }; + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0; + const getNow = () => { + const n = perf.now(); + if (this.ttlResolution > 0) { + cachedNow = n; + const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); + // not available on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + return n; + }; + this.getRemainingTTL = key => { + const index = this.#keyMap.get(key); + if (index === undefined) { + return 0; + } + const ttl = ttls[index]; + const start = starts[index]; + if (ttl === 0 || start === 0) { + return Infinity; + } + const age = (cachedNow || getNow()) - start; + return ttl - age; + }; + this.#isStale = index => { + return (ttls[index] !== 0 && + starts[index] !== 0 && + (cachedNow || getNow()) - starts[index] > ttls[index]); + }; + } + // conditionally set private methods related to TTL + #updateItemAge = () => { }; + #statusTTL = () => { }; + #setItemTTL = () => { }; + /* c8 ignore stop */ + #isStale = () => false; + #initializeSizeTracking() { + const sizes = new ZeroArray(this.#max); + this.#calculatedSize = 0; + this.#sizes = sizes; + this.#removeItemSize = index => { + this.#calculatedSize -= sizes[index]; + sizes[index] = 0; + }; + this.#requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.#isBackgroundFetch(v)) { + return 0; + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function'); + } + size = sizeCalculation(v, k); + if (!isPosInt(size)) { + throw new TypeError('sizeCalculation return invalid (expect positive integer)'); + } + } + else { + throw new TypeError('invalid size value (must be positive integer). 
' + + 'When maxSize or maxEntrySize is used, sizeCalculation ' + + 'or size must be set.'); + } + } + return size; + }; + this.#addItemSize = (index, size, status) => { + sizes[index] = size; + if (this.#maxSize) { + const maxSize = this.#maxSize - sizes[index]; + while (this.#calculatedSize > maxSize) { + this.#evict(true); + } + } + this.#calculatedSize += sizes[index]; + if (status) { + status.entrySize = size; + status.totalCalculatedSize = this.#calculatedSize; + } + }; + } + #removeItemSize = _i => { }; + #addItemSize = (_i, _s, _st) => { }; + #requireSize = (_k, _v, size, sizeCalculation) => { + if (size || sizeCalculation) { + throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); + } + return 0; + }; + *#indexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#tail; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#head) { + break; + } + else { + i = this.#prev[i]; + } + } + } + } + *#rindexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#head; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#tail) { + break; + } + else { + i = this.#next[i]; + } + } + } + } + #isValidIndex(index) { + return (index !== undefined && + this.#keyMap.get(this.#keyList[index]) === index); + } + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + *entries() { + for (const i of this.#indexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + *rentries() { + for (const i of this.#rindexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + *keys() { + for (const i of this.#indexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + *rkeys() { + for (const i of this.#rindexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + *values() { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. 
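+     *
+     * Values that are still in-flight background fetches are skipped.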
+ */ + *rvalues() { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator]() { + return this.entries(); + } + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to Array.find(). fn is called as fn(value, key, cache). + */ + find(fn, getOptions = {}) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + if (fn(value, this.#keyList[i], this)) { + return this.get(this.#keyList[i], getOptions); + } + } + } + /** + * Call the supplied function on each item in the cache, in order from + * most recently used to least recently used. fn is called as + * fn(value, key, cache). Does not update age or recenty of use. + * Does not iterate over stale values. + */ + forEach(fn, thisp = this) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. (ie, less recently used items are iterated over first.) + */ + rforEach(fn, thisp = this) { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale() { + let deleted = false; + for (const i of this.#rindexes({ allowStale: true })) { + if (this.#isStale(i)) { + this.delete(this.#keyList[i]); + deleted = true; + } + } + return deleted; + } + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to cache.load() + */ + dump() { + const arr = []; + for (const i of this.#indexes({ allowStale: true })) { + const key = this.#keyList[i]; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined || key === undefined) + continue; + const entry = { value }; + if (this.#ttls && this.#starts) { + entry.ttl = this.#ttls[i]; + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.#starts[i]; + entry.start = Math.floor(Date.now() - age); + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + arr.unshift([key, entry]); + } + return arr; + } + /** + * Reset the cache and load in the items in entries in the order listed. + * Note that the shape of the resulting cache may be different if the + * same options are not used in both caches. + */ + load(arr) { + this.clear(); + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset, so that + // we get the intended remaining TTL, no matter how long it's + // been on ice. + // + // it's ok for this to be a bit slow, it's a rare operation. 
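+                //
+                // worked example: an entry set 600ms before dump() was stored
+                // with start = Date.now() - 600; if load() runs 10s later, age
+                // works out to ~10600 and start becomes perf.now() - 10600, so
+                // the 10s spent serialized still counts against the TTL.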
+ const age = Date.now() - entry.start; + entry.start = perf.now() - age; + } + this.set(key, entry.value, entry); + } + } + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + */ + set(k, v, setOptions = {}) { + if (v === undefined) { + this.delete(k); + return this; + } + const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; + let { noUpdateTTL = this.noUpdateTTL } = setOptions; + const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss'; + status.maxEntrySizeExceeded = true; + } + // have to delete, in case something is there already. + this.delete(k); + return this; + } + let index = this.#size === 0 ? undefined : this.#keyMap.get(k); + if (index === undefined) { + // addition + index = (this.#size === 0 + ? this.#tail + : this.#free.length !== 0 + ? this.#free.pop() + : this.#size === this.#max + ? this.#evict(false) + : this.#size); + this.#keyList[index] = k; + this.#valList[index] = v; + this.#keyMap.set(k, index); + this.#next[this.#tail] = index; + this.#prev[index] = this.#tail; + this.#tail = index; + this.#size++; + this.#addItemSize(index, size, status); + if (status) + status.set = 'add'; + noUpdateTTL = false; + } + else { + // update + this.#moveToTail(index); + const oldVal = this.#valList[index]; + if (v !== oldVal) { + if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } + } + else if (!noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(oldVal, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([oldVal, k, 'set']); + } + } + this.#removeItemSize(index); + this.#addItemSize(index, size, status); + this.#valList[index] = v; + if (status) { + status.set = 'replace'; + const oldValue = oldVal && this.#isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal; + if (oldValue !== undefined) + status.oldValue = oldValue; + } + } + else if (status) { + status.set = 'update'; + } + } + if (ttl !== 0 && !this.#ttls) { + this.#initializeTTLTracking(); + } + if (this.#ttls) { + if (!noUpdateTTL) { + this.#setItemTTL(index, ttl, start); + } + if (status) + this.#statusTTL(status, index); + } + if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return this; + } + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. 
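+     *
+     * If the evicted entry is an in-flight background fetch, the fetch is
+     * aborted and its stale value, when present, is returned; otherwise the
+     * next least recently used item is evicted.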
+ */ + pop() { + try { + while (this.#size) { + const val = this.#valList[this.#head]; + this.#evict(true); + if (this.#isBackgroundFetch(val)) { + if (val.__staleWhileFetching) { + return val.__staleWhileFetching; + } + } + else if (val !== undefined) { + return val; + } + } + } + finally { + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } + } + #evict(free) { + const head = this.#head; + const k = this.#keyList[head]; + const v = this.#valList[head]; + if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'evict'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'evict']); + } + } + this.#removeItemSize(head); + // if we aren't about to use the index, then null these out + if (free) { + this.#keyList[head] = undefined; + this.#valList[head] = undefined; + this.#free.push(head); + } + if (this.#size === 1) { + this.#head = this.#tail = 0; + this.#free.length = 0; + } + else { + this.#head = this.#next[head]; + } + this.#keyMap.delete(k); + this.#size--; + return head; + } + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k, hasOptions = {}) { + const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v) && + v.__staleWhileFetching === undefined) { + return false; + } + if (!this.#isStale(index)) { + if (updateAgeOnHas) { + this.#updateItemAge(index); + } + if (status) { + status.has = 'hit'; + this.#statusTTL(status, index); + } + return true; + } + else if (status) { + status.has = 'stale'; + this.#statusTTL(status, index); + } + } + else if (status) { + status.has = 'miss'; + } + return false; + } + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k, peekOptions = {}) { + const { allowStale = this.allowStale } = peekOptions; + const index = this.#keyMap.get(k); + if (index !== undefined && + (allowStale || !this.#isStale(index))) { + const v = this.#valList[index]; + // either stale and allowed, or forcing a refresh of non-stale value + return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + } + } + #backgroundFetch(k, index, options, context) { + const v = index === undefined ? undefined : this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + return v; + } + const ac = new AC(); + const { signal } = options; + // when/if our AC signals, then stop listening to theirs. 
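+        // (note: the { signal: ac.signal } option below auto-removes this
+        // listener once our own controller aborts for any reason, so a
+        // long-lived caller signal does not accumulate one listener per fetch)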
+ signal?.addEventListener('abort', () => ac.abort(signal.reason), { + signal: ac.signal, + }); + const fetchOpts = { + signal: ac.signal, + options, + context, + }; + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal; + const ignoreAbort = options.ignoreFetchAbort && v !== undefined; + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true; + options.status.fetchError = ac.signal.reason; + if (ignoreAbort) + options.status.fetchAbortIgnored = true; + } + else { + options.status.fetchResolved = true; + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason); + } + // either we didn't abort, and are still here, or we did, and ignored + const bf = p; + if (this.#valList[index] === p) { + if (v === undefined) { + if (bf.__staleWhileFetching) { + this.#valList[index] = bf.__staleWhileFetching; + } + else { + this.delete(k); + } + } + else { + if (options.status) + options.status.fetchUpdated = true; + this.set(k, v, fetchOpts.options); + } + } + return v; + }; + const eb = (er) => { + if (options.status) { + options.status.fetchRejected = true; + options.status.fetchError = er; + } + return fetchFail(er); + }; + const fetchFail = (er) => { + const { aborted } = ac.signal; + const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; + const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; + const noDelete = allowStale || options.noDeleteOnFetchRejection; + const bf = p; + if (this.#valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || bf.__staleWhileFetching === undefined; + if (del) { + this.delete(k); + } + else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.#valList[index] = bf.__staleWhileFetching; + } + } + if (allowStale) { + if (options.status && bf.__staleWhileFetching !== undefined) { + options.status.returnedStale = true; + } + return bf.__staleWhileFetching; + } + else if (bf.__returned === bf) { + throw er; + } + }; + const pcall = (res, rej) => { + const fmp = this.#fetchMethod?.(k, v, fetchOpts); + if (fmp && fmp instanceof Promise) { + fmp.then(v => res(v === undefined ? undefined : v), rej); + } + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if (!options.ignoreFetchAbort || + options.allowStaleOnFetchAbort) { + res(undefined); + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true); + } + } + }); + }; + if (options.status) + options.status.fetchDispatched = true; + const p = new Promise(pcall).then(cb, eb); + const bf = Object.assign(p, { + __abortController: ac, + __staleWhileFetching: v, + __returned: undefined, + }); + if (index === undefined) { + // internal, don't expose status. 
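+            // the pending promise itself becomes the cached value; status is
+            // stripped so this internal set() cannot clobber the fields the
+            // caller's status object is tracking for the outer fetch()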
+ this.set(k, bf, { ...fetchOpts.options, status: undefined }); + index = this.#keyMap.get(k); + } + else { + this.#valList[index] = bf; + } + return bf; + } + #isBackgroundFetch(p) { + if (!this.#hasFetchMethod) + return false; + const b = p; + return (!!b && + b instanceof Promise && + b.hasOwnProperty('__staleWhileFetching') && + b.__abortController instanceof AC); + } + async fetch(k, fetchOptions = {}) { + const { + // get options + allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; + if (!this.#hasFetchMethod) { + if (status) + status.fetch = 'get'; + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }); + } + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + }; + let index = this.#keyMap.get(k); + if (index === undefined) { + if (status) + status.fetch = 'miss'; + const p = this.#backgroundFetch(k, index, options, context); + return (p.__returned = p); + } + else { + // in cache, maybe already fetching + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + const stale = allowStale && v.__staleWhileFetching !== undefined; + if (status) { + status.fetch = 'inflight'; + if (stale) + status.returnedStale = true; + } + return stale ? v.__staleWhileFetching : (v.__returned = v); + } + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.#isStale(index); + if (!forceRefresh && !isStale) { + if (status) + status.fetch = 'hit'; + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + if (status) + this.#statusTTL(status, index); + return v; + } + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.#backgroundFetch(k, index, options, context); + const hasStale = p.__staleWhileFetching !== undefined; + const staleVal = hasStale && allowStale; + if (status) { + status.fetch = isStale ? 'stale' : 'refresh'; + if (staleVal && isStale) + status.returnedStale = true; + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p); + } + } + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. + * + * If the key is not found, get() will return `undefined`. 
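+     *
+     * A stale value is returned (and flagged via `status.returnedStale`)
+     * only when `allowStale` is in effect.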
+ */ + get(k, getOptions = {}) { + const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const value = this.#valList[index]; + const fetching = this.#isBackgroundFetch(value); + if (status) + this.#statusTTL(status, index); + if (this.#isStale(index)) { + if (status) + status.get = 'stale'; + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k); + } + if (status && allowStale) + status.returnedStale = true; + return allowStale ? value : undefined; + } + else { + if (status && + allowStale && + value.__staleWhileFetching !== undefined) { + status.returnedStale = true; + } + return allowStale ? value.__staleWhileFetching : undefined; + } + } + else { + if (status) + status.get = 'hit'; + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching; + } + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + return value; + } + } + else if (status) { + status.get = 'miss'; + } + } + #connect(p, n) { + this.#prev[n] = p; + this.#next[p] = n; + } + #moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.#tail) { + if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#connect(this.#prev[index], this.#next[index]); + } + this.#connect(this.#tail, index); + this.#tail = index; + } + } + /** + * Deletes a key out of the cache. + * Returns true if the key was deleted, false otherwise. + */ + delete(k) { + let deleted = false; + if (this.#size !== 0) { + const index = this.#keyMap.get(k); + if (index !== undefined) { + deleted = true; + if (this.#size === 1) { + this.clear(); + } + else { + this.#removeItemSize(index); + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + this.#keyMap.delete(k); + this.#keyList[index] = undefined; + this.#valList[index] = undefined; + if (index === this.#tail) { + this.#tail = this.#prev[index]; + } + else if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#next[this.#prev[index]] = this.#next[index]; + this.#prev[this.#next[index]] = this.#prev[index]; + } + this.#size--; + this.#free.push(index); + } + } + } + if (this.#hasDisposeAfter && this.#disposed?.length) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return deleted; + } + /** + * Clear the cache entirely, throwing away all values. 
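+     * Dispose handlers still run for each discarded entry, with 'delete' as
+     * the reason, and in-flight background fetches are aborted.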
+ */ + clear() { + for (const index of this.#rindexes({ allowStale: true })) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else { + const k = this.#keyList[index]; + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + } + this.#keyMap.clear(); + this.#valList.fill(undefined); + this.#keyList.fill(undefined); + if (this.#ttls && this.#starts) { + this.#ttls.fill(0); + this.#starts.fill(0); + } + if (this.#sizes) { + this.#sizes.fill(0); + } + this.#head = 0; + this.#tail = 0; + this.#free.length = 0; + this.#calculatedSize = 0; + this.#size = 0; + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } +} +exports.LRUCache = LRUCache; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/index.min.js new file mode 100644 index 0000000000000..8d34a03041d25 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/index.min.js @@ -0,0 +1,2 @@ +"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C; +//# sourceMappingURL=index.min.js.map diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/package.json b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/index.js b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/index.js new file mode 100644 index 0000000000000..23b9754ad6c76 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/index.js @@ -0,0 +1,1400 @@ +/** + * @module LRUCache + */ +const perf = typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date; +const warned = new Set(); +/* c8 ignore start */ +const PROCESS = (typeof process === 'object' && !!process ? process : {}); +/* c8 ignore start */ +const emitWarning = (msg, type, code, fn) => { + typeof PROCESS.emitWarning === 'function' + ? PROCESS.emitWarning(msg, type, code, fn) + : console.error(`[${code}] ${type}: ${msg}`); +}; +let AC = globalThis.AbortController; +let AS = globalThis.AbortSignal; +/* c8 ignore start */ +if (typeof AC === 'undefined') { + //@ts-ignore + AS = class AbortSignal { + onabort; + _onabort = []; + reason; + aborted = false; + addEventListener(_, fn) { + this._onabort.push(fn); + } + }; + //@ts-ignore + AC = class AbortController { + constructor() { + warnACPolyfill(); + } + signal = new AS(); + abort(reason) { + if (this.signal.aborted) + return; + //@ts-ignore + this.signal.reason = reason; + //@ts-ignore + this.signal.aborted = true; + //@ts-ignore + for (const fn of this.signal._onabort) { + fn(reason); + } + this.signal.onabort?.(reason); + } + }; + let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; + const warnACPolyfill = () => { + if (!printACPolyfillWarning) + return; + printACPolyfillWarning = false; + emitWarning('AbortController is not defined. If using lru-cache in ' + + 'node 14, load an AbortController polyfill from the ' + + '`node-abort-controller` package. A minimal polyfill is ' + + 'provided for use by LRUCache.fetch(), but it should not be ' + + 'relied upon in other contexts (eg, passing it to other APIs that ' + + 'use AbortController/AbortSignal might have undesirable effects). 
' + + 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); + }; +} +/* c8 ignore stop */ +const shouldWarn = (code) => !warned.has(code); +const TYPE = Symbol('type'); +const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); +/* c8 ignore start */ +// This is a little bit ridiculous, tbh. +// The maximum array length is 2^32-1 or thereabouts on most JS impls. +// And well before that point, you're caching the entire world, I mean, +// that's ~32GB of just integers for the next/prev links, plus whatever +// else to hold that many keys and values. Just filling the memory with +// zeroes at init time is brutal when you get that big. +// But why not be complete? +// Maybe in the future, these limits will have expanded. +const getUintArray = (max) => !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null; +/* c8 ignore stop */ +class ZeroArray extends Array { + constructor(size) { + super(size); + this.fill(0); + } +} +class Stack { + heap; + length; + // private constructor + static #constructing = false; + static create(max) { + const HeapCls = getUintArray(max); + if (!HeapCls) + return []; + Stack.#constructing = true; + const s = new Stack(max, HeapCls); + Stack.#constructing = false; + return s; + } + constructor(max, HeapCls) { + /* c8 ignore start */ + if (!Stack.#constructing) { + throw new TypeError('instantiate Stack using Stack.create(n)'); + } + /* c8 ignore stop */ + this.heap = new HeapCls(max); + this.length = 0; + } + push(n) { + this.heap[this.length++] = n; + } + pop() { + return this.heap[--this.length]; + } +} +/** + * Default export, the thing you're using this module to get. + * + * All properties from the options object (with the exception of + * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as + * normal public members. (`max` and `maxSize` are read-only getters.) + * Changing any of these will alter the defaults for subsequent method calls, + * but is otherwise safe. + */ +export class LRUCache { + // properties coming in from the options of these, only max and maxSize + // really *need* to be protected. The rest can be modified, as they just + // set defaults for various methods.
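+    // A minimal construction sketch (hypothetical option values, not part
+    // of this file): at least one of `max`, `maxSize`, or `ttl` must be
+    // given, or the constructor throws.
+    //
+    //   const cache = new LRUCache({
+    //     max: 500,             // hard cap on the number of entries
+    //     ttl: 1000 * 60 * 5,   // entries become stale after 5 minutes
+    //     updateAgeOnGet: true, // reading an entry restarts its TTL clock
+    //   })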
+ #max; + #maxSize; + #dispose; + #disposeAfter; + #fetchMethod; + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort; + // computed properties + #size; + #calculatedSize; + #keyMap; + #keyList; + #valList; + #next; + #prev; + #head; + #tail; + #free; + #disposed; + #sizes; + #starts; + #ttls; + #hasDispose; + #hasFetchMethod; + #hasDisposeAfter; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! + * + * @internal + */ + static unsafeExposeInternals(c) { + return { + // properties + starts: c.#starts, + ttls: c.#ttls, + sizes: c.#sizes, + keyMap: c.#keyMap, + keyList: c.#keyList, + valList: c.#valList, + next: c.#next, + prev: c.#prev, + get head() { + return c.#head; + }, + get tail() { + return c.#tail; + }, + free: c.#free, + // methods + isBackgroundFetch: (p) => c.#isBackgroundFetch(p), + backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), + moveToTail: (index) => c.#moveToTail(index), + indexes: (options) => c.#indexes(options), + rindexes: (options) => c.#rindexes(options), + isStale: (index) => c.#isStale(index), + }; + } + // Protected read-only members + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max() { + return this.#max; + } + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize() { + return this.#maxSize; + } + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize() { + return this.#calculatedSize; + } + /** + * The number of items stored in the cache (read-only) + */ + get size() { + return this.#size; + } + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod() { + return this.#fetchMethod; + } + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose() { + return this.#dispose; + } + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter() { + return this.#disposeAfter; + } + constructor(options) { + const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, 
allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer'); + } + const UintArray = max ? getUintArray(max) : Array; + if (!UintArray) { + throw new Error('invalid max value: ' + max); + } + this.#max = max; + this.#maxSize = maxSize; + this.maxEntrySize = maxEntrySize || this.#maxSize; + this.sizeCalculation = sizeCalculation; + if (this.sizeCalculation) { + if (!this.#maxSize && !this.maxEntrySize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function'); + } + } + if (fetchMethod !== undefined && + typeof fetchMethod !== 'function') { + throw new TypeError('fetchMethod must be a function if specified'); + } + this.#fetchMethod = fetchMethod; + this.#hasFetchMethod = !!fetchMethod; + this.#keyMap = new Map(); + this.#keyList = new Array(max).fill(undefined); + this.#valList = new Array(max).fill(undefined); + this.#next = new UintArray(max); + this.#prev = new UintArray(max); + this.#head = 0; + this.#tail = 0; + this.#free = Stack.create(max); + this.#size = 0; + this.#calculatedSize = 0; + if (typeof dispose === 'function') { + this.#dispose = dispose; + } + if (typeof disposeAfter === 'function') { + this.#disposeAfter = disposeAfter; + this.#disposed = []; + } + else { + this.#disposeAfter = undefined; + this.#disposed = undefined; + } + this.#hasDispose = !!this.#dispose; + this.#hasDisposeAfter = !!this.#disposeAfter; + this.noDisposeOnSet = !!noDisposeOnSet; + this.noUpdateTTL = !!noUpdateTTL; + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; + this.ignoreFetchAbort = !!ignoreFetchAbort; + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.#maxSize !== 0) { + if (!isPosInt(this.#maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified'); + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError('maxEntrySize must be a positive integer if specified'); + } + this.#initializeSizeTracking(); + } + this.allowStale = !!allowStale; + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; + this.updateAgeOnGet = !!updateAgeOnGet; + this.updateAgeOnHas = !!updateAgeOnHas; + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1; + this.ttlAutopurge = !!ttlAutopurge; + this.ttl = ttl || 0; + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified'); + } + this.#initializeTTLTracking(); + } + // do not allow completely unbounded caches + if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { + throw new TypeError('At least one of max, maxSize, or ttl is required'); + } + if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { + const code = 'LRU_CACHE_UNBOUNDED'; + if (shouldWarn(code)) { + warned.add(code); + const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.'; + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); + } + } + } + /** + * Return the remaining TTL time for a given entry key + */ + getRemainingTTL(key) { + return this.#keyMap.has(key) ? 
Infinity : 0; + } + #initializeTTLTracking() { + const ttls = new ZeroArray(this.#max); + const starts = new ZeroArray(this.#max); + this.#ttls = ttls; + this.#starts = starts; + this.#setItemTTL = (index, ttl, start = perf.now()) => { + starts[index] = ttl !== 0 ? start : 0; + ttls[index] = ttl; + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.#isStale(index)) { + this.delete(this.#keyList[index]); + } + }, ttl + 1); + // unref() not supported on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + }; + this.#updateItemAge = index => { + starts[index] = ttls[index] !== 0 ? perf.now() : 0; + }; + this.#statusTTL = (status, index) => { + if (ttls[index]) { + const ttl = ttls[index]; + const start = starts[index]; + status.ttl = ttl; + status.start = start; + status.now = cachedNow || getNow(); + const age = status.now - start; + status.remainingTTL = ttl - age; + } + }; + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0; + const getNow = () => { + const n = perf.now(); + if (this.ttlResolution > 0) { + cachedNow = n; + const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); + // not available on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + return n; + }; + this.getRemainingTTL = key => { + const index = this.#keyMap.get(key); + if (index === undefined) { + return 0; + } + const ttl = ttls[index]; + const start = starts[index]; + if (ttl === 0 || start === 0) { + return Infinity; + } + const age = (cachedNow || getNow()) - start; + return ttl - age; + }; + this.#isStale = index => { + return (ttls[index] !== 0 && + starts[index] !== 0 && + (cachedNow || getNow()) - starts[index] > ttls[index]); + }; + } + // conditionally set private methods related to TTL + #updateItemAge = () => { }; + #statusTTL = () => { }; + #setItemTTL = () => { }; + /* c8 ignore stop */ + #isStale = () => false; + #initializeSizeTracking() { + const sizes = new ZeroArray(this.#max); + this.#calculatedSize = 0; + this.#sizes = sizes; + this.#removeItemSize = index => { + this.#calculatedSize -= sizes[index]; + sizes[index] = 0; + }; + this.#requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.#isBackgroundFetch(v)) { + return 0; + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function'); + } + size = sizeCalculation(v, k); + if (!isPosInt(size)) { + throw new TypeError('sizeCalculation return invalid (expect positive integer)'); + } + } + else { + throw new TypeError('invalid size value (must be positive integer). 
' + + 'When maxSize or maxEntrySize is used, sizeCalculation ' + + 'or size must be set.'); + } + } + return size; + }; + this.#addItemSize = (index, size, status) => { + sizes[index] = size; + if (this.#maxSize) { + const maxSize = this.#maxSize - sizes[index]; + while (this.#calculatedSize > maxSize) { + this.#evict(true); + } + } + this.#calculatedSize += sizes[index]; + if (status) { + status.entrySize = size; + status.totalCalculatedSize = this.#calculatedSize; + } + }; + } + #removeItemSize = _i => { }; + #addItemSize = (_i, _s, _st) => { }; + #requireSize = (_k, _v, size, sizeCalculation) => { + if (size || sizeCalculation) { + throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); + } + return 0; + }; + *#indexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#tail; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#head) { + break; + } + else { + i = this.#prev[i]; + } + } + } + } + *#rindexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#head; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#tail) { + break; + } + else { + i = this.#next[i]; + } + } + } + } + #isValidIndex(index) { + return (index !== undefined && + this.#keyMap.get(this.#keyList[index]) === index); + } + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + *entries() { + for (const i of this.#indexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + *rentries() { + for (const i of this.#rindexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + *keys() { + for (const i of this.#indexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + *rkeys() { + for (const i of this.#rindexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + *values() { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. 
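+     *
+     * A minimal sketch (hypothetical entries): after `c.set('a', 1)` then
+     * `c.set('b', 2)`, `[...c.values()]` yields `[2, 1]` (most recently
+     * used first), while `[...c.rvalues()]` yields `[1, 2]`.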
+ */ + *rvalues() { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator]() { + return this.entries(); + } + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to Array.find(). fn is called as fn(value, key, cache). + */ + find(fn, getOptions = {}) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + if (fn(value, this.#keyList[i], this)) { + return this.get(this.#keyList[i], getOptions); + } + } + } + /** + * Call the supplied function on each item in the cache, in order from + * most recently used to least recently used. fn is called as + * fn(value, key, cache). Does not update age or recency of use. + * Does not iterate over stale values. + */ + forEach(fn, thisp = this) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. (ie, less recently used items are iterated over first.) + */ + rforEach(fn, thisp = this) { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale() { + let deleted = false; + for (const i of this.#rindexes({ allowStale: true })) { + if (this.#isStale(i)) { + this.delete(this.#keyList[i]); + deleted = true; + } + } + return deleted; + } + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to cache.load() + */ + dump() { + const arr = []; + for (const i of this.#indexes({ allowStale: true })) { + const key = this.#keyList[i]; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined || key === undefined) + continue; + const entry = { value }; + if (this.#ttls && this.#starts) { + entry.ttl = this.#ttls[i]; + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.#starts[i]; + entry.start = Math.floor(Date.now() - age); + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + arr.unshift([key, entry]); + } + return arr; + } + /** + * Reset the cache and load in the items in entries in the order listed. + * Note that the shape of the resulting cache may be different if the + * same options are not used in both caches. + */ + load(arr) { + this.clear(); + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset, so that + // we get the intended remaining TTL, no matter how long it's + // been on ice. + // + // it's ok for this to be a bit slow, it's a rare operation.
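+                // e.g. (hypothetical caches): `b.load(a.dump())` recreates
+                // a's entries in b with their remaining TTLs intact, because
+                // dump() records `start` as a Date.now()-based timestamp and
+                // the lines below translate it back onto this process's
+                // perf.now() clock.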
+ const age = Date.now() - entry.start; + entry.start = perf.now() - age; + } + this.set(key, entry.value, entry); + } + } + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + */ + set(k, v, setOptions = {}) { + if (v === undefined) { + this.delete(k); + return this; + } + const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; + let { noUpdateTTL = this.noUpdateTTL } = setOptions; + const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss'; + status.maxEntrySizeExceeded = true; + } + // have to delete, in case something is there already. + this.delete(k); + return this; + } + let index = this.#size === 0 ? undefined : this.#keyMap.get(k); + if (index === undefined) { + // addition + index = (this.#size === 0 + ? this.#tail + : this.#free.length !== 0 + ? this.#free.pop() + : this.#size === this.#max + ? this.#evict(false) + : this.#size); + this.#keyList[index] = k; + this.#valList[index] = v; + this.#keyMap.set(k, index); + this.#next[this.#tail] = index; + this.#prev[index] = this.#tail; + this.#tail = index; + this.#size++; + this.#addItemSize(index, size, status); + if (status) + status.set = 'add'; + noUpdateTTL = false; + } + else { + // update + this.#moveToTail(index); + const oldVal = this.#valList[index]; + if (v !== oldVal) { + if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } + } + else if (!noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(oldVal, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([oldVal, k, 'set']); + } + } + this.#removeItemSize(index); + this.#addItemSize(index, size, status); + this.#valList[index] = v; + if (status) { + status.set = 'replace'; + const oldValue = oldVal && this.#isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal; + if (oldValue !== undefined) + status.oldValue = oldValue; + } + } + else if (status) { + status.set = 'update'; + } + } + if (ttl !== 0 && !this.#ttls) { + this.#initializeTTLTracking(); + } + if (this.#ttls) { + if (!noUpdateTTL) { + this.#setItemTTL(index, ttl, start); + } + if (status) + this.#statusTTL(status, index); + } + if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return this; + } + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. 
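+     *
+     * A minimal sketch (hypothetical entries), assuming `new LRUCache({ max: 3 })`:
+     *
+     *   c.set('a', 1); c.set('b', 2); c.set('c', 3)
+     *   c.pop() // 1 ('a' was least recently used)
+     *   c.pop() // 2
+     *   c.pop() // 3
+     *   c.pop() // undefined, the cache is now empty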
+ */ + pop() { + try { + while (this.#size) { + const val = this.#valList[this.#head]; + this.#evict(true); + if (this.#isBackgroundFetch(val)) { + if (val.__staleWhileFetching) { + return val.__staleWhileFetching; + } + } + else if (val !== undefined) { + return val; + } + } + } + finally { + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } + } + #evict(free) { + const head = this.#head; + const k = this.#keyList[head]; + const v = this.#valList[head]; + if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'evict'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'evict']); + } + } + this.#removeItemSize(head); + // if we aren't about to use the index, then null these out + if (free) { + this.#keyList[head] = undefined; + this.#valList[head] = undefined; + this.#free.push(head); + } + if (this.#size === 1) { + this.#head = this.#tail = 0; + this.#free.length = 0; + } + else { + this.#head = this.#next[head]; + } + this.#keyMap.delete(k); + this.#size--; + return head; + } + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k, hasOptions = {}) { + const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v) && + v.__staleWhileFetching === undefined) { + return false; + } + if (!this.#isStale(index)) { + if (updateAgeOnHas) { + this.#updateItemAge(index); + } + if (status) { + status.has = 'hit'; + this.#statusTTL(status, index); + } + return true; + } + else if (status) { + status.has = 'stale'; + this.#statusTTL(status, index); + } + } + else if (status) { + status.has = 'miss'; + } + return false; + } + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k, peekOptions = {}) { + const { allowStale = this.allowStale } = peekOptions; + const index = this.#keyMap.get(k); + if (index !== undefined && + (allowStale || !this.#isStale(index))) { + const v = this.#valList[index]; + // either stale and allowed, or forcing a refresh of non-stale value + return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + } + } + #backgroundFetch(k, index, options, context) { + const v = index === undefined ? undefined : this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + return v; + } + const ac = new AC(); + const { signal } = options; + // when/if our AC signals, then stop listening to theirs. 
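+        // That is: an abort on the caller's signal is forwarded into our
+        // own AbortController, and `{ signal: ac.signal }` below removes
+        // this listener automatically once our controller fires, so a
+        // long-lived caller signal cannot accumulate stale handlers.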
+ signal?.addEventListener('abort', () => ac.abort(signal.reason), { + signal: ac.signal, + }); + const fetchOpts = { + signal: ac.signal, + options, + context, + }; + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal; + const ignoreAbort = options.ignoreFetchAbort && v !== undefined; + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true; + options.status.fetchError = ac.signal.reason; + if (ignoreAbort) + options.status.fetchAbortIgnored = true; + } + else { + options.status.fetchResolved = true; + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason); + } + // either we didn't abort, and are still here, or we did, and ignored + const bf = p; + if (this.#valList[index] === p) { + if (v === undefined) { + if (bf.__staleWhileFetching) { + this.#valList[index] = bf.__staleWhileFetching; + } + else { + this.delete(k); + } + } + else { + if (options.status) + options.status.fetchUpdated = true; + this.set(k, v, fetchOpts.options); + } + } + return v; + }; + const eb = (er) => { + if (options.status) { + options.status.fetchRejected = true; + options.status.fetchError = er; + } + return fetchFail(er); + }; + const fetchFail = (er) => { + const { aborted } = ac.signal; + const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; + const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; + const noDelete = allowStale || options.noDeleteOnFetchRejection; + const bf = p; + if (this.#valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || bf.__staleWhileFetching === undefined; + if (del) { + this.delete(k); + } + else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.#valList[index] = bf.__staleWhileFetching; + } + } + if (allowStale) { + if (options.status && bf.__staleWhileFetching !== undefined) { + options.status.returnedStale = true; + } + return bf.__staleWhileFetching; + } + else if (bf.__returned === bf) { + throw er; + } + }; + const pcall = (res, rej) => { + const fmp = this.#fetchMethod?.(k, v, fetchOpts); + if (fmp && fmp instanceof Promise) { + fmp.then(v => res(v === undefined ? undefined : v), rej); + } + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if (!options.ignoreFetchAbort || + options.allowStaleOnFetchAbort) { + res(undefined); + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true); + } + } + }); + }; + if (options.status) + options.status.fetchDispatched = true; + const p = new Promise(pcall).then(cb, eb); + const bf = Object.assign(p, { + __abortController: ac, + __staleWhileFetching: v, + __returned: undefined, + }); + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, bf, { ...fetchOpts.options, status: undefined }); + index = this.#keyMap.get(k); + } + else { + this.#valList[index] = bf; + } + return bf; + } + #isBackgroundFetch(p) { + if (!this.#hasFetchMethod) + return false; + const b = p; + return (!!b && + b instanceof Promise && + b.hasOwnProperty('__staleWhileFetching') && + b.__abortController instanceof AC); + } + async fetch(k, fetchOptions = {}) { + const { + // get options + allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; + if (!this.#hasFetchMethod) { + if (status) + status.fetch = 'get'; + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }); + } + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + }; + let index = this.#keyMap.get(k); + if (index === undefined) { + if (status) + status.fetch = 'miss'; + const p = this.#backgroundFetch(k, index, options, context); + return (p.__returned = p); + } + else { + // in cache, maybe already fetching + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + const stale = allowStale && v.__staleWhileFetching !== undefined; + if (status) { + status.fetch = 'inflight'; + if (stale) + status.returnedStale = true; + } + return stale ? v.__staleWhileFetching : (v.__returned = v); + } + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.#isStale(index); + if (!forceRefresh && !isStale) { + if (status) + status.fetch = 'hit'; + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + if (status) + this.#statusTTL(status, index); + return v; + } + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.#backgroundFetch(k, index, options, context); + const hasStale = p.__staleWhileFetching !== undefined; + const staleVal = hasStale && allowStale; + if (status) { + status.fetch = isStale ? 'stale' : 'refresh'; + if (staleVal && isStale) + status.returnedStale = true; + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p); + } + } + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. + * + * If the key is not found, get() will return `undefined`. 
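+     *
+     * A minimal sketch (hypothetical entries), assuming `new LRUCache({ max: 2 })`:
+     *
+     *   c.set('a', 1)
+     *   c.set('b', 2)
+     *   c.get('a')    // 1, and 'a' becomes the most recently used entry
+     *   c.set('c', 3) // evicts 'b', now the least recently used
+     *   c.get('b')    // undefined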
+ */ + get(k, getOptions = {}) { + const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const value = this.#valList[index]; + const fetching = this.#isBackgroundFetch(value); + if (status) + this.#statusTTL(status, index); + if (this.#isStale(index)) { + if (status) + status.get = 'stale'; + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k); + } + if (status && allowStale) + status.returnedStale = true; + return allowStale ? value : undefined; + } + else { + if (status && + allowStale && + value.__staleWhileFetching !== undefined) { + status.returnedStale = true; + } + return allowStale ? value.__staleWhileFetching : undefined; + } + } + else { + if (status) + status.get = 'hit'; + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching; + } + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + return value; + } + } + else if (status) { + status.get = 'miss'; + } + } + #connect(p, n) { + this.#prev[n] = p; + this.#next[p] = n; + } + #moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.#tail) { + if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#connect(this.#prev[index], this.#next[index]); + } + this.#connect(this.#tail, index); + this.#tail = index; + } + } + /** + * Deletes a key out of the cache. + * Returns true if the key was deleted, false otherwise. + */ + delete(k) { + let deleted = false; + if (this.#size !== 0) { + const index = this.#keyMap.get(k); + if (index !== undefined) { + deleted = true; + if (this.#size === 1) { + this.clear(); + } + else { + this.#removeItemSize(index); + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + this.#keyMap.delete(k); + this.#keyList[index] = undefined; + this.#valList[index] = undefined; + if (index === this.#tail) { + this.#tail = this.#prev[index]; + } + else if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#next[this.#prev[index]] = this.#next[index]; + this.#prev[this.#next[index]] = this.#prev[index]; + } + this.#size--; + this.#free.push(index); + } + } + } + if (this.#hasDisposeAfter && this.#disposed?.length) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return deleted; + } + /** + * Clear the cache entirely, throwing away all values. 
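+     *
+     * For example (hypothetical entries): after `c.set('a', 1)` and
+     * `c.set('b', 2)`, `c.clear()` leaves `c.size === 0` and
+     * `c.get('a') === undefined`; any configured `dispose` handler is
+     * invoked once per discarded entry.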
+ */ + clear() { + for (const index of this.#rindexes({ allowStale: true })) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else { + const k = this.#keyList[index]; + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + } + this.#keyMap.clear(); + this.#valList.fill(undefined); + this.#keyList.fill(undefined); + if (this.#ttls && this.#starts) { + this.#ttls.fill(0); + this.#starts.fill(0); + } + if (this.#sizes) { + this.#sizes.fill(0); + } + this.#head = 0; + this.#tail = 0; + this.#free.length = 0; + this.#calculatedSize = 0; + this.#size = 0; + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/index.min.js new file mode 100644 index 0000000000000..5a16b3940d6df --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/index.min.js @@ -0,0 +1,2 @@ +var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache}; +//# sourceMappingURL=index.min.js.map diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/package.json b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/package.json b/node_modules/@npmcli/package-json/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000..bae4a04839d1f --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/package.json @@ -0,0 +1,108 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "10.0.1", + "author": "Isaac Z. 
Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "sideEffects": false, + "scripts": { + "build": "npm run prepare", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", + "postprepare": "bash fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write .", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", + "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", + "prebenchmark": "npm run prepare", + "benchmark": "make -C benchmark", + "preprofile": "npm run prepare", + "profile": "make -C benchmark profile" + }, + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "exports": { + "./min": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.min.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.min.js" + } + }, + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + } + }, + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", + "@types/node": "^20.2.5", + "@types/tap": "^15.0.6", + "benchmark": "^2.1.4", + "c8": "^7.11.2", + "clock-mock": "^1.0.6", + "esbuild": "^0.17.11", + "eslint-config-prettier": "^8.5.0", + "marked": "^4.2.12", + "mkdirp": "^2.1.5", + "prettier": "^2.6.2", + "size-limit": "^7.0.8", + "tap": "^16.3.4", + "ts-node": "^10.9.1", + "tslib": "^2.4.0", + "typedoc": "^0.24.6", + "typescript": "^5.0.4" + }, + "license": "ISC", + "files": [ + "dist" + ], + "engines": { + "node": "14 || >=16.14" + }, + "prettier": { + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--expose-gc", + "-r", + "ts-node/register" + ], + "ts": false + }, + "size-limit": [ + { + "path": "./dist/mjs/index.js" + } + ] +} diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/LICENSE b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/LICENSE new file mode 100644 index 0000000000000..19d1364a8ac08 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/LICENSE @@ -0,0 +1,15 @@ +This package contains code originally written by Isaac Z. Schlueter. +Used with permission. + +Copyright (c) Meryn Stol ("Author") +All rights reserved. + +The BSD License + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/extract_description.js b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/extract_description.js new file mode 100644 index 0000000000000..631966b5f29af --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/extract_description.js @@ -0,0 +1,24 @@ +module.exports = extractDescription + +// Extracts description from contents of a readme file in markdown format +function extractDescription (d) { + if (!d) { + return + } + if (d === 'ERROR: No README data found!') { + return + } + // the first block of text before the first heading + // that isn't the first line heading + d = d.trim().split('\n') + let s = 0 + while (d[s] && d[s].trim().match(/^(#|$)/)) { + s++ + } + const l = d.length + let e = s + 1 + while (e < l && d[e].trim()) { + e++ + } + return d.slice(s, e).join(' ').trim() +} diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/fixer.js b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/fixer.js new file mode 100644 index 0000000000000..bb78231d83ca9 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/fixer.js @@ -0,0 +1,475 @@ +var isValidSemver = require('semver/functions/valid') +var cleanSemver = require('semver/functions/clean') +var validateLicense = require('validate-npm-package-license') +var hostedGitInfo = require('hosted-git-info') +var isBuiltinModule = require('is-core-module') +var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies'] +var extractDescription = require('./extract_description') +var url = require('url') +var typos = require('./typos.json') + +var isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.')) + +module.exports = { + // default warning function + warn: function () {}, + + fixRepositoryField: function (data) { + if (data.repositories) { + this.warn('repositories') + data.repository = data.repositories[0] + } + if (!data.repository) { + return this.warn('missingRepository') + } + if (typeof data.repository === 'string') { + data.repository = { + type: 'git', + url: data.repository, + } + } + var r = data.repository.url || '' + if (r) { + var hosted = hostedGitInfo.fromUrl(r) + if (hosted) { + r = data.repository.url + = hosted.getDefaultRepresentation() === 'shortcut' ? 
hosted.https() : hosted.toString() + } + } + + if (r.match(/github.com\/[^/]+\/[^/]+\.git\.git$/)) { + this.warn('brokenGitUrl', r) + } + }, + + fixTypos: function (data) { + Object.keys(typos.topLevel).forEach(function (d) { + if (Object.prototype.hasOwnProperty.call(data, d)) { + this.warn('typo', d, typos.topLevel[d]) + } + }, this) + }, + + fixScriptsField: function (data) { + if (!data.scripts) { + return + } + if (typeof data.scripts !== 'object') { + this.warn('nonObjectScripts') + delete data.scripts + return + } + Object.keys(data.scripts).forEach(function (k) { + if (typeof data.scripts[k] !== 'string') { + this.warn('nonStringScript') + delete data.scripts[k] + } else if (typos.script[k] && !data.scripts[typos.script[k]]) { + this.warn('typo', k, typos.script[k], 'scripts') + } + }, this) + }, + + fixFilesField: function (data) { + var files = data.files + if (files && !Array.isArray(files)) { + this.warn('nonArrayFiles') + delete data.files + } else if (data.files) { + data.files = data.files.filter(function (file) { + if (!file || typeof file !== 'string') { + this.warn('invalidFilename', file) + return false + } else { + return true + } + }, this) + } + }, + + fixBinField: function (data) { + if (!data.bin) { + return + } + if (typeof data.bin === 'string') { + var b = {} + var match + if (match = data.name.match(/^@[^/]+[/](.*)$/)) { + b[match[1]] = data.bin + } else { + b[data.name] = data.bin + } + data.bin = b + } + }, + + fixManField: function (data) { + if (!data.man) { + return + } + if (typeof data.man === 'string') { + data.man = [data.man] + } + }, + fixBundleDependenciesField: function (data) { + var bdd = 'bundledDependencies' + var bd = 'bundleDependencies' + if (data[bdd] && !data[bd]) { + data[bd] = data[bdd] + delete data[bdd] + } + if (data[bd] && !Array.isArray(data[bd])) { + this.warn('nonArrayBundleDependencies') + delete data[bd] + } else if (data[bd]) { + data[bd] = data[bd].filter(function (filtered) { + if (!filtered || typeof filtered !== 'string') { + this.warn('nonStringBundleDependency', filtered) + return false + } else { + if (!data.dependencies) { + data.dependencies = {} + } + if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) { + this.warn('nonDependencyBundleDependency', filtered) + data.dependencies[filtered] = '*' + } + return true + } + }, this) + } + }, + + fixDependencies: function (data, strict) { + objectifyDeps(data, this.warn) + addOptionalDepsToDeps(data, this.warn) + this.fixBundleDependenciesField(data) + + ;['dependencies', 'devDependencies'].forEach(function (deps) { + if (!(deps in data)) { + return + } + if (!data[deps] || typeof data[deps] !== 'object') { + this.warn('nonObjectDependencies', deps) + delete data[deps] + return + } + Object.keys(data[deps]).forEach(function (d) { + var r = data[deps][d] + if (typeof r !== 'string') { + this.warn('nonStringDependency', d, JSON.stringify(r)) + delete data[deps][d] + } + var hosted = hostedGitInfo.fromUrl(data[deps][d]) + if (hosted) { + data[deps][d] = hosted.toString() + } + }, this) + }, this) + }, + + fixModulesField: function (data) { + if (data.modules) { + this.warn('deprecatedModules') + delete data.modules + } + }, + + fixKeywordsField: function (data) { + if (typeof data.keywords === 'string') { + data.keywords = data.keywords.split(/,\s+/) + } + if (data.keywords && !Array.isArray(data.keywords)) { + delete data.keywords + this.warn('nonArrayKeywords') + } else if (data.keywords) { + data.keywords = data.keywords.filter(function (kw) { + if (typeof 
kw !== 'string' || !kw) { + this.warn('nonStringKeyword') + return false + } else { + return true + } + }, this) + } + }, + + fixVersionField: function (data, strict) { + // allow "loose" semver 1.0 versions in non-strict mode + // enforce strict semver 2.0 compliance in strict mode + var loose = !strict + if (!data.version) { + data.version = '' + return true + } + if (!isValidSemver(data.version, loose)) { + throw new Error('Invalid version: "' + data.version + '"') + } + data.version = cleanSemver(data.version, loose) + return true + }, + + fixPeople: function (data) { + modifyPeople(data, unParsePerson) + modifyPeople(data, parsePerson) + }, + + fixNameField: function (data, options) { + if (typeof options === 'boolean') { + options = { strict: options } + } else if (typeof options === 'undefined') { + options = {} + } + var strict = options.strict + if (!data.name && !strict) { + data.name = '' + return + } + if (typeof data.name !== 'string') { + throw new Error('name field must be a string.') + } + if (!strict) { + data.name = data.name.trim() + } + ensureValidName(data.name, strict, options.allowLegacyCase) + if (isBuiltinModule(data.name)) { + this.warn('conflictingName', data.name) + } + }, + + fixDescriptionField: function (data) { + if (data.description && typeof data.description !== 'string') { + this.warn('nonStringDescription') + delete data.description + } + if (data.readme && !data.description) { + data.description = extractDescription(data.readme) + } + if (data.description === undefined) { + delete data.description + } + if (!data.description) { + this.warn('missingDescription') + } + }, + + fixReadmeField: function (data) { + if (!data.readme) { + this.warn('missingReadme') + data.readme = 'ERROR: No README data found!' + } + }, + + fixBugsField: function (data) { + if (!data.bugs && data.repository && data.repository.url) { + var hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted && hosted.bugs()) { + data.bugs = { url: hosted.bugs() } + } + } else if (data.bugs) { + if (typeof data.bugs === 'string') { + if (isEmail(data.bugs)) { + data.bugs = { email: data.bugs } + /* eslint-disable-next-line node/no-deprecated-api */ + } else if (url.parse(data.bugs).protocol) { + data.bugs = { url: data.bugs } + } else { + this.warn('nonEmailUrlBugsString') + } + } else { + bugsTypos(data.bugs, this.warn) + var oldBugs = data.bugs + data.bugs = {} + if (oldBugs.url) { + /* eslint-disable-next-line node/no-deprecated-api */ + if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) { + data.bugs.url = oldBugs.url + } else { + this.warn('nonUrlBugsUrlField') + } + } + if (oldBugs.email) { + if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) { + data.bugs.email = oldBugs.email + } else { + this.warn('nonEmailBugsEmailField') + } + } + } + if (!data.bugs.email && !data.bugs.url) { + delete data.bugs + this.warn('emptyNormalizedBugs') + } + } + }, + + fixHomepageField: function (data) { + if (!data.homepage && data.repository && data.repository.url) { + var hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted && hosted.docs()) { + data.homepage = hosted.docs() + } + } + if (!data.homepage) { + return + } + + if (typeof data.homepage !== 'string') { + this.warn('nonUrlHomepage') + return delete data.homepage + } + /* eslint-disable-next-line node/no-deprecated-api */ + if (!url.parse(data.homepage).protocol) { + data.homepage = 'http://' + data.homepage + } + }, + + fixLicenseField: function (data) { + const license = 
data.license || data.licence + if (!license) { + return this.warn('missingLicense') + } + if ( + typeof (license) !== 'string' || + license.length < 1 || + license.trim() === '' + ) { + return this.warn('invalidLicense') + } + if (!validateLicense(license).validForNewPackages) { + return this.warn('invalidLicense') + } + }, +} + +function isValidScopedPackageName (spec) { + if (spec.charAt(0) !== '@') { + return false + } + + var rest = spec.slice(1).split('/') + if (rest.length !== 2) { + return false + } + + return rest[0] && rest[1] && + rest[0] === encodeURIComponent(rest[0]) && + rest[1] === encodeURIComponent(rest[1]) +} + +function isCorrectlyEncodedName (spec) { + return !spec.match(/[/@\s+%:]/) && + spec === encodeURIComponent(spec) +} + +function ensureValidName (name, strict, allowLegacyCase) { + if (name.charAt(0) === '.' || + !(isValidScopedPackageName(name) || isCorrectlyEncodedName(name)) || + (strict && (!allowLegacyCase) && name !== name.toLowerCase()) || + name.toLowerCase() === 'node_modules' || + name.toLowerCase() === 'favicon.ico') { + throw new Error('Invalid name: ' + JSON.stringify(name)) + } +} + +function modifyPeople (data, fn) { + if (data.author) { + data.author = fn(data.author) + }['maintainers', 'contributors'].forEach(function (set) { + if (!Array.isArray(data[set])) { + return + } + data[set] = data[set].map(fn) + }) + return data +} + +function unParsePerson (person) { + if (typeof person === 'string') { + return person + } + var name = person.name || '' + var u = person.url || person.web + var wrappedUrl = u ? (' (' + u + ')') : '' + var e = person.email || person.mail + var wrappedEmail = e ? (' <' + e + '>') : '' + return name + wrappedEmail + wrappedUrl +} + +function parsePerson (person) { + if (typeof person !== 'string') { + return person + } + var matchedName = person.match(/^([^(<]+)/) + var matchedUrl = person.match(/\(([^()]+)\)/) + var matchedEmail = person.match(/<([^<>]+)>/) + var obj = {} + if (matchedName && matchedName[0].trim()) { + obj.name = matchedName[0].trim() + } + if (matchedEmail) { + obj.email = matchedEmail[1] + } + if (matchedUrl) { + obj.url = matchedUrl[1] + } + return obj +} + +function addOptionalDepsToDeps (data, warn) { + var o = data.optionalDependencies + if (!o) { + return + } + var d = data.dependencies || {} + Object.keys(o).forEach(function (k) { + d[k] = o[k] + }) + data.dependencies = d +} + +function depObjectify (deps, type, warn) { + if (!deps) { + return {} + } + if (typeof deps === 'string') { + deps = deps.trim().split(/[\n\r\s\t ,]+/) + } + if (!Array.isArray(deps)) { + return deps + } + warn('deprecatedArrayDependencies', type) + var o = {} + deps.filter(function (d) { + return typeof d === 'string' + }).forEach(function (d) { + d = d.trim().split(/(:?[@\s><=])/) + var dn = d.shift() + var dv = d.join('') + dv = dv.trim() + dv = dv.replace(/^@/, '') + o[dn] = dv + }) + return o +} + +function objectifyDeps (data, warn) { + depTypes.forEach(function (type) { + if (!data[type]) { + return + } + data[type] = depObjectify(data[type], type, warn) + }) +} + +function bugsTypos (bugs, warn) { + if (!bugs) { + return + } + Object.keys(bugs).forEach(function (k) { + if (typos.bugs[k]) { + warn('typo', k, typos.bugs[k], 'bugs') + bugs[typos.bugs[k]] = bugs[k] + delete bugs[k] + } + }) +} diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/make_warning.js b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/make_warning.js new file mode 100644 index 
0000000000000..3be9c86539952 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/make_warning.js @@ -0,0 +1,22 @@ +var util = require('util') +var messages = require('./warning_messages.json') + +module.exports = function () { + var args = Array.prototype.slice.call(arguments, 0) + var warningName = args.shift() + if (warningName === 'typo') { + return makeTypoWarning.apply(null, args) + } else { + var msgTemplate = messages[warningName] ? messages[warningName] : warningName + ": '%s'" + args.unshift(msgTemplate) + return util.format.apply(null, args) + } +} + +function makeTypoWarning (providedName, probableName, field) { + if (field) { + providedName = field + "['" + providedName + "']" + probableName = field + "['" + probableName + "']" + } + return util.format(messages.typo, providedName, probableName) +} diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/normalize.js b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/normalize.js new file mode 100644 index 0000000000000..bf71d2c1e2235 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/normalize.js @@ -0,0 +1,48 @@ +module.exports = normalize + +var fixer = require('./fixer') +normalize.fixer = fixer + +var makeWarning = require('./make_warning') + +var fieldsToFix = ['name', 'version', 'description', 'repository', 'modules', 'scripts', + 'files', 'bin', 'man', 'bugs', 'keywords', 'readme', 'homepage', 'license'] +var otherThingsToFix = ['dependencies', 'people', 'typos'] + +var thingsToFix = fieldsToFix.map(function (fieldName) { + return ucFirst(fieldName) + 'Field' +}) +// two ways to do this in CoffeeScript on only one line, sub-70 chars: +// thingsToFix = fieldsToFix.map (name) -> ucFirst(name) + "Field" +// thingsToFix = (ucFirst(name) + "Field" for name in fieldsToFix) +thingsToFix = thingsToFix.concat(otherThingsToFix) + +function normalize (data, warn, strict) { + if (warn === true) { + warn = null + strict = true + } + if (!strict) { + strict = false + } + if (!warn || data.private) { + warn = function (msg) { /* noop */ } + } + + if (data.scripts && + data.scripts.install === 'node-gyp rebuild' && + !data.scripts.preinstall) { + data.gypfile = true + } + fixer.warn = function () { + warn(makeWarning.apply(null, arguments)) + } + thingsToFix.forEach(function (thingName) { + fixer['fix' + ucFirst(thingName)](data, strict) + }) + data._id = data.name + '@' + data.version +} + +function ucFirst (string) { + return string.charAt(0).toUpperCase() + string.slice(1) +} diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/safe_format.js b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/safe_format.js new file mode 100644 index 0000000000000..5fc888e5450cd --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/safe_format.js @@ -0,0 +1,11 @@ +var util = require('util') + +module.exports = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.forEach(function (arg) { + if (!arg) { + throw new TypeError('Bad arguments.') + } + }) + return util.format.apply(null, arguments) +} diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/typos.json b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/typos.json new file mode 100644 index 0000000000000..7f9dd283b30ff --- /dev/null +++ 
b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/typos.json @@ -0,0 +1,25 @@ +{ + "topLevel": { + "dependancies": "dependencies" + ,"dependecies": "dependencies" + ,"depdenencies": "dependencies" + ,"devEependencies": "devDependencies" + ,"depends": "dependencies" + ,"dev-dependencies": "devDependencies" + ,"devDependences": "devDependencies" + ,"devDepenencies": "devDependencies" + ,"devdependencies": "devDependencies" + ,"repostitory": "repository" + ,"repo": "repository" + ,"prefereGlobal": "preferGlobal" + ,"hompage": "homepage" + ,"hampage": "homepage" + ,"autohr": "author" + ,"autor": "author" + ,"contributers": "contributors" + ,"publicationConfig": "publishConfig" + ,"script": "scripts" + }, + "bugs": { "web": "url", "name": "url" }, + "script": { "server": "start", "tests": "test" } +} diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/warning_messages.json b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/warning_messages.json new file mode 100644 index 0000000000000..4890f506ed965 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/warning_messages.json @@ -0,0 +1,30 @@ +{ + "repositories": "'repositories' (plural) Not supported. Please pick one as the 'repository' field" + ,"missingRepository": "No repository field." + ,"brokenGitUrl": "Probably broken git url: %s" + ,"nonObjectScripts": "scripts must be an object" + ,"nonStringScript": "script values must be string commands" + ,"nonArrayFiles": "Invalid 'files' member" + ,"invalidFilename": "Invalid filename in 'files' list: %s" + ,"nonArrayBundleDependencies": "Invalid 'bundleDependencies' list. Must be array of package names" + ,"nonStringBundleDependency": "Invalid bundleDependencies member: %s" + ,"nonDependencyBundleDependency": "Non-dependency in bundleDependencies: %s" + ,"nonObjectDependencies": "%s field must be an object" + ,"nonStringDependency": "Invalid dependency: %s %s" + ,"deprecatedArrayDependencies": "specifying %s as array is deprecated" + ,"deprecatedModules": "modules field is deprecated" + ,"nonArrayKeywords": "keywords should be an array of strings" + ,"nonStringKeyword": "keywords should be an array of strings" + ,"conflictingName": "%s is also the name of a node core module." + ,"nonStringDescription": "'description' field should be a string" + ,"missingDescription": "No description" + ,"missingReadme": "No README data" + ,"missingLicense": "No license field." + ,"nonEmailUrlBugsString": "Bug string field must be url, email, or {email,url}" + ,"nonUrlBugsUrlField": "bugs.url field must be a string url. Deleted." + ,"nonEmailBugsEmailField": "bugs.email field must be a string email. Deleted." + ,"emptyNormalizedBugs": "Normalized value of bugs field is an empty object. Deleted." + ,"nonUrlHomepage": "homepage field must be a string url. Deleted." + ,"invalidLicense": "license should be a valid SPDX license expression" + ,"typo": "%s should probably be %s." 
+} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/package.json similarity index 50% rename from node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json rename to node_modules/@npmcli/package-json/node_modules/normalize-package-data/package.json index eeba1c0415788..48d2371d4a66b 100644 --- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json +++ b/node_modules/@npmcli/package-json/node_modules/normalize-package-data/package.json @@ -1,57 +1,62 @@ { - "name": "@npmcli/git", - "version": "4.1.0", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "description": "a util for spawning git from npm CLI contexts", + "name": "normalize-package-data", + "version": "6.0.0", + "author": "GitHub Inc.", + "description": "Normalizes data that can be found in package.json files.", + "license": "BSD-2-Clause", "repository": { "type": "git", - "url": "https://github.com/npm/git.git" + "url": "https://github.com/npm/normalize-package-data.git" }, - "author": "GitHub Inc.", - "license": "ISC", + "main": "lib/normalize.js", "scripts": { - "lint": "eslint \"**/*.js\"", - "snap": "tap", "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint \"**/*.js\"", + "lintfix": "npm run lint -- --fix", "posttest": "npm run lint", + "postsnap": "npm run lintfix --", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "snap": "tap", "template-oss-apply": "template-oss-apply --force" }, - "tap": { - "timeout": 600, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] + "dependencies": { + "hosted-git-info": "^7.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.15.1", - "npm-package-arg": "^10.0.0", - "slash": "^3.0.0", + "@npmcli/template-oss": "4.18.0", "tap": "^16.0.1" }, - "dependencies": { - "@npmcli/promise-spawn": "^6.0.0", - "lru-cache": "^7.4.4", - "npm-pick-manifest": "^8.0.0", - "proc-log": "^3.0.0", - "promise-inflight": "^1.0.1", - "promise-retry": "^2.0.1", - "semver": "^7.3.5", - "which": "^3.0.0" - }, + "files": [ + "bin/", + "lib/" + ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.15.1", - "publish": true + "version": "4.18.0", + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] + }, + "tap": { + "branches": 86, + "functions": 92, + "lines": 86, + "statements": 86, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] } } diff --git a/node_modules/@npmcli/package-json/package.json b/node_modules/@npmcli/package-json/package.json index 33215b638db6e..ab320e8695ca3 100644 --- a/node_modules/@npmcli/package-json/package.json +++ b/node_modules/@npmcli/package-json/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/package-json", - "version": "4.0.1", + "version": "5.0.0", "description": "Programmatic API to update package.json", "main": "lib/index.js", "files": [ @@ -25,17 +25,17 @@ "license": "ISC", "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.17.0", + "@npmcli/template-oss": "4.18.0", "read-package-json": "^6.0.4", "read-package-json-fast": "^3.0.2", "tap": "^16.0.1" }, "dependencies": { - "@npmcli/git": "^4.1.0", + "@npmcli/git": "^5.0.0", "glob": "^10.2.2", - "hosted-git-info": "^6.1.1", + "hosted-git-info": "^7.0.0", "json-parse-even-better-errors": "^3.0.0", - "normalize-package-data": "^5.0.0", + "normalize-package-data": "^6.0.0", "proc-log": "^3.0.0", "semver": "^7.5.3" }, @@ -44,12 +44,18 @@ "url": "https://github.com/npm/package-json.git" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.17.0", - "publish": "true" + "version": "4.18.0", + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { "nyc-arg": [ diff --git a/package-lock.json b/package-lock.json index 9991f5b6b8abd..805c6503cdc0c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -91,7 +91,7 @@ "@npmcli/config": "^7.0.0", "@npmcli/fs": "^3.1.0", "@npmcli/map-workspaces": "^3.0.4", - "@npmcli/package-json": "^4.0.1", + "@npmcli/package-json": "^5.0.0", "@npmcli/promise-spawn": "^6.0.2", "@npmcli/run-script": "^6.0.2", "abbrev": "^2.0.0", @@ -2413,6 +2413,7 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.0.tgz", "integrity": "sha512-KzDPpE5oGu2x3ZHUMacrIPqmvgV48TBqNJzNQTszkOqNwtIjlLoZ+4Gxa268EgPE6UcEzunmZdyY9hLoNClXhQ==", + "inBundle": true, "dependencies": { "@npmcli/promise-spawn": "^6.0.0", "lru-cache": "^10.0.1", @@ -2431,6 +2432,7 @@ "version": "10.0.1", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz", "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==", + "inBundle": true, "engines": { "node": "14 || >=16.14" } @@ -2507,40 +2509,57 @@ } }, "node_modules/@npmcli/package-json": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-4.0.1.tgz", - "integrity": "sha512-lRCEGdHZomFsURroh522YvA/2cVb9oPIJrjHanCJZkiasz1BzcnLr3tBJhlV7S86MBJBuAQ33is2D60YitZL2Q==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.0.0.tgz", + "integrity": "sha512-OI2zdYBLhQ7kpNPaJxiflofYIpkNLi+lnGdzqUOfRmCF3r2l1nadcjtCYMJKv/Utm/ZtlffaUuTiAktPHbc17g==", "inBundle": true, "dependencies": { - "@npmcli/git": "^4.1.0", + "@npmcli/git": "^5.0.0", "glob": "^10.2.2", - "hosted-git-info": "^6.1.1", + "hosted-git-info": "^7.0.0", "json-parse-even-better-errors": 
"^3.0.0", - "normalize-package-data": "^5.0.0", + "normalize-package-data": "^6.0.0", "proc-log": "^3.0.0", "semver": "^7.5.3" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } }, - "node_modules/@npmcli/package-json/node_modules/@npmcli/git": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-4.1.0.tgz", - "integrity": "sha512-9hwoB3gStVfa0N31ymBmrX+GuDGdVA/QWShZVqE0HK2Af+7QGGrCTbZia/SW0ImUTjTne7SP91qxDmtXvDHRPQ==", + "node_modules/@npmcli/package-json/node_modules/hosted-git-info": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.0.tgz", + "integrity": "sha512-ICclEpTLhHj+zCuSb2/usoNXSVkxUSIopre+b1w8NDY9Dntp9LO4vLdHYI336TH8sAqwrRgnSfdkBG2/YpisHA==", "inBundle": true, "dependencies": { - "@npmcli/promise-spawn": "^6.0.0", - "lru-cache": "^7.4.4", - "npm-pick-manifest": "^8.0.0", - "proc-log": "^3.0.0", - "promise-inflight": "^1.0.1", - "promise-retry": "^2.0.1", + "lru-cache": "^10.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/package-json/node_modules/lru-cache": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz", + "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==", + "inBundle": true, + "engines": { + "node": "14 || >=16.14" + } + }, + "node_modules/@npmcli/package-json/node_modules/normalize-package-data": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.0.tgz", + "integrity": "sha512-UL7ELRVxYBHBgYEtZCXjxuD5vPxnmvMGq0jp/dGPKKrN7tfsBh2IY7TlJ15WWwdjRWD3RJbnsygUurTK3xkPkg==", + "inBundle": true, + "dependencies": { + "hosted-git-info": "^7.0.0", + "is-core-module": "^2.8.1", "semver": "^7.3.5", - "which": "^3.0.0" + "validate-npm-package-license": "^3.0.4" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } }, "node_modules/@npmcli/promise-spawn": { @@ -2647,6 +2666,24 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-4.0.1.tgz", + "integrity": "sha512-lRCEGdHZomFsURroh522YvA/2cVb9oPIJrjHanCJZkiasz1BzcnLr3tBJhlV7S86MBJBuAQ33is2D60YitZL2Q==", + "dev": true, + "dependencies": { + "@npmcli/git": "^4.1.0", + "glob": "^10.2.2", + "hosted-git-info": "^6.1.1", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^5.0.0", + "proc-log": "^3.0.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/@octokit/auth-token": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.3.tgz", @@ -15792,7 +15829,7 @@ "@npmcli/metavuln-calculator": "^6.0.1", "@npmcli/name-from-folder": "^2.0.0", "@npmcli/node-gyp": "^3.0.0", - "@npmcli/package-json": "^4.0.0", + "@npmcli/package-json": "^5.0.0", "@npmcli/query": "^3.0.0", "@npmcli/run-script": "^6.0.0", "bin-links": "^4.0.1", diff --git a/package.json b/package.json index 8a88cb1cc8dbc..e16660472c2c1 100644 --- a/package.json +++ b/package.json @@ -56,7 +56,7 @@ "@npmcli/config": "^7.0.0", "@npmcli/fs": "^3.1.0", "@npmcli/map-workspaces": "^3.0.4", - "@npmcli/package-json": "^4.0.1", + "@npmcli/package-json": "^5.0.0", "@npmcli/promise-spawn": "^6.0.2", 
"@npmcli/run-script": "^6.0.2", "abbrev": "^2.0.0", diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index 55409e4266d81..8389434649e82 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -10,7 +10,7 @@ "@npmcli/metavuln-calculator": "^6.0.1", "@npmcli/name-from-folder": "^2.0.0", "@npmcli/node-gyp": "^3.0.0", - "@npmcli/package-json": "^4.0.0", + "@npmcli/package-json": "^5.0.0", "@npmcli/query": "^3.0.0", "@npmcli/run-script": "^6.0.0", "bin-links": "^4.0.1", From 4e0382e1ecd6a6cb5a926620b800bfdf041dd267 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Tue, 15 Aug 2023 12:09:47 -0700 Subject: [PATCH 30/68] deps: cacache@18.0.0 --- node_modules/.gitignore | 25 + .../node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 + .../node_modules/cacache/lib/content/read.js | 166 ++ .../node_modules/cacache/lib/content/rm.js | 18 + .../node_modules/cacache/lib/content/write.js | 205 +++ .../node_modules/cacache/lib/entry-index.js | 330 ++++ .../node_modules/cacache/lib/get.js | 170 ++ .../node_modules/cacache/lib/index.js | 42 + .../node_modules/cacache/lib/memoization.js | 72 + .../node_modules/cacache/lib/put.js | 80 + .../node_modules/cacache/lib/rm.js | 31 + .../node_modules/cacache/lib/util/glob.js | 7 + .../cacache/lib/util/hash-to-segments.js | 7 + .../node_modules/cacache/lib/util/tmp.js | 26 + .../node_modules/cacache/lib/verify.js | 257 +++ .../node_modules/cacache/package.json | 82 + node_modules/cacache/lib/memoization.js | 4 +- .../cacache/node_modules/lru-cache/LICENSE | 15 + .../node_modules/lru-cache/dist/cjs/index.js | 1404 +++++++++++++++++ .../lru-cache/dist/cjs/index.min.js | 2 + .../lru-cache/dist/cjs/package.json | 3 + .../node_modules/lru-cache/dist/mjs/index.js | 1400 ++++++++++++++++ .../lru-cache/dist/mjs/index.min.js | 2 + .../lru-cache/dist/mjs/package.json | 3 + .../node_modules/lru-cache/package.json | 108 ++ node_modules/cacache/package.json | 14 +- .../node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 + .../node_modules/cacache/lib/content/read.js | 166 ++ .../node_modules/cacache/lib/content/rm.js | 18 + .../node_modules/cacache/lib/content/write.js | 205 +++ .../node_modules/cacache/lib/entry-index.js | 330 ++++ .../node_modules/cacache/lib/get.js | 170 ++ .../node_modules/cacache/lib/index.js | 42 + .../node_modules/cacache/lib/memoization.js | 72 + .../node_modules/cacache/lib/put.js | 80 + .../node_modules/cacache/lib/rm.js | 31 + .../node_modules/cacache/lib/util/glob.js | 7 + .../cacache/lib/util/hash-to-segments.js | 7 + .../node_modules/cacache/lib/util/tmp.js | 26 + .../node_modules/cacache/lib/verify.js | 257 +++ .../node_modules/cacache/package.json | 82 + .../node-gyp/node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 + .../node_modules/cacache/lib/content/read.js | 166 ++ .../node_modules/cacache/lib/content/rm.js | 18 + .../node_modules/cacache/lib/content/write.js | 205 +++ .../node_modules/cacache/lib/entry-index.js | 330 ++++ .../node-gyp/node_modules/cacache/lib/get.js | 170 ++ .../node_modules/cacache/lib/index.js | 42 + .../node_modules/cacache/lib/memoization.js | 72 + .../node-gyp/node_modules/cacache/lib/put.js | 80 + .../node-gyp/node_modules/cacache/lib/rm.js | 31 + .../node_modules/cacache/lib/util/glob.js | 7 + .../cacache/lib/util/hash-to-segments.js | 7 + .../node_modules/cacache/lib/util/tmp.js | 26 + .../node_modules/cacache/lib/verify.js | 257 +++ 
.../node_modules/brace-expansion/LICENSE | 21 + .../node_modules/brace-expansion/index.js | 203 +++ .../node_modules/brace-expansion/package.json | 46 + .../cacache/node_modules/glob/LICENSE | 15 + .../node_modules/glob/dist/cjs/package.json | 4 + .../node_modules/glob/dist/cjs/src/bin.js | 270 ++++ .../node_modules/glob/dist/cjs/src/glob.js | 238 +++ .../glob/dist/cjs/src/has-magic.js | 27 + .../node_modules/glob/dist/cjs/src/ignore.js | 103 ++ .../node_modules/glob/dist/cjs/src/index.js | 68 + .../node_modules/glob/dist/cjs/src/pattern.js | 219 +++ .../glob/dist/cjs/src/processor.js | 309 ++++ .../node_modules/glob/dist/cjs/src/walker.js | 358 +++++ .../node_modules/glob/dist/mjs/glob.js | 234 +++ .../node_modules/glob/dist/mjs/has-magic.js | 23 + .../node_modules/glob/dist/mjs/ignore.js | 99 ++ .../node_modules/glob/dist/mjs/index.js | 56 + .../node_modules/glob/dist/mjs/package.json | 4 + .../node_modules/glob/dist/mjs/pattern.js | 215 +++ .../node_modules/glob/dist/mjs/processor.js | 302 ++++ .../node_modules/glob/dist/mjs/walker.js | 352 +++++ .../cacache/node_modules/glob/package.json | 98 ++ .../cacache/node_modules/minimatch/LICENSE | 15 + .../dist/cjs/assert-valid-pattern.js | 14 + .../node_modules/minimatch/dist/cjs/ast.js | 589 +++++++ .../minimatch/dist/cjs/brace-expressions.js | 152 ++ .../node_modules/minimatch/dist/cjs/escape.js | 22 + .../node_modules/minimatch/dist/cjs/index.js | 1011 ++++++++++++ .../minimatch/dist/cjs/package.json | 3 + .../minimatch/dist/cjs/unescape.js | 24 + .../dist/mjs/assert-valid-pattern.js | 10 + .../node_modules/minimatch/dist/mjs/ast.js | 585 +++++++ .../minimatch/dist/mjs/brace-expressions.js | 148 ++ .../node_modules/minimatch/dist/mjs/escape.js | 18 + .../node_modules/minimatch/dist/mjs/index.js | 995 ++++++++++++ .../minimatch/dist/mjs/package.json | 3 + .../minimatch/dist/mjs/unescape.js | 20 + .../node_modules/minimatch/package.json | 86 + .../cacache/node_modules/minipass/LICENSE | 15 + .../node_modules/minipass/dist/cjs/index.js | 1028 ++++++++++++ .../minipass/dist/cjs/package.json | 3 + .../node_modules/minipass/dist/mjs/index.js | 1018 ++++++++++++ .../minipass/dist/mjs/package.json | 3 + .../node_modules/minipass/package.json | 82 + .../node_modules/cacache/package.json | 82 + .../pacote/node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 + .../node_modules/cacache/lib/content/read.js | 166 ++ .../node_modules/cacache/lib/content/rm.js | 18 + .../node_modules/cacache/lib/content/write.js | 205 +++ .../node_modules/cacache/lib/entry-index.js | 330 ++++ .../pacote/node_modules/cacache/lib/get.js | 170 ++ .../pacote/node_modules/cacache/lib/index.js | 42 + .../node_modules/cacache/lib/memoization.js | 72 + .../pacote/node_modules/cacache/lib/put.js | 80 + .../pacote/node_modules/cacache/lib/rm.js | 31 + .../node_modules/cacache/lib/util/glob.js | 7 + .../cacache/lib/util/hash-to-segments.js | 7 + .../node_modules/cacache/lib/util/tmp.js | 26 + .../pacote/node_modules/cacache/lib/verify.js | 257 +++ .../pacote/node_modules/cacache/package.json | 82 + .../sigstore/node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 + .../node_modules/cacache/lib/content/read.js | 166 ++ .../node_modules/cacache/lib/content/rm.js | 18 + .../node_modules/cacache/lib/content/write.js | 205 +++ .../node_modules/cacache/lib/entry-index.js | 330 ++++ .../sigstore/node_modules/cacache/lib/get.js | 170 ++ .../node_modules/cacache/lib/index.js | 42 + 
.../node_modules/cacache/lib/memoization.js | 72 + .../sigstore/node_modules/cacache/lib/put.js | 80 + .../sigstore/node_modules/cacache/lib/rm.js | 31 + .../node_modules/cacache/lib/util/glob.js | 7 + .../cacache/lib/util/hash-to-segments.js | 7 + .../node_modules/cacache/lib/util/tmp.js | 26 + .../node_modules/cacache/lib/verify.js | 257 +++ .../cacache/node_modules/minipass/LICENSE | 15 + .../node_modules/minipass/dist/cjs/index.js | 1028 ++++++++++++ .../minipass/dist/cjs/package.json | 3 + .../node_modules/minipass/dist/mjs/index.js | 1018 ++++++++++++ .../minipass/dist/mjs/package.json | 3 + .../node_modules/minipass/package.json | 82 + .../node_modules/cacache/package.json | 82 + .../tuf-js/node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 + .../node_modules/cacache/lib/content/read.js | 166 ++ .../node_modules/cacache/lib/content/rm.js | 18 + .../node_modules/cacache/lib/content/write.js | 205 +++ .../node_modules/cacache/lib/entry-index.js | 330 ++++ .../tuf-js/node_modules/cacache/lib/get.js | 170 ++ .../tuf-js/node_modules/cacache/lib/index.js | 42 + .../node_modules/cacache/lib/memoization.js | 72 + .../tuf-js/node_modules/cacache/lib/put.js | 80 + .../tuf-js/node_modules/cacache/lib/rm.js | 31 + .../node_modules/cacache/lib/util/glob.js | 7 + .../cacache/lib/util/hash-to-segments.js | 7 + .../node_modules/cacache/lib/util/tmp.js | 26 + .../tuf-js/node_modules/cacache/lib/verify.js | 257 +++ .../cacache/node_modules/minipass/LICENSE | 15 + .../node_modules/minipass/dist/cjs/index.js | 1028 ++++++++++++ .../minipass/dist/cjs/package.json | 3 + .../node_modules/minipass/dist/mjs/index.js | 1018 ++++++++++++ .../minipass/dist/mjs/package.json | 3 + .../node_modules/minipass/package.json | 82 + .../tuf-js/node_modules/cacache/package.json | 82 + package-lock.json | 233 ++- package.json | 2 +- workspaces/arborist/package.json | 2 +- 166 files changed, 25836 insertions(+), 15 deletions(-) create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json create mode 100644 
node_modules/cacache/node_modules/lru-cache/LICENSE create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json create mode 100644 node_modules/cacache/node_modules/lru-cache/package.json create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/get.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/index.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/put.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/LICENSE.md create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/content/rm.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/get.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/put.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/rm.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/util/glob.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/verify.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json create mode 100644 
node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json create mode 100755 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/assert-valid-pattern.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/ast.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/brace-expressions.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/escape.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js create mode 100644 
node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/package.json create mode 100644 node_modules/pacote/node_modules/cacache/LICENSE.md create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/rm.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/get.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/index.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/put.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/rm.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/util/glob.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/verify.js create mode 100644 node_modules/pacote/node_modules/cacache/package.json create mode 100644 node_modules/sigstore/node_modules/cacache/LICENSE.md create mode 100644 node_modules/sigstore/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/content/rm.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/get.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/index.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/put.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/rm.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/util/glob.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/verify.js create mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/LICENSE create mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/index.js create mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/package.json create mode 100644 
node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/index.js create mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/package.json create mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/package.json create mode 100644 node_modules/sigstore/node_modules/cacache/package.json create mode 100644 node_modules/tuf-js/node_modules/cacache/LICENSE.md create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/rm.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/get.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/index.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/put.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/rm.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/util/glob.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/verify.js create mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE create mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js create mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json create mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js create mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json create mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json create mode 100644 node_modules/tuf-js/node_modules/cacache/package.json diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 55611b7a54162..6b6963e6939f7 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -28,6 +28,9 @@ !/@npmcli/installed-package-contents !/@npmcli/map-workspaces !/@npmcli/metavuln-calculator +!/@npmcli/metavuln-calculator/node_modules/ +/@npmcli/metavuln-calculator/node_modules/* +!/@npmcli/metavuln-calculator/node_modules/cacache !/@npmcli/name-from-folder !/@npmcli/node-gyp !/@npmcli/package-json @@ -71,6 +74,9 @@ !/buffer !/builtins !/cacache +!/cacache/node_modules/ +/cacache/node_modules/* +!/cacache/node_modules/lru-cache !/chalk !/chownr !/ci-info @@ -146,6 +152,9 @@ !/just-diff !/lru-cache !/make-fetch-happen +!/make-fetch-happen/node_modules/ +/make-fetch-happen/node_modules/* +!/make-fetch-happen/node_modules/cacache !/minimatch !/minipass-collect !/minipass-collect/node_modules/ @@ -183,6 +192,13 @@ !/node-gyp/node_modules/abbrev !/node-gyp/node_modules/are-we-there-yet !/node-gyp/node_modules/brace-expansion +!/node-gyp/node_modules/cacache +!/node-gyp/node_modules/cacache/node_modules/ +/node-gyp/node_modules/cacache/node_modules/* +!/node-gyp/node_modules/cacache/node_modules/brace-expansion +!/node-gyp/node_modules/cacache/node_modules/glob +!/node-gyp/node_modules/cacache/node_modules/minimatch +!/node-gyp/node_modules/cacache/node_modules/minipass 
!/node-gyp/node_modules/gauge !/node-gyp/node_modules/glob !/node-gyp/node_modules/make-fetch-happen @@ -214,6 +230,7 @@ !/pacote/node_modules/@npmcli/ /pacote/node_modules/@npmcli/* !/pacote/node_modules/@npmcli/git +!/pacote/node_modules/cacache !/parse-conflict-json !/path-is-absolute !/path-key @@ -255,6 +272,10 @@ !/sigstore !/sigstore/node_modules/ /sigstore/node_modules/* +!/sigstore/node_modules/cacache +!/sigstore/node_modules/cacache/node_modules/ +/sigstore/node_modules/cacache/node_modules/* +!/sigstore/node_modules/cacache/node_modules/minipass !/sigstore/node_modules/make-fetch-happen !/sigstore/node_modules/minipass !/smart-buffer @@ -285,6 +306,10 @@ !/tuf-js !/tuf-js/node_modules/ /tuf-js/node_modules/* +!/tuf-js/node_modules/cacache +!/tuf-js/node_modules/cacache/node_modules/ +/tuf-js/node_modules/cacache/node_modules/* +!/tuf-js/node_modules/cacache/node_modules/minipass !/tuf-js/node_modules/make-fetch-happen !/tuf-js/node_modules/minipass !/unique-filename diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md new file mode 100644 index 0000000000000..8d28acf866d93 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
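The cacache hunks that follow implement content-addressable storage: each blob lives at a path derived from its subresource integrity (SRI) digest. As a minimal sketch of the layout documented in lib/content/path.js below (the two-character directory fan-out and the sketchContentPath name are assumptions here, read off the "content-v2/sha512/ba/da/55deadbeefc0ffee" comment, since lib/util/hash-to-segments.js is added by this patch but not quoted in this excerpt):

// Illustrative sketch only, not part of the patch. It mirrors the logic
// of lib/content/path.js below; the 2-char/2-char/rest split of the hex
// digest is an assumption based on that file's example path comment.
const path = require('path')
const ssri = require('ssri')

function sketchContentPath (cache, integrity) {
  const sri = ssri.parse(integrity, { single: true }) // strongest algorithm
  const hex = sri.hexDigest()
  return path.join(cache, 'content-v2', sri.algorithm,
    hex.slice(0, 2), hex.slice(2, 4), hex.slice(4))
}

Given a sha512 SRI whose hex digest starts with bada55, this resolves to <cache>/content-v2/sha512/ba/da/55..., matching the example path in the file's own comment.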
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js new file mode 100644 index 0000000000000..ad5a76a4f73f2 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js new file mode 100644 index 0000000000000..f41b539df65dc --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js @@ -0,0 +1,166 @@ +'use strict' + +const fs = require('fs/promises') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +async function read (cache, integrity, opts = {}) { + const { size } = opts + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && stat.size !== size) { + throw sizeError(size, stat.size) + } + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + const data = await fs.readFile(cpath, { encoding: null }) + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // just stat to ensure it exists + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && size !== stat.size) { + return stream.emit('error', sizeError(size, stat.size)) + } + + return readPipeline(cpath, stat.size, sri, stream) + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.copy = copy + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath, sri) => { + return fs.copyFile(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + 
+async function hasContent (cache, integrity) { + if (!integrity) { + return false + } + + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.stat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } +} + +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. + const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js new file mode 100644 index 0000000000000..ce58d679e4cb2 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,18 @@ +'use strict' + +const fs = require('fs/promises') +const contentPath = require('./path') +const { hasContent } = require('./read') + +module.exports = rm + +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) + return true + } else { + return false + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js new file mode 100644 index 0000000000000..7146146581287 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js @@ -0,0 +1,205 @@ +'use strict' + +const events = require('events') + +const contentPath = require('./path') +const fs = require('fs/promises') +const { moveFile } = require('@npmcli/fs') +const { Minipass } = require('minipass') +const 
Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const fsm = require('fs-minipass') + +module.exports = write + +// Cache of move operations in process so we don't duplicate +const moveOperations = new Map() + +async function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + + if (typeof size === 'number' && data.length !== size) { + throw sizeError(size, data.length) + } + + const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + throw checksumError(integrity, sri) + } + + for (const algo in sri) { + const tmp = await makeTmp(cache, opts) + const hash = sri[algo].toString() + try { + await fs.writeFile(tmp.target, data, { flag: 'wx' }) + await moveToDestination(tmp, cache, hash, opts) + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } + } + return { integrity: sri, size: data.length } +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. 
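+        // (rejecting up front and then attaching .catch(cb) invokes cb(e) on
+        // the microtask queue, i.e. one tick later, as described above)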
+ return Promise.reject(e).catch(cb) + } + // eslint-disable-next-line promise/catch-or-return + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + // eslint-disable-next-line promise/always-return + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +async function handleContent (inputStream, cache, opts) { + const tmp = await makeTmp(cache, opts) + try { + const res = await pipeToTmp(inputStream, cache, tmp.target, opts) + await moveToDestination( + tmp, + cache, + res.integrity, + opts + ) + return res + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } +} + +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } +} + +async function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) + return { + target: tmpTarget, + moved: false, + } +} + +async function moveToDestination (tmp, cache, sri, opts) { + const destination = contentPath(cache, sri) + const destDir = path.dirname(destination) + if (moveOperations.has(destination)) { + return moveOperations.get(destination) + } + moveOperations.set( + destination, + fs.mkdir(destDir, { recursive: true }) + .then(async () => { + await moveFile(tmp.target, destination, { overwrite: false }) + tmp.moved = true + return tmp.moved + }) + .catch(err => { + if (!err.message.startsWith('The destination file exists')) { + throw Object.assign(err, { code: 'EEXIST' }) + } + }).finally(() => { + moveOperations.delete(destination) + }) + + ) + return moveOperations.get(destination) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js new file mode 100644 index 0000000000000..722a37af5ce15 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,330 @@ +'use strict' + +const crypto = 
require('crypto') +const { + appendFile, + mkdir, + readFile, + readdir, + rm, + writeFile, +} = require('fs/promises') +const { Minipass } = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const contentPath = require('./content/path') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const { moveFile } = require('@npmcli/fs') + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. + // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. + if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await mkdir(path.dirname(target), { recursive: true }) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rm(tmp.target, { recursive: true, force: true }) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await mkdir(path.dirname(bucket), { recursive: true }) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + } + + // write the file atomically + const tmp = await setup() + try { + await write(tmp) + } finally { + await teardown(tmp) + } + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +async function insert (cache, key, integrity, opts = {}) { + const { metadata, size, time } = opts + const bucket = 
bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: time || Date.now(), + size, + metadata, + } + try { + await mkdir(path.dirname(bucket), { recursive: true }) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +async function find (cache, key) { + const bucket = bucketPath(cache, key) + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rm(bucket, { recursive: true, force: true }) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await Promise.all(buckets.map(async (bucket) => { + const bucketPath = path.join(indexDir, bucket) + const subbuckets = await readdirOrEmpty(bucketPath) + await Promise.all(subbuckets.map(async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await Promise.all(subbucketEntries.map(async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess? + const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + })) + })) + })) + stream.end() + return stream + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.ls = ls + +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) +} + +module.exports.bucketEntries = bucketEntries + +async function bucketEntries (bucket, filter) { + const data = await readFile(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data, filter) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! 
+ // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (_) { + // eslint-ignore-next-line no-empty-block + } + // coverage disabled here, no need to test with an entry that parses to something falsey + // istanbul ignore else + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js new file mode 100644 index 0000000000000..80ec206c7ecaa --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js @@ -0,0 +1,170 @@ +'use strict' + +const Collect = require('minipass-collect') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +async function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} +module.exports = getData + +async function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return memoized + } + + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res +} +module.exports.byDigest = getDataByDigest + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function 
getStream (cache, key, opts = {}) { + const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + return stream + }).catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} + +module.exports.copy = copy + +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key +} + +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js new file mode 100644 index 0000000000000..c9b0da5f3a271 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js @@ -0,0 +1,42 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.stream = 
get.stream +module.exports.get.stream.byDigest = get.stream.byDigest +module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js new file mode 100644 index 0000000000000..0ff604a479c9c --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js @@ -0,0 +1,72 @@ +'use strict' + +const LRU = require('lru-cache') + +const MEMOIZED = new LRU({ + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.clear() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js new file mode 100644 index 0000000000000..9fc932d5f6dec --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js @@ -0,0 +1,80 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +async function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + 
return res.integrity +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + let error + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + .on('error', (err) => { + error = err + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + async flush () { + if (!error) { + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) + } + }, + })) + + return pipeline +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js new file mode 100644 index 0000000000000..a94760c7cf243 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const { rm } = require('fs/promises') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +async function all (cache) { + memo.clearMemoized() + const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) + return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js new file mode 100644 index 0000000000000..8500c1c16a429 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js @@ -0,0 +1,7 @@ +'use strict' + +const { glob } = require('glob') +const path = require('path') + +const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) +module.exports = (path, options) => glob(globify(path), options) diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 0000000000000..445599b503808 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git 
a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 0000000000000..0bf5302136ebe --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,26 @@ +'use strict' + +const { withTempDir } = require('@npmcli/fs') +const fs = require('fs/promises') +const path = require('path') + +module.exports.mkdir = mktmpdir + +async function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return withTempDir(path.join(cache, 'tmp'), cb, opts) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js new file mode 100644 index 0000000000000..62e85c946490f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js @@ -0,0 +1,257 @@ +'use strict' + +const { + mkdir, + readFile, + rm, + stat, + truncate, + writeFile, +} = require('fs/promises') +const pMap = require('p-map') +const contentPath = require('./content/path') +const fsm = require('fs-minipass') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const path = require('path') +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +async function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats +} + +async function markStartTime (cache, opts) { + return { startTime: new Date() } +} + +async function markEndTime (cache, opts) { + return { endTime: new Date() } +} + +async function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + await mkdir(cache, { recursive: true }) + return null +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rm it. 
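+//
+// Sketch of the mark step with a hypothetical digest: each index entry's
+// integrity is re-parsed and every digest string is recorded as live, e.g.
+//   liveContent.add('sha512-deadbeef...')
+// The sweep step walks content-v2/**, checksum-verifies files whose
+// path-derived digest is live, and rm's everything else.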
+// +async function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + // integrity is stringified, re-parse it so we can get each hash + const integrity = ssri.parse(entry.integrity) + for (const algo in integrity) { + liveContent.add(integrity[algo].toString()) + } + }) + await new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }) + const contentDir = contentPath.contentDir(cache) + const files = await glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await stat(f) + await rm(f, { recursive: true, force: true }) + stats.reclaimedSize += s.size + } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { + throw err + } + + await rm(filepath, { recursive: true, force: true }) + contentInfo.valid = false + } + return contentInfo +} + +async function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function rebuildBucket (cache, bucket, stats, opts) { + await truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
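+  // (index.insert() appends a "<sha1-of-json>\t<json>" line to this same
+  // bucket file, so each append is awaited in turn rather than raced)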
+ for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + time: entry.time, + }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) +} + +async function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + return writeFile(verifile, `${Date.now()}`) +} + +module.exports.lastRun = lastRun + +async function lastRun (cache) { + const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json new file mode 100644 index 0000000000000..ab58cb8b7c50f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json @@ -0,0 +1,82 @@ +{ + "name": "cacache", + "version": "17.1.4", + "cache-version": { + "content": "2", + "index": "5" + }, + "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "coverage": "tap", + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "eslint \"**/*.js\"", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/cacache.git" + }, + "keywords": [ + "cache", + "caching", + "content-addressable", + "sri", + "sri hash", + "subresource integrity", + "cache", + "storage", + "store", + "file store", + "filesystem", + "disk cache", + "disk storage" + ], + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^7.7.1", + "minipass": "^7.0.3", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.18.0", + "tap": "^16.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "windowsCI": false, + "version": "4.18.0", + "publish": "true" + }, + "author": "GitHub Inc.", + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/node_modules/cacache/lib/memoization.js b/node_modules/cacache/lib/memoization.js index 0ff604a479c9c..2ecc60912e456 100644 --- a/node_modules/cacache/lib/memoization.js +++ b/node_modules/cacache/lib/memoization.js @@ -1,8 +1,8 @@ 'use strict' -const LRU = require('lru-cache') +const { LRUCache } = require('lru-cache') -const MEMOIZED = new LRU({ +const MEMOIZED = new LRUCache({ max: 500, maxSize: 50 * 1024 * 1024, // 50MB ttl: 3 * 60 * 1000, // 3 minutes diff --git a/node_modules/cacache/node_modules/lru-cache/LICENSE b/node_modules/cacache/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000000000..f785757cd63f8 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js b/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js new file mode 100644 index 0000000000000..02d76ec800a92 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js @@ -0,0 +1,1404 @@ +"use strict"; +/** + * @module LRUCache + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.LRUCache = void 0; +const perf = typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date; +const warned = new Set(); +/* c8 ignore start */ +const PROCESS = (typeof process === 'object' && !!process ? process : {}); +/* c8 ignore start */ +const emitWarning = (msg, type, code, fn) => { + typeof PROCESS.emitWarning === 'function' + ? PROCESS.emitWarning(msg, type, code, fn) + : console.error(`[${code}] ${type}: ${msg}`); +}; +let AC = globalThis.AbortController; +let AS = globalThis.AbortSignal; +/* c8 ignore start */ +if (typeof AC === 'undefined') { + //@ts-ignore + AS = class AbortSignal { + onabort; + _onabort = []; + reason; + aborted = false; + addEventListener(_, fn) { + this._onabort.push(fn); + } + }; + //@ts-ignore + AC = class AbortController { + constructor() { + warnACPolyfill(); + } + signal = new AS(); + abort(reason) { + if (this.signal.aborted) + return; + //@ts-ignore + this.signal.reason = reason; + //@ts-ignore + this.signal.aborted = true; + //@ts-ignore + for (const fn of this.signal._onabort) { + fn(reason); + } + this.signal.onabort?.(reason); + } + }; + let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; + const warnACPolyfill = () => { + if (!printACPolyfillWarning) + return; + printACPolyfillWarning = false; + emitWarning('AbortController is not defined. 
If using lru-cache in ' + + 'node 14, load an AbortController polyfill from the ' + + '`node-abort-controller` package. A minimal polyfill is ' + + 'provided for use by LRUCache.fetch(), but it should not be ' + + 'relied upon in other contexts (eg, passing it to other APIs that ' + + 'use AbortController/AbortSignal might have undesirable effects). ' + + 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); + }; +} +/* c8 ignore stop */ +const shouldWarn = (code) => !warned.has(code); +const TYPE = Symbol('type'); +const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); +/* c8 ignore start */ +// This is a little bit ridiculous, tbh. +// The maximum array length is 2^32-1 or thereabouts on most JS impls. +// And well before that point, you're caching the entire world, I mean, +// that's ~32GB of just integers for the next/prev links, plus whatever +// else to hold that many keys and values. Just filling the memory with +// zeroes at init time is brutal when you get that big. +// But why not be complete? +// Maybe in the future, these limits will have expanded. +const getUintArray = (max) => !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null; +/* c8 ignore stop */ +class ZeroArray extends Array { + constructor(size) { + super(size); + this.fill(0); + } +} +class Stack { + heap; + length; + // private constructor + static #constructing = false; + static create(max) { + const HeapCls = getUintArray(max); + if (!HeapCls) + return []; + Stack.#constructing = true; + const s = new Stack(max, HeapCls); + Stack.#constructing = false; + return s; + } + constructor(max, HeapCls) { + /* c8 ignore start */ + if (!Stack.#constructing) { + throw new TypeError('instantiate Stack using Stack.create(n)'); + } + /* c8 ignore stop */ + this.heap = new HeapCls(max); + this.length = 0; + } + push(n) { + this.heap[this.length++] = n; + } + pop() { + return this.heap[--this.length]; + } +} +/** + * Default export, the thing you're using this module to get. + * + * All properties from the options object (with the exception of + * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as + * normal public members. (`max` and `maxBase` are read-only getters.) + * Changing any of these will alter the defaults for subsequent method calls, + * but is otherwise safe. + */ +class LRUCache { + // properties coming in from the options of these, only max and maxSize + // really *need* to be protected. The rest can be modified, as they just + // set defaults for various methods. 
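+    // Usage sketch (illustrative only; options shown are a minimal subset):
+    //
+    //   const { LRUCache } = require('lru-cache')
+    //   const cache = new LRUCache({ max: 500, ttl: 60 * 1000 })
+    //   cache.set('some-key', 'some-value')
+    //   cache.get('some-key') // -> 'some-value', and marks it most recent
+    //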
+ #max; + #maxSize; + #dispose; + #disposeAfter; + #fetchMethod; + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort; + // computed properties + #size; + #calculatedSize; + #keyMap; + #keyList; + #valList; + #next; + #prev; + #head; + #tail; + #free; + #disposed; + #sizes; + #starts; + #ttls; + #hasDispose; + #hasFetchMethod; + #hasDisposeAfter; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! + * + * @internal + */ + static unsafeExposeInternals(c) { + return { + // properties + starts: c.#starts, + ttls: c.#ttls, + sizes: c.#sizes, + keyMap: c.#keyMap, + keyList: c.#keyList, + valList: c.#valList, + next: c.#next, + prev: c.#prev, + get head() { + return c.#head; + }, + get tail() { + return c.#tail; + }, + free: c.#free, + // methods + isBackgroundFetch: (p) => c.#isBackgroundFetch(p), + backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), + moveToTail: (index) => c.#moveToTail(index), + indexes: (options) => c.#indexes(options), + rindexes: (options) => c.#rindexes(options), + isStale: (index) => c.#isStale(index), + }; + } + // Protected read-only members + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max() { + return this.#max; + } + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize() { + return this.#maxSize; + } + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize() { + return this.#calculatedSize; + } + /** + * The number of items stored in the cache (read-only) + */ + get size() { + return this.#size; + } + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod() { + return this.#fetchMethod; + } + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose() { + return this.#dispose; + } + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter() { + return this.#disposeAfter; + } + constructor(options) { + const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, 
allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer'); + } + const UintArray = max ? getUintArray(max) : Array; + if (!UintArray) { + throw new Error('invalid max value: ' + max); + } + this.#max = max; + this.#maxSize = maxSize; + this.maxEntrySize = maxEntrySize || this.#maxSize; + this.sizeCalculation = sizeCalculation; + if (this.sizeCalculation) { + if (!this.#maxSize && !this.maxEntrySize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function'); + } + } + if (fetchMethod !== undefined && + typeof fetchMethod !== 'function') { + throw new TypeError('fetchMethod must be a function if specified'); + } + this.#fetchMethod = fetchMethod; + this.#hasFetchMethod = !!fetchMethod; + this.#keyMap = new Map(); + this.#keyList = new Array(max).fill(undefined); + this.#valList = new Array(max).fill(undefined); + this.#next = new UintArray(max); + this.#prev = new UintArray(max); + this.#head = 0; + this.#tail = 0; + this.#free = Stack.create(max); + this.#size = 0; + this.#calculatedSize = 0; + if (typeof dispose === 'function') { + this.#dispose = dispose; + } + if (typeof disposeAfter === 'function') { + this.#disposeAfter = disposeAfter; + this.#disposed = []; + } + else { + this.#disposeAfter = undefined; + this.#disposed = undefined; + } + this.#hasDispose = !!this.#dispose; + this.#hasDisposeAfter = !!this.#disposeAfter; + this.noDisposeOnSet = !!noDisposeOnSet; + this.noUpdateTTL = !!noUpdateTTL; + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; + this.ignoreFetchAbort = !!ignoreFetchAbort; + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.#maxSize !== 0) { + if (!isPosInt(this.#maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified'); + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError('maxEntrySize must be a positive integer if specified'); + } + this.#initializeSizeTracking(); + } + this.allowStale = !!allowStale; + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; + this.updateAgeOnGet = !!updateAgeOnGet; + this.updateAgeOnHas = !!updateAgeOnHas; + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1; + this.ttlAutopurge = !!ttlAutopurge; + this.ttl = ttl || 0; + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified'); + } + this.#initializeTTLTracking(); + } + // do not allow completely unbounded caches + if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { + throw new TypeError('At least one of max, maxSize, or ttl is required'); + } + if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { + const code = 'LRU_CACHE_UNBOUNDED'; + if (shouldWarn(code)) { + warned.add(code); + const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.'; + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); + } + } + } + /** + * Return the remaining TTL time for a given entry key + */ + getRemainingTTL(key) { + return this.#keyMap.has(key) ? 
Infinity : 0; + } + #initializeTTLTracking() { + const ttls = new ZeroArray(this.#max); + const starts = new ZeroArray(this.#max); + this.#ttls = ttls; + this.#starts = starts; + this.#setItemTTL = (index, ttl, start = perf.now()) => { + starts[index] = ttl !== 0 ? start : 0; + ttls[index] = ttl; + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.#isStale(index)) { + this.delete(this.#keyList[index]); + } + }, ttl + 1); + // unref() not supported on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + }; + this.#updateItemAge = index => { + starts[index] = ttls[index] !== 0 ? perf.now() : 0; + }; + this.#statusTTL = (status, index) => { + if (ttls[index]) { + const ttl = ttls[index]; + const start = starts[index]; + status.ttl = ttl; + status.start = start; + status.now = cachedNow || getNow(); + const age = status.now - start; + status.remainingTTL = ttl - age; + } + }; + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0; + const getNow = () => { + const n = perf.now(); + if (this.ttlResolution > 0) { + cachedNow = n; + const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); + // not available on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + return n; + }; + this.getRemainingTTL = key => { + const index = this.#keyMap.get(key); + if (index === undefined) { + return 0; + } + const ttl = ttls[index]; + const start = starts[index]; + if (ttl === 0 || start === 0) { + return Infinity; + } + const age = (cachedNow || getNow()) - start; + return ttl - age; + }; + this.#isStale = index => { + return (ttls[index] !== 0 && + starts[index] !== 0 && + (cachedNow || getNow()) - starts[index] > ttls[index]); + }; + } + // conditionally set private methods related to TTL + #updateItemAge = () => { }; + #statusTTL = () => { }; + #setItemTTL = () => { }; + /* c8 ignore stop */ + #isStale = () => false; + #initializeSizeTracking() { + const sizes = new ZeroArray(this.#max); + this.#calculatedSize = 0; + this.#sizes = sizes; + this.#removeItemSize = index => { + this.#calculatedSize -= sizes[index]; + sizes[index] = 0; + }; + this.#requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.#isBackgroundFetch(v)) { + return 0; + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function'); + } + size = sizeCalculation(v, k); + if (!isPosInt(size)) { + throw new TypeError('sizeCalculation return invalid (expect positive integer)'); + } + } + else { + throw new TypeError('invalid size value (must be positive integer). 
' + + 'When maxSize or maxEntrySize is used, sizeCalculation ' + + 'or size must be set.'); + } + } + return size; + }; + this.#addItemSize = (index, size, status) => { + sizes[index] = size; + if (this.#maxSize) { + const maxSize = this.#maxSize - sizes[index]; + while (this.#calculatedSize > maxSize) { + this.#evict(true); + } + } + this.#calculatedSize += sizes[index]; + if (status) { + status.entrySize = size; + status.totalCalculatedSize = this.#calculatedSize; + } + }; + } + #removeItemSize = _i => { }; + #addItemSize = (_i, _s, _st) => { }; + #requireSize = (_k, _v, size, sizeCalculation) => { + if (size || sizeCalculation) { + throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); + } + return 0; + }; + *#indexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#tail; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#head) { + break; + } + else { + i = this.#prev[i]; + } + } + } + } + *#rindexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#head; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#tail) { + break; + } + else { + i = this.#next[i]; + } + } + } + } + #isValidIndex(index) { + return (index !== undefined && + this.#keyMap.get(this.#keyList[index]) === index); + } + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + *entries() { + for (const i of this.#indexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + *rentries() { + for (const i of this.#rindexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + *keys() { + for (const i of this.#indexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + *rkeys() { + for (const i of this.#rindexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + *values() { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. 
+ */ + *rvalues() { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator]() { + return this.entries(); + } + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to Array.find(). fn is called as fn(value, key, cache). + */ + find(fn, getOptions = {}) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + if (fn(value, this.#keyList[i], this)) { + return this.get(this.#keyList[i], getOptions); + } + } + } + /** + * Call the supplied function on each item in the cache, in order from + * most recently used to least recently used. fn is called as + * fn(value, key, cache). Does not update age or recency of use. + * Does not iterate over stale values. + */ + forEach(fn, thisp = this) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. (ie, less recently used items are iterated over first.) + */ + rforEach(fn, thisp = this) { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale() { + let deleted = false; + for (const i of this.#rindexes({ allowStale: true })) { + if (this.#isStale(i)) { + this.delete(this.#keyList[i]); + deleted = true; + } + } + return deleted; + } + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to cache.load() + */ + dump() { + const arr = []; + for (const i of this.#indexes({ allowStale: true })) { + const key = this.#keyList[i]; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined || key === undefined) + continue; + const entry = { value }; + if (this.#ttls && this.#starts) { + entry.ttl = this.#ttls[i]; + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.#starts[i]; + entry.start = Math.floor(Date.now() - age); + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + arr.unshift([key, entry]); + } + return arr; + } + /** + * Reset the cache and load in the items in entries in the order listed. + * Note that the shape of the resulting cache may be different if the + * same options are not used in both caches. + */ + load(arr) { + this.clear(); + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset, so that + // we get the intended remaining TTL, no matter how long it's + // been on ice. + // + // it's ok for this to be a bit slow, it's a rare operation.
+ const age = Date.now() - entry.start; + entry.start = perf.now() - age; + } + this.set(key, entry.value, entry); + } + } + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + */ + set(k, v, setOptions = {}) { + if (v === undefined) { + this.delete(k); + return this; + } + const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; + let { noUpdateTTL = this.noUpdateTTL } = setOptions; + const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss'; + status.maxEntrySizeExceeded = true; + } + // have to delete, in case something is there already. + this.delete(k); + return this; + } + let index = this.#size === 0 ? undefined : this.#keyMap.get(k); + if (index === undefined) { + // addition + index = (this.#size === 0 + ? this.#tail + : this.#free.length !== 0 + ? this.#free.pop() + : this.#size === this.#max + ? this.#evict(false) + : this.#size); + this.#keyList[index] = k; + this.#valList[index] = v; + this.#keyMap.set(k, index); + this.#next[this.#tail] = index; + this.#prev[index] = this.#tail; + this.#tail = index; + this.#size++; + this.#addItemSize(index, size, status); + if (status) + status.set = 'add'; + noUpdateTTL = false; + } + else { + // update + this.#moveToTail(index); + const oldVal = this.#valList[index]; + if (v !== oldVal) { + if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } + } + else if (!noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(oldVal, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([oldVal, k, 'set']); + } + } + this.#removeItemSize(index); + this.#addItemSize(index, size, status); + this.#valList[index] = v; + if (status) { + status.set = 'replace'; + const oldValue = oldVal && this.#isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal; + if (oldValue !== undefined) + status.oldValue = oldValue; + } + } + else if (status) { + status.set = 'update'; + } + } + if (ttl !== 0 && !this.#ttls) { + this.#initializeTTLTracking(); + } + if (this.#ttls) { + if (!noUpdateTTL) { + this.#setItemTTL(index, ttl, start); + } + if (status) + this.#statusTTL(status, index); + } + if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return this; + } + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. 
+ */ + pop() { + try { + while (this.#size) { + const val = this.#valList[this.#head]; + this.#evict(true); + if (this.#isBackgroundFetch(val)) { + if (val.__staleWhileFetching) { + return val.__staleWhileFetching; + } + } + else if (val !== undefined) { + return val; + } + } + } + finally { + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } + } + #evict(free) { + const head = this.#head; + const k = this.#keyList[head]; + const v = this.#valList[head]; + if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'evict'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'evict']); + } + } + this.#removeItemSize(head); + // if we aren't about to use the index, then null these out + if (free) { + this.#keyList[head] = undefined; + this.#valList[head] = undefined; + this.#free.push(head); + } + if (this.#size === 1) { + this.#head = this.#tail = 0; + this.#free.length = 0; + } + else { + this.#head = this.#next[head]; + } + this.#keyMap.delete(k); + this.#size--; + return head; + } + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k, hasOptions = {}) { + const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v) && + v.__staleWhileFetching === undefined) { + return false; + } + if (!this.#isStale(index)) { + if (updateAgeOnHas) { + this.#updateItemAge(index); + } + if (status) { + status.has = 'hit'; + this.#statusTTL(status, index); + } + return true; + } + else if (status) { + status.has = 'stale'; + this.#statusTTL(status, index); + } + } + else if (status) { + status.has = 'miss'; + } + return false; + } + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k, peekOptions = {}) { + const { allowStale = this.allowStale } = peekOptions; + const index = this.#keyMap.get(k); + if (index !== undefined && + (allowStale || !this.#isStale(index))) { + const v = this.#valList[index]; + // either stale and allowed, or forcing a refresh of non-stale value + return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + } + } + #backgroundFetch(k, index, options, context) { + const v = index === undefined ? undefined : this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + return v; + } + const ac = new AC(); + const { signal } = options; + // when/if our AC signals, then stop listening to theirs. 
+ signal?.addEventListener('abort', () => ac.abort(signal.reason), { + signal: ac.signal, + }); + const fetchOpts = { + signal: ac.signal, + options, + context, + }; + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal; + const ignoreAbort = options.ignoreFetchAbort && v !== undefined; + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true; + options.status.fetchError = ac.signal.reason; + if (ignoreAbort) + options.status.fetchAbortIgnored = true; + } + else { + options.status.fetchResolved = true; + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason); + } + // either we didn't abort, and are still here, or we did, and ignored + const bf = p; + if (this.#valList[index] === p) { + if (v === undefined) { + if (bf.__staleWhileFetching) { + this.#valList[index] = bf.__staleWhileFetching; + } + else { + this.delete(k); + } + } + else { + if (options.status) + options.status.fetchUpdated = true; + this.set(k, v, fetchOpts.options); + } + } + return v; + }; + const eb = (er) => { + if (options.status) { + options.status.fetchRejected = true; + options.status.fetchError = er; + } + return fetchFail(er); + }; + const fetchFail = (er) => { + const { aborted } = ac.signal; + const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; + const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; + const noDelete = allowStale || options.noDeleteOnFetchRejection; + const bf = p; + if (this.#valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || bf.__staleWhileFetching === undefined; + if (del) { + this.delete(k); + } + else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.#valList[index] = bf.__staleWhileFetching; + } + } + if (allowStale) { + if (options.status && bf.__staleWhileFetching !== undefined) { + options.status.returnedStale = true; + } + return bf.__staleWhileFetching; + } + else if (bf.__returned === bf) { + throw er; + } + }; + const pcall = (res, rej) => { + const fmp = this.#fetchMethod?.(k, v, fetchOpts); + if (fmp && fmp instanceof Promise) { + fmp.then(v => res(v === undefined ? undefined : v), rej); + } + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if (!options.ignoreFetchAbort || + options.allowStaleOnFetchAbort) { + res(undefined); + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true); + } + } + }); + }; + if (options.status) + options.status.fetchDispatched = true; + const p = new Promise(pcall).then(cb, eb); + const bf = Object.assign(p, { + __abortController: ac, + __staleWhileFetching: v, + __returned: undefined, + }); + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, bf, { ...fetchOpts.options, status: undefined }); + index = this.#keyMap.get(k); + } + else { + this.#valList[index] = bf; + } + return bf; + } + #isBackgroundFetch(p) { + if (!this.#hasFetchMethod) + return false; + const b = p; + return (!!b && + b instanceof Promise && + b.hasOwnProperty('__staleWhileFetching') && + b.__abortController instanceof AC); + } + async fetch(k, fetchOptions = {}) { + const { + // get options + allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; + if (!this.#hasFetchMethod) { + if (status) + status.fetch = 'get'; + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }); + } + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + }; + let index = this.#keyMap.get(k); + if (index === undefined) { + if (status) + status.fetch = 'miss'; + const p = this.#backgroundFetch(k, index, options, context); + return (p.__returned = p); + } + else { + // in cache, maybe already fetching + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + const stale = allowStale && v.__staleWhileFetching !== undefined; + if (status) { + status.fetch = 'inflight'; + if (stale) + status.returnedStale = true; + } + return stale ? v.__staleWhileFetching : (v.__returned = v); + } + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.#isStale(index); + if (!forceRefresh && !isStale) { + if (status) + status.fetch = 'hit'; + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + if (status) + this.#statusTTL(status, index); + return v; + } + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.#backgroundFetch(k, index, options, context); + const hasStale = p.__staleWhileFetching !== undefined; + const staleVal = hasStale && allowStale; + if (status) { + status.fetch = isStale ? 'stale' : 'refresh'; + if (staleVal && isStale) + status.returnedStale = true; + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p); + } + } + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. + * + * If the key is not found, get() will return `undefined`. 
+ */ + get(k, getOptions = {}) { + const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const value = this.#valList[index]; + const fetching = this.#isBackgroundFetch(value); + if (status) + this.#statusTTL(status, index); + if (this.#isStale(index)) { + if (status) + status.get = 'stale'; + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k); + } + if (status && allowStale) + status.returnedStale = true; + return allowStale ? value : undefined; + } + else { + if (status && + allowStale && + value.__staleWhileFetching !== undefined) { + status.returnedStale = true; + } + return allowStale ? value.__staleWhileFetching : undefined; + } + } + else { + if (status) + status.get = 'hit'; + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching; + } + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + return value; + } + } + else if (status) { + status.get = 'miss'; + } + } + #connect(p, n) { + this.#prev[n] = p; + this.#next[p] = n; + } + #moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.#tail) { + if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#connect(this.#prev[index], this.#next[index]); + } + this.#connect(this.#tail, index); + this.#tail = index; + } + } + /** + * Deletes a key out of the cache. + * Returns true if the key was deleted, false otherwise. + */ + delete(k) { + let deleted = false; + if (this.#size !== 0) { + const index = this.#keyMap.get(k); + if (index !== undefined) { + deleted = true; + if (this.#size === 1) { + this.clear(); + } + else { + this.#removeItemSize(index); + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + this.#keyMap.delete(k); + this.#keyList[index] = undefined; + this.#valList[index] = undefined; + if (index === this.#tail) { + this.#tail = this.#prev[index]; + } + else if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#next[this.#prev[index]] = this.#next[index]; + this.#prev[this.#next[index]] = this.#prev[index]; + } + this.#size--; + this.#free.push(index); + } + } + } + if (this.#hasDisposeAfter && this.#disposed?.length) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return deleted; + } + /** + * Clear the cache entirely, throwing away all values. 
+ */ + clear() { + for (const index of this.#rindexes({ allowStale: true })) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else { + const k = this.#keyList[index]; + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + } + this.#keyMap.clear(); + this.#valList.fill(undefined); + this.#keyList.fill(undefined); + if (this.#ttls && this.#starts) { + this.#ttls.fill(0); + this.#starts.fill(0); + } + if (this.#sizes) { + this.#sizes.fill(0); + } + this.#head = 0; + this.#tail = 0; + this.#free.length = 0; + this.#calculatedSize = 0; + this.#size = 0; + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } +} +exports.LRUCache = LRUCache; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js new file mode 100644 index 0000000000000..8d34a03041d25 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js @@ -0,0 +1,2 @@ +"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C; +//# sourceMappingURL=index.min.js.map diff --git a/node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json b/node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js b/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js new file mode 100644 index 0000000000000..23b9754ad6c76 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js @@ -0,0 +1,1400 @@ +/** + * @module LRUCache + */ +const perf = typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date; +const warned = new Set(); +/* c8 ignore start */ +const PROCESS = (typeof process === 'object' && !!process ? process : {}); +/* c8 ignore start */ +const emitWarning = (msg, type, code, fn) => { + typeof PROCESS.emitWarning === 'function' + ? PROCESS.emitWarning(msg, type, code, fn) + : console.error(`[${code}] ${type}: ${msg}`); +}; +let AC = globalThis.AbortController; +let AS = globalThis.AbortSignal; +/* c8 ignore start */ +if (typeof AC === 'undefined') { + //@ts-ignore + AS = class AbortSignal { + onabort; + _onabort = []; + reason; + aborted = false; + addEventListener(_, fn) { + this._onabort.push(fn); + } + }; + //@ts-ignore + AC = class AbortController { + constructor() { + warnACPolyfill(); + } + signal = new AS(); + abort(reason) { + if (this.signal.aborted) + return; + //@ts-ignore + this.signal.reason = reason; + //@ts-ignore + this.signal.aborted = true; + //@ts-ignore + for (const fn of this.signal._onabort) { + fn(reason); + } + this.signal.onabort?.(reason); + } + }; + let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; + const warnACPolyfill = () => { + if (!printACPolyfillWarning) + return; + printACPolyfillWarning = false; + emitWarning('AbortController is not defined. If using lru-cache in ' + + 'node 14, load an AbortController polyfill from the ' + + '`node-abort-controller` package. A minimal polyfill is ' + + 'provided for use by LRUCache.fetch(), but it should not be ' + + 'relied upon in other contexts (eg, passing it to other APIs that ' + + 'use AbortController/AbortSignal might have undesirable effects). 
' + + 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); + }; +} +/* c8 ignore stop */ +const shouldWarn = (code) => !warned.has(code); +const TYPE = Symbol('type'); +const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); +/* c8 ignore start */ +// This is a little bit ridiculous, tbh. +// The maximum array length is 2^32-1 or thereabouts on most JS impls. +// And well before that point, you're caching the entire world, I mean, +// that's ~32GB of just integers for the next/prev links, plus whatever +// else to hold that many keys and values. Just filling the memory with +// zeroes at init time is brutal when you get that big. +// But why not be complete? +// Maybe in the future, these limits will have expanded. +const getUintArray = (max) => !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null; +/* c8 ignore stop */ +class ZeroArray extends Array { + constructor(size) { + super(size); + this.fill(0); + } +} +class Stack { + heap; + length; + // private constructor + static #constructing = false; + static create(max) { + const HeapCls = getUintArray(max); + if (!HeapCls) + return []; + Stack.#constructing = true; + const s = new Stack(max, HeapCls); + Stack.#constructing = false; + return s; + } + constructor(max, HeapCls) { + /* c8 ignore start */ + if (!Stack.#constructing) { + throw new TypeError('instantiate Stack using Stack.create(n)'); + } + /* c8 ignore stop */ + this.heap = new HeapCls(max); + this.length = 0; + } + push(n) { + this.heap[this.length++] = n; + } + pop() { + return this.heap[--this.length]; + } +} +/** + * Default export, the thing you're using this module to get. + * + * All properties from the options object (with the exception of + * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as + * normal public members. (`max` and `maxBase` are read-only getters.) + * Changing any of these will alter the defaults for subsequent method calls, + * but is otherwise safe. + */ +export class LRUCache { + // properties coming in from the options of these, only max and maxSize + // really *need* to be protected. The rest can be modified, as they just + // set defaults for various methods. 
+ #max; + #maxSize; + #dispose; + #disposeAfter; + #fetchMethod; + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort; + // computed properties + #size; + #calculatedSize; + #keyMap; + #keyList; + #valList; + #next; + #prev; + #head; + #tail; + #free; + #disposed; + #sizes; + #starts; + #ttls; + #hasDispose; + #hasFetchMethod; + #hasDisposeAfter; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! + * + * @internal + */ + static unsafeExposeInternals(c) { + return { + // properties + starts: c.#starts, + ttls: c.#ttls, + sizes: c.#sizes, + keyMap: c.#keyMap, + keyList: c.#keyList, + valList: c.#valList, + next: c.#next, + prev: c.#prev, + get head() { + return c.#head; + }, + get tail() { + return c.#tail; + }, + free: c.#free, + // methods + isBackgroundFetch: (p) => c.#isBackgroundFetch(p), + backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), + moveToTail: (index) => c.#moveToTail(index), + indexes: (options) => c.#indexes(options), + rindexes: (options) => c.#rindexes(options), + isStale: (index) => c.#isStale(index), + }; + } + // Protected read-only members + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max() { + return this.#max; + } + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize() { + return this.#maxSize; + } + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize() { + return this.#calculatedSize; + } + /** + * The number of items stored in the cache (read-only) + */ + get size() { + return this.#size; + } + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod() { + return this.#fetchMethod; + } + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose() { + return this.#dispose; + } + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter() { + return this.#disposeAfter; + } + constructor(options) { + const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, 
allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer'); + } + const UintArray = max ? getUintArray(max) : Array; + if (!UintArray) { + throw new Error('invalid max value: ' + max); + } + this.#max = max; + this.#maxSize = maxSize; + this.maxEntrySize = maxEntrySize || this.#maxSize; + this.sizeCalculation = sizeCalculation; + if (this.sizeCalculation) { + if (!this.#maxSize && !this.maxEntrySize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function'); + } + } + if (fetchMethod !== undefined && + typeof fetchMethod !== 'function') { + throw new TypeError('fetchMethod must be a function if specified'); + } + this.#fetchMethod = fetchMethod; + this.#hasFetchMethod = !!fetchMethod; + this.#keyMap = new Map(); + this.#keyList = new Array(max).fill(undefined); + this.#valList = new Array(max).fill(undefined); + this.#next = new UintArray(max); + this.#prev = new UintArray(max); + this.#head = 0; + this.#tail = 0; + this.#free = Stack.create(max); + this.#size = 0; + this.#calculatedSize = 0; + if (typeof dispose === 'function') { + this.#dispose = dispose; + } + if (typeof disposeAfter === 'function') { + this.#disposeAfter = disposeAfter; + this.#disposed = []; + } + else { + this.#disposeAfter = undefined; + this.#disposed = undefined; + } + this.#hasDispose = !!this.#dispose; + this.#hasDisposeAfter = !!this.#disposeAfter; + this.noDisposeOnSet = !!noDisposeOnSet; + this.noUpdateTTL = !!noUpdateTTL; + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; + this.ignoreFetchAbort = !!ignoreFetchAbort; + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.#maxSize !== 0) { + if (!isPosInt(this.#maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified'); + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError('maxEntrySize must be a positive integer if specified'); + } + this.#initializeSizeTracking(); + } + this.allowStale = !!allowStale; + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; + this.updateAgeOnGet = !!updateAgeOnGet; + this.updateAgeOnHas = !!updateAgeOnHas; + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1; + this.ttlAutopurge = !!ttlAutopurge; + this.ttl = ttl || 0; + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified'); + } + this.#initializeTTLTracking(); + } + // do not allow completely unbounded caches + if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { + throw new TypeError('At least one of max, maxSize, or ttl is required'); + } + if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { + const code = 'LRU_CACHE_UNBOUNDED'; + if (shouldWarn(code)) { + warned.add(code); + const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.'; + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); + } + } + } + /** + * Return the remaining TTL time for a given entry key + */ + getRemainingTTL(key) { + return this.#keyMap.has(key) ? 
Infinity : 0; + } + #initializeTTLTracking() { + const ttls = new ZeroArray(this.#max); + const starts = new ZeroArray(this.#max); + this.#ttls = ttls; + this.#starts = starts; + this.#setItemTTL = (index, ttl, start = perf.now()) => { + starts[index] = ttl !== 0 ? start : 0; + ttls[index] = ttl; + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.#isStale(index)) { + this.delete(this.#keyList[index]); + } + }, ttl + 1); + // unref() not supported on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + }; + this.#updateItemAge = index => { + starts[index] = ttls[index] !== 0 ? perf.now() : 0; + }; + this.#statusTTL = (status, index) => { + if (ttls[index]) { + const ttl = ttls[index]; + const start = starts[index]; + status.ttl = ttl; + status.start = start; + status.now = cachedNow || getNow(); + const age = status.now - start; + status.remainingTTL = ttl - age; + } + }; + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0; + const getNow = () => { + const n = perf.now(); + if (this.ttlResolution > 0) { + cachedNow = n; + const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); + // not available on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + return n; + }; + this.getRemainingTTL = key => { + const index = this.#keyMap.get(key); + if (index === undefined) { + return 0; + } + const ttl = ttls[index]; + const start = starts[index]; + if (ttl === 0 || start === 0) { + return Infinity; + } + const age = (cachedNow || getNow()) - start; + return ttl - age; + }; + this.#isStale = index => { + return (ttls[index] !== 0 && + starts[index] !== 0 && + (cachedNow || getNow()) - starts[index] > ttls[index]); + }; + } + // conditionally set private methods related to TTL + #updateItemAge = () => { }; + #statusTTL = () => { }; + #setItemTTL = () => { }; + /* c8 ignore stop */ + #isStale = () => false; + #initializeSizeTracking() { + const sizes = new ZeroArray(this.#max); + this.#calculatedSize = 0; + this.#sizes = sizes; + this.#removeItemSize = index => { + this.#calculatedSize -= sizes[index]; + sizes[index] = 0; + }; + this.#requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.#isBackgroundFetch(v)) { + return 0; + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function'); + } + size = sizeCalculation(v, k); + if (!isPosInt(size)) { + throw new TypeError('sizeCalculation return invalid (expect positive integer)'); + } + } + else { + throw new TypeError('invalid size value (must be positive integer). 
' + + 'When maxSize or maxEntrySize is used, sizeCalculation ' + + 'or size must be set.'); + } + } + return size; + }; + this.#addItemSize = (index, size, status) => { + sizes[index] = size; + if (this.#maxSize) { + const maxSize = this.#maxSize - sizes[index]; + while (this.#calculatedSize > maxSize) { + this.#evict(true); + } + } + this.#calculatedSize += sizes[index]; + if (status) { + status.entrySize = size; + status.totalCalculatedSize = this.#calculatedSize; + } + }; + } + #removeItemSize = _i => { }; + #addItemSize = (_i, _s, _st) => { }; + #requireSize = (_k, _v, size, sizeCalculation) => { + if (size || sizeCalculation) { + throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); + } + return 0; + }; + *#indexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#tail; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#head) { + break; + } + else { + i = this.#prev[i]; + } + } + } + } + *#rindexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#head; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#tail) { + break; + } + else { + i = this.#next[i]; + } + } + } + } + #isValidIndex(index) { + return (index !== undefined && + this.#keyMap.get(this.#keyList[index]) === index); + } + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + *entries() { + for (const i of this.#indexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + *rentries() { + for (const i of this.#rindexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + *keys() { + for (const i of this.#indexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + *rkeys() { + for (const i of this.#rindexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + *values() { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. 
+ */ + *rvalues() { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator]() { + return this.entries(); + } + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to Array.find(). fn is called as fn(value, key, cache). + */ + find(fn, getOptions = {}) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + if (fn(value, this.#keyList[i], this)) { + return this.get(this.#keyList[i], getOptions); + } + } + } + /** + * Call the supplied function on each item in the cache, in order from + * most recently used to least recently used. fn is called as + * fn(value, key, cache). Does not update age or recency of use. + * Does not iterate over stale values. + */ + forEach(fn, thisp = this) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. (ie, less recently used items are iterated over first.) + */ + rforEach(fn, thisp = this) { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale() { + let deleted = false; + for (const i of this.#rindexes({ allowStale: true })) { + if (this.#isStale(i)) { + this.delete(this.#keyList[i]); + deleted = true; + } + } + return deleted; + } + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to cache.load() + */ + dump() { + const arr = []; + for (const i of this.#indexes({ allowStale: true })) { + const key = this.#keyList[i]; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined || key === undefined) + continue; + const entry = { value }; + if (this.#ttls && this.#starts) { + entry.ttl = this.#ttls[i]; + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.#starts[i]; + entry.start = Math.floor(Date.now() - age); + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + arr.unshift([key, entry]); + } + return arr; + } + /** + * Reset the cache and load in the items in entries in the order listed. + * Note that the shape of the resulting cache may be different if the + * same options are not used in both caches. + */ + load(arr) { + this.clear(); + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset, so that + // we get the intended remaining TTL, no matter how long it's + // been on ice. + // + // it's ok for this to be a bit slow, it's a rare operation.
+ const age = Date.now() - entry.start; + entry.start = perf.now() - age; + } + this.set(key, entry.value, entry); + } + } + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + */ + set(k, v, setOptions = {}) { + if (v === undefined) { + this.delete(k); + return this; + } + const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; + let { noUpdateTTL = this.noUpdateTTL } = setOptions; + const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss'; + status.maxEntrySizeExceeded = true; + } + // have to delete, in case something is there already. + this.delete(k); + return this; + } + let index = this.#size === 0 ? undefined : this.#keyMap.get(k); + if (index === undefined) { + // addition + index = (this.#size === 0 + ? this.#tail + : this.#free.length !== 0 + ? this.#free.pop() + : this.#size === this.#max + ? this.#evict(false) + : this.#size); + this.#keyList[index] = k; + this.#valList[index] = v; + this.#keyMap.set(k, index); + this.#next[this.#tail] = index; + this.#prev[index] = this.#tail; + this.#tail = index; + this.#size++; + this.#addItemSize(index, size, status); + if (status) + status.set = 'add'; + noUpdateTTL = false; + } + else { + // update + this.#moveToTail(index); + const oldVal = this.#valList[index]; + if (v !== oldVal) { + if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } + } + else if (!noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(oldVal, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([oldVal, k, 'set']); + } + } + this.#removeItemSize(index); + this.#addItemSize(index, size, status); + this.#valList[index] = v; + if (status) { + status.set = 'replace'; + const oldValue = oldVal && this.#isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal; + if (oldValue !== undefined) + status.oldValue = oldValue; + } + } + else if (status) { + status.set = 'update'; + } + } + if (ttl !== 0 && !this.#ttls) { + this.#initializeTTLTracking(); + } + if (this.#ttls) { + if (!noUpdateTTL) { + this.#setItemTTL(index, ttl, start); + } + if (status) + this.#statusTTL(status, index); + } + if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return this; + } + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. 
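+ *
+ * For example, `while (cache.pop() !== undefined) {}` drains the
+ * cache in least-recently-used-first order, running any dispose
+ * handlers along the way.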
+ */ + pop() { + try { + while (this.#size) { + const val = this.#valList[this.#head]; + this.#evict(true); + if (this.#isBackgroundFetch(val)) { + if (val.__staleWhileFetching) { + return val.__staleWhileFetching; + } + } + else if (val !== undefined) { + return val; + } + } + } + finally { + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } + } + #evict(free) { + const head = this.#head; + const k = this.#keyList[head]; + const v = this.#valList[head]; + if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'evict'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'evict']); + } + } + this.#removeItemSize(head); + // if we aren't about to use the index, then null these out + if (free) { + this.#keyList[head] = undefined; + this.#valList[head] = undefined; + this.#free.push(head); + } + if (this.#size === 1) { + this.#head = this.#tail = 0; + this.#free.length = 0; + } + else { + this.#head = this.#next[head]; + } + this.#keyMap.delete(k); + this.#size--; + return head; + } + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k, hasOptions = {}) { + const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v) && + v.__staleWhileFetching === undefined) { + return false; + } + if (!this.#isStale(index)) { + if (updateAgeOnHas) { + this.#updateItemAge(index); + } + if (status) { + status.has = 'hit'; + this.#statusTTL(status, index); + } + return true; + } + else if (status) { + status.has = 'stale'; + this.#statusTTL(status, index); + } + } + else if (status) { + status.has = 'miss'; + } + return false; + } + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k, peekOptions = {}) { + const { allowStale = this.allowStale } = peekOptions; + const index = this.#keyMap.get(k); + if (index !== undefined && + (allowStale || !this.#isStale(index))) { + const v = this.#valList[index]; + // either stale and allowed, or forcing a refresh of non-stale value + return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + } + } + #backgroundFetch(k, index, options, context) { + const v = index === undefined ? undefined : this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + return v; + } + const ac = new AC(); + const { signal } = options; + // when/if our AC signals, then stop listening to theirs. 
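+ // (passing { signal: ac.signal } below means the listener on the
+ // caller's signal is removed automatically once our own controller
+ // aborts, so evicted or replaced entries do not leak listeners.)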
+ signal?.addEventListener('abort', () => ac.abort(signal.reason), { + signal: ac.signal, + }); + const fetchOpts = { + signal: ac.signal, + options, + context, + }; + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal; + const ignoreAbort = options.ignoreFetchAbort && v !== undefined; + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true; + options.status.fetchError = ac.signal.reason; + if (ignoreAbort) + options.status.fetchAbortIgnored = true; + } + else { + options.status.fetchResolved = true; + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason); + } + // either we didn't abort, and are still here, or we did, and ignored + const bf = p; + if (this.#valList[index] === p) { + if (v === undefined) { + if (bf.__staleWhileFetching) { + this.#valList[index] = bf.__staleWhileFetching; + } + else { + this.delete(k); + } + } + else { + if (options.status) + options.status.fetchUpdated = true; + this.set(k, v, fetchOpts.options); + } + } + return v; + }; + const eb = (er) => { + if (options.status) { + options.status.fetchRejected = true; + options.status.fetchError = er; + } + return fetchFail(er); + }; + const fetchFail = (er) => { + const { aborted } = ac.signal; + const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; + const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; + const noDelete = allowStale || options.noDeleteOnFetchRejection; + const bf = p; + if (this.#valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || bf.__staleWhileFetching === undefined; + if (del) { + this.delete(k); + } + else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.#valList[index] = bf.__staleWhileFetching; + } + } + if (allowStale) { + if (options.status && bf.__staleWhileFetching !== undefined) { + options.status.returnedStale = true; + } + return bf.__staleWhileFetching; + } + else if (bf.__returned === bf) { + throw er; + } + }; + const pcall = (res, rej) => { + const fmp = this.#fetchMethod?.(k, v, fetchOpts); + if (fmp && fmp instanceof Promise) { + fmp.then(v => res(v === undefined ? undefined : v), rej); + } + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if (!options.ignoreFetchAbort || + options.allowStaleOnFetchAbort) { + res(undefined); + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true); + } + } + }); + }; + if (options.status) + options.status.fetchDispatched = true; + const p = new Promise(pcall).then(cb, eb); + const bf = Object.assign(p, { + __abortController: ac, + __staleWhileFetching: v, + __returned: undefined, + }); + if (index === undefined) { + // internal, don't expose status. 
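+ // (the promise itself becomes the entry's value; get(), set(), and
+ // has() recognize it later via #isBackgroundFetch and its
+ // __staleWhileFetching marker property.)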
+ this.set(k, bf, { ...fetchOpts.options, status: undefined }); + index = this.#keyMap.get(k); + } + else { + this.#valList[index] = bf; + } + return bf; + } + #isBackgroundFetch(p) { + if (!this.#hasFetchMethod) + return false; + const b = p; + return (!!b && + b instanceof Promise && + b.hasOwnProperty('__staleWhileFetching') && + b.__abortController instanceof AC); + } + async fetch(k, fetchOptions = {}) { + const { + // get options + allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; + if (!this.#hasFetchMethod) { + if (status) + status.fetch = 'get'; + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }); + } + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + }; + let index = this.#keyMap.get(k); + if (index === undefined) { + if (status) + status.fetch = 'miss'; + const p = this.#backgroundFetch(k, index, options, context); + return (p.__returned = p); + } + else { + // in cache, maybe already fetching + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + const stale = allowStale && v.__staleWhileFetching !== undefined; + if (status) { + status.fetch = 'inflight'; + if (stale) + status.returnedStale = true; + } + return stale ? v.__staleWhileFetching : (v.__returned = v); + } + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.#isStale(index); + if (!forceRefresh && !isStale) { + if (status) + status.fetch = 'hit'; + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + if (status) + this.#statusTTL(status, index); + return v; + } + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.#backgroundFetch(k, index, options, context); + const hasStale = p.__staleWhileFetching !== undefined; + const staleVal = hasStale && allowStale; + if (status) { + status.fetch = isStale ? 'stale' : 'refresh'; + if (staleVal && isStale) + status.returnedStale = true; + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p); + } + } + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. + * + * If the key is not found, get() will return `undefined`. 
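+ *
+ * By default a stale hit is deleted and `undefined` is returned; pass
+ * { allowStale: true } to receive the stale value instead, or
+ * { noDeleteOnStaleGet: true } to leave the entry in place, e.g.
+ * `cache.get(key, { allowStale: true })`.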
+ */ + get(k, getOptions = {}) { + const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const value = this.#valList[index]; + const fetching = this.#isBackgroundFetch(value); + if (status) + this.#statusTTL(status, index); + if (this.#isStale(index)) { + if (status) + status.get = 'stale'; + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k); + } + if (status && allowStale) + status.returnedStale = true; + return allowStale ? value : undefined; + } + else { + if (status && + allowStale && + value.__staleWhileFetching !== undefined) { + status.returnedStale = true; + } + return allowStale ? value.__staleWhileFetching : undefined; + } + } + else { + if (status) + status.get = 'hit'; + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching; + } + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + return value; + } + } + else if (status) { + status.get = 'miss'; + } + } + #connect(p, n) { + this.#prev[n] = p; + this.#next[p] = n; + } + #moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.#tail) { + if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#connect(this.#prev[index], this.#next[index]); + } + this.#connect(this.#tail, index); + this.#tail = index; + } + } + /** + * Deletes a key out of the cache. + * Returns true if the key was deleted, false otherwise. + */ + delete(k) { + let deleted = false; + if (this.#size !== 0) { + const index = this.#keyMap.get(k); + if (index !== undefined) { + deleted = true; + if (this.#size === 1) { + this.clear(); + } + else { + this.#removeItemSize(index); + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + this.#keyMap.delete(k); + this.#keyList[index] = undefined; + this.#valList[index] = undefined; + if (index === this.#tail) { + this.#tail = this.#prev[index]; + } + else if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#next[this.#prev[index]] = this.#next[index]; + this.#prev[this.#next[index]] = this.#prev[index]; + } + this.#size--; + this.#free.push(index); + } + } + } + if (this.#hasDisposeAfter && this.#disposed?.length) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return deleted; + } + /** + * Clear the cache entirely, throwing away all values. 
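+ *
+ * Note that dispose/disposeAfter handlers, if configured, still run
+ * for every discarded entry, with 'delete' as the reason.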
+ */ + clear() { + for (const index of this.#rindexes({ allowStale: true })) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else { + const k = this.#keyList[index]; + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + } + this.#keyMap.clear(); + this.#valList.fill(undefined); + this.#keyList.fill(undefined); + if (this.#ttls && this.#starts) { + this.#ttls.fill(0); + this.#starts.fill(0); + } + if (this.#sizes) { + this.#sizes.fill(0); + } + this.#head = 0; + this.#tail = 0; + this.#free.length = 0; + this.#calculatedSize = 0; + this.#size = 0; + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js new file mode 100644 index 0000000000000..5a16b3940d6df --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js @@ -0,0 +1,2 @@ +var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache}; +//# sourceMappingURL=index.min.js.map diff --git a/node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json b/node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/cacache/node_modules/lru-cache/package.json b/node_modules/cacache/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000..bae4a04839d1f --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/package.json @@ -0,0 +1,108 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "10.0.1", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "sideEffects": false, + "scripts": { + "build": "npm run prepare", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", + "postprepare": "bash fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write .", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", + "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", + "prebenchmark": "npm run prepare", + "benchmark": "make -C benchmark", + "preprofile": "npm run prepare", + "profile": "make -C benchmark profile" + }, + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "exports": { + "./min": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.min.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.min.js" + } + }, + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + } + }, + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", + "@types/node": "^20.2.5", + "@types/tap": "^15.0.6", + "benchmark": "^2.1.4", + "c8": "^7.11.2", + "clock-mock": "^1.0.6", + "esbuild": "^0.17.11", + "eslint-config-prettier": "^8.5.0", + "marked": "^4.2.12", + "mkdirp": "^2.1.5", + "prettier": "^2.6.2", + "size-limit": "^7.0.8", + "tap": "^16.3.4", + "ts-node": "^10.9.1", + "tslib": "^2.4.0", + "typedoc": "^0.24.6", + "typescript": "^5.0.4" + }, + "license": 
"ISC", + "files": [ + "dist" + ], + "engines": { + "node": "14 || >=16.14" + }, + "prettier": { + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--expose-gc", + "-r", + "ts-node/register" + ], + "ts": false + }, + "size-limit": [ + { + "path": "./dist/mjs/index.js" + } + ] +} diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json index ab58cb8b7c50f..1b14bf4bd1490 100644 --- a/node_modules/cacache/package.json +++ b/node_modules/cacache/package.json @@ -1,6 +1,6 @@ { "name": "cacache", - "version": "17.1.4", + "version": "18.0.0", "cache-version": { "content": "2", "index": "5" @@ -48,7 +48,7 @@ "@npmcli/fs": "^3.1.0", "fs-minipass": "^3.0.0", "glob": "^10.2.2", - "lru-cache": "^7.7.1", + "lru-cache": "^10.0.1", "minipass": "^7.0.3", "minipass-collect": "^1.0.2", "minipass-flush": "^1.0.5", @@ -64,13 +64,19 @@ "tap": "^16.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "windowsCI": false, "version": "4.18.0", - "publish": "true" + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] }, "author": "GitHub Inc.", "tap": { diff --git a/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md b/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md new file mode 100644 index 0000000000000..8d28acf866d93 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js new file mode 100644 index 0000000000000..ad5a76a4f73f2 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js new file mode 100644 index 0000000000000..f41b539df65dc --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js @@ -0,0 +1,166 @@ +'use strict' + +const fs = require('fs/promises') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +async function read (cache, integrity, opts = {}) { + const { size } = opts + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && stat.size !== size) { + throw sizeError(size, stat.size) + } + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + const data = await fs.readFile(cpath, { encoding: null }) + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // just stat to ensure it exists + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && size !== stat.size) { + return stream.emit('error', sizeError(size, stat.size)) + } + + return readPipeline(cpath, stat.size, sri, stream) + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.copy = copy + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath, sri) => { + return fs.copyFile(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +async function hasContent (cache, integrity) { + if 
(!integrity) { + return false + } + + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.stat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } +} + +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. + const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js new file mode 100644 index 0000000000000..ce58d679e4cb2 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,18 @@ +'use strict' + +const fs = require('fs/promises') +const contentPath = require('./path') +const { hasContent } = require('./read') + +module.exports = rm + +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) + return true + } else { + return false + } +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js new file mode 100644 index 0000000000000..7146146581287 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js @@ -0,0 +1,205 @@ +'use strict' + +const events = require('events') + +const contentPath = require('./path') +const fs = require('fs/promises') +const { moveFile } = require('@npmcli/fs') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const 
ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const fsm = require('fs-minipass') + +module.exports = write + +// Cache of move operations in process so we don't duplicate +const moveOperations = new Map() + +async function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + + if (typeof size === 'number' && data.length !== size) { + throw sizeError(size, data.length) + } + + const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + throw checksumError(integrity, sri) + } + + for (const algo in sri) { + const tmp = await makeTmp(cache, opts) + const hash = sri[algo].toString() + try { + await fs.writeFile(tmp.target, data, { flag: 'wx' }) + await moveToDestination(tmp, cache, hash, opts) + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } + } + return { integrity: sri, size: data.length } +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. 
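+ // (Promise.reject(e).catch(cb) delivers cb(e) on the microtask
+ // queue, so the failure is reported asynchronously rather than
+ // during the synchronous end() call.)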
+ return Promise.reject(e).catch(cb) + } + // eslint-disable-next-line promise/catch-or-return + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + // eslint-disable-next-line promise/always-return + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +async function handleContent (inputStream, cache, opts) { + const tmp = await makeTmp(cache, opts) + try { + const res = await pipeToTmp(inputStream, cache, tmp.target, opts) + await moveToDestination( + tmp, + cache, + res.integrity, + opts + ) + return res + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } +} + +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } +} + +async function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) + return { + target: tmpTarget, + moved: false, + } +} + +async function moveToDestination (tmp, cache, sri, opts) { + const destination = contentPath(cache, sri) + const destDir = path.dirname(destination) + if (moveOperations.has(destination)) { + return moveOperations.get(destination) + } + moveOperations.set( + destination, + fs.mkdir(destDir, { recursive: true }) + .then(async () => { + await moveFile(tmp.target, destination, { overwrite: false }) + tmp.moved = true + return tmp.moved + }) + .catch(err => { + if (!err.message.startsWith('The destination file exists')) { + throw Object.assign(err, { code: 'EEXIST' }) + } + }).finally(() => { + moveOperations.delete(destination) + }) + + ) + return moveOperations.get(destination) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js new file mode 100644 index 0000000000000..722a37af5ce15 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,330 @@ +'use strict' + +const crypto = require('crypto') +const { + appendFile, + 
mkdir, + readFile, + readdir, + rm, + writeFile, +} = require('fs/promises') +const { Minipass } = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const contentPath = require('./content/path') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const { moveFile } = require('@npmcli/fs') + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. + // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. + if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await mkdir(path.dirname(target), { recursive: true }) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rm(tmp.target, { recursive: true, force: true }) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await mkdir(path.dirname(bucket), { recursive: true }) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + } + + // write the file atomically + const tmp = await setup() + try { + await write(tmp) + } finally { + await teardown(tmp) + } + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +async function insert (cache, key, integrity, opts = {}) { + const { metadata, size, time } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + 
integrity: integrity && ssri.stringify(integrity), + time: time || Date.now(), + size, + metadata, + } + try { + await mkdir(path.dirname(bucket), { recursive: true }) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +async function find (cache, key) { + const bucket = bucketPath(cache, key) + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rm(bucket, { recursive: true, force: true }) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await Promise.all(buckets.map(async (bucket) => { + const bucketPath = path.join(indexDir, bucket) + const subbuckets = await readdirOrEmpty(bucketPath) + await Promise.all(subbuckets.map(async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await Promise.all(subbucketEntries.map(async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess? + const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + })) + })) + })) + stream.end() + return stream + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.ls = ls + +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) +} + +module.exports.bucketEntries = bucketEntries + +async function bucketEntries (bucket, filter) { + const data = await readFile(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data, filter) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter!
+ // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (_) { + // eslint-ignore-next-line no-empty-block + } + // coverage disabled here, no need to test with an entry that parses to something falsey + // istanbul ignore else + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js new file mode 100644 index 0000000000000..80ec206c7ecaa --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js @@ -0,0 +1,170 @@ +'use strict' + +const Collect = require('minipass-collect') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +async function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} +module.exports = getData + +async function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return memoized + } + + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res +} +module.exports.byDigest = getDataByDigest + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream (cache, key, opts = {}) 
{ + const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + return stream + }).catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} + +module.exports.copy = copy + +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key +} + +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js new file mode 100644 index 0000000000000..c9b0da5f3a271 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js @@ -0,0 +1,42 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.stream = get.stream +module.exports.get.stream.byDigest = 
get.stream.byDigest +module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js new file mode 100644 index 0000000000000..0ff604a479c9c --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js @@ -0,0 +1,72 @@ +'use strict' + +const LRU = require('lru-cache') + +const MEMOIZED = new LRU({ + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.clear() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js new file mode 100644 index 0000000000000..9fc932d5f6dec --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js @@ -0,0 +1,80 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +async function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return res.integrity +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { 
+ const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + let error + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + .on('error', (err) => { + error = err + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + async flush () { + if (!error) { + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) + } + }, + })) + + return pipeline +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js new file mode 100644 index 0000000000000..a94760c7cf243 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const { rm } = require('fs/promises') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +async function all (cache) { + memo.clearMemoized() + const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) + return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js new file mode 100644 index 0000000000000..8500c1c16a429 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js @@ -0,0 +1,7 @@ +'use strict' + +const { glob } = require('glob') +const path = require('path') + +const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) +module.exports = (path, options) => glob(globify(path), options) diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 0000000000000..445599b503808 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 0000000000000..0bf5302136ebe --- /dev/null +++ 
b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,26 @@ +'use strict' + +const { withTempDir } = require('@npmcli/fs') +const fs = require('fs/promises') +const path = require('path') + +module.exports.mkdir = mktmpdir + +async function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return withTempDir(path.join(cache, 'tmp'), cb, opts) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js new file mode 100644 index 0000000000000..62e85c946490f --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js @@ -0,0 +1,257 @@ +'use strict' + +const { + mkdir, + readFile, + rm, + stat, + truncate, + writeFile, +} = require('fs/promises') +const pMap = require('p-map') +const contentPath = require('./content/path') +const fsm = require('fs-minipass') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const path = require('path') +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +async function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats +} + +async function markStartTime (cache, opts) { + return { startTime: new Date() } +} + +async function markEndTime (cache, opts) { + return { endTime: new Date() } +} + +async function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + await mkdir(cache, { recursive: true }) + return null +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rm it. 
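A short sketch of the sweep-side bookkeeping behind steps 3-5 above (the function name and sample path are illustrative, assuming the same ssri package the surrounding code requires): recovering the integrity string from a content file's on-disk location so it can be checked against the live set.

'use strict'

const path = require('path')
const ssri = require('ssri')

// Reverse of contentPath(): the digest is spread across the last three
// path segments and the algorithm is the segment just before them, e.g.
//   <cache>/content-v2/sha512/ba/da/55deadbeefc0ffee
function integrityFromContentFile (file) {
  const segments = file.split(/[/\\]/)
  const digest = segments.slice(-3).join('')
  const algorithm = segments[segments.length - 4]
  return ssri.fromHex(digest, algorithm).toString()
}

// Hypothetical path, mirroring the layout documented in content/path.js:
const file = path.join('my-cache', 'content-v2', 'sha512', 'ba', 'da', '55deadbeefc0ffee')
console.log(integrityFromContentFile(file)) // "sha512-" + base64 of the hex digest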
+// +async function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + // integrity is stringified, re-parse it so we can get each hash + const integrity = ssri.parse(entry.integrity) + for (const algo in integrity) { + liveContent.add(integrity[algo].toString()) + } + }) + await new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }) + const contentDir = contentPath.contentDir(cache) + const files = await glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await stat(f) + await rm(f, { recursive: true, force: true }) + stats.reclaimedSize += s.size + } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { + throw err + } + + await rm(filepath, { recursive: true, force: true }) + contentInfo.valid = false + } + return contentInfo +} + +async function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function rebuildBucket (cache, bucket, stats, opts) { + await truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
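The loop that follows awaits each insert in turn rather than firing them all concurrently; a hedged sketch of that pattern in isolation (the index and entries names mirror the surrounding file, everything else is illustrative):

'use strict'

// `index` stands in for ./entry-index as required above; `entries` is a
// list of { key, integrity, metadata, size } records for a single bucket.
async function serializedInserts (index, cache, entries) {
  for (const entry of entries) {
    // one appendFile in flight at a time, so concurrent writes to the
    // same bucket file cannot interleave
    await index.insert(cache, entry.key, entry.integrity, {
      metadata: entry.metadata,
      size: entry.size,
    })
  }
}

module.exports = serializedInserts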
+ for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + time: entry.time, + }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) +} + +async function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + return writeFile(verifile, `${Date.now()}`) +} + +module.exports.lastRun = lastRun + +async function lastRun (cache) { + const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/package.json b/node_modules/make-fetch-happen/node_modules/cacache/package.json new file mode 100644 index 0000000000000..ab58cb8b7c50f --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/package.json @@ -0,0 +1,82 @@ +{ + "name": "cacache", + "version": "17.1.4", + "cache-version": { + "content": "2", + "index": "5" + }, + "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "coverage": "tap", + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "eslint \"**/*.js\"", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/cacache.git" + }, + "keywords": [ + "cache", + "caching", + "content-addressable", + "sri", + "sri hash", + "subresource integrity", + "cache", + "storage", + "store", + "file store", + "filesystem", + "disk cache", + "disk storage" + ], + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^7.7.1", + "minipass": "^7.0.3", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.18.0", + "tap": "^16.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "windowsCI": false, + "version": "4.18.0", + "publish": "true" + }, + "author": "GitHub Inc.", + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/node_modules/node-gyp/node_modules/cacache/LICENSE.md b/node_modules/node-gyp/node_modules/cacache/LICENSE.md new file mode 100644 index 0000000000000..8d28acf866d93 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. 
+ +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/path.js b/node_modules/node-gyp/node_modules/cacache/lib/content/path.js new file mode 100644 index 0000000000000..ad5a76a4f73f2 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/read.js b/node_modules/node-gyp/node_modules/cacache/lib/content/read.js new file mode 100644 index 0000000000000..f41b539df65dc --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/content/read.js @@ -0,0 +1,166 @@ +'use strict' + +const fs = require('fs/promises') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +async function read (cache, integrity, opts = {}) { + const { size } = opts + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && stat.size !== size) { + throw sizeError(size, stat.size) + } + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + const data = await fs.readFile(cpath, { encoding: null }) + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await 
withContentSri(cache, integrity, async (cpath, sri) => { + // just stat to ensure it exists + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && size !== stat.size) { + return stream.emit('error', sizeError(size, stat.size)) + } + + return readPipeline(cpath, stat.size, sri, stream) + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.copy = copy + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath, sri) => { + return fs.copyFile(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +async function hasContent (cache, integrity) { + if (!integrity) { + return false + } + + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.stat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } +} + +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. + const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js b/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js new file mode 100644 index 0000000000000..ce58d679e4cb2 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,18 @@ +'use strict' + +const fs = require('fs/promises') +const contentPath = require('./path') +const { hasContent } = require('./read') + +module.exports = rm + +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) + return true + } else { + return false + } +} diff --git 
a/node_modules/node-gyp/node_modules/cacache/lib/content/write.js b/node_modules/node-gyp/node_modules/cacache/lib/content/write.js new file mode 100644 index 0000000000000..7146146581287 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/content/write.js @@ -0,0 +1,205 @@ +'use strict' + +const events = require('events') + +const contentPath = require('./path') +const fs = require('fs/promises') +const { moveFile } = require('@npmcli/fs') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const fsm = require('fs-minipass') + +module.exports = write + +// Cache of move operations in process so we don't duplicate +const moveOperations = new Map() + +async function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + + if (typeof size === 'number' && data.length !== size) { + throw sizeError(size, data.length) + } + + const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + throw checksumError(integrity, sri) + } + + for (const algo in sri) { + const tmp = await makeTmp(cache, opts) + const hash = sri[algo].toString() + try { + await fs.writeFile(tmp.target, data, { flag: 'wx' }) + await moveToDestination(tmp, cache, hash, opts) + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } + } + return { integrity: sri, size: data.length } +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. 
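A tiny self-contained sketch of the one-tick deferral used in the next line: rejecting an already-settled promise and attaching the callback with .catch() moves delivery to a later microtask, so the error callback never fires synchronously.

'use strict'

const e = Object.assign(new Error('Cache input stream was empty'), { code: 'ENODATA' })
const cb = (err) => console.log('delivered on a later tick:', err.code)

// The rejection is created already settled, but .catch() handlers always
// run on a microtask, so the synchronous log below wins the race:
Promise.reject(e).catch(cb)
console.log('this line logs first')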
+ return Promise.reject(e).catch(cb) + } + // eslint-disable-next-line promise/catch-or-return + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + // eslint-disable-next-line promise/always-return + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +async function handleContent (inputStream, cache, opts) { + const tmp = await makeTmp(cache, opts) + try { + const res = await pipeToTmp(inputStream, cache, tmp.target, opts) + await moveToDestination( + tmp, + cache, + res.integrity, + opts + ) + return res + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } +} + +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } +} + +async function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) + return { + target: tmpTarget, + moved: false, + } +} + +async function moveToDestination (tmp, cache, sri, opts) { + const destination = contentPath(cache, sri) + const destDir = path.dirname(destination) + if (moveOperations.has(destination)) { + return moveOperations.get(destination) + } + moveOperations.set( + destination, + fs.mkdir(destDir, { recursive: true }) + .then(async () => { + await moveFile(tmp.target, destination, { overwrite: false }) + tmp.moved = true + return tmp.moved + }) + .catch(err => { + if (!err.message.startsWith('The destination file exists')) { + throw Object.assign(err, { code: 'EEXIST' }) + } + }).finally(() => { + moveOperations.delete(destination) + }) + + ) + return moveOperations.get(destination) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js b/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js new file mode 100644 index 0000000000000..722a37af5ce15 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,330 @@ +'use strict' + +const crypto = require('crypto') +const { + appendFile, + mkdir, + readFile, + readdir, 
+ rm, + writeFile, +} = require('fs/promises') +const { Minipass } = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const contentPath = require('./content/path') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const { moveFile } = require('@npmcli/fs') + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. + // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. + if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await mkdir(path.dirname(target), { recursive: true }) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rm(tmp.target, { recursive: true, force: true }) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await mkdir(path.dirname(bucket), { recursive: true }) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + } + + // write the file atomically + const tmp = await setup() + try { + await write(tmp) + } finally { + await teardown(tmp) + } + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +async function insert (cache, key, integrity, opts = {}) { + const { metadata, size, time } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && 
ssri.stringify(integrity), + time: time || Date.now(), + size, + metadata, + } + try { + await mkdir(path.dirname(bucket), { recursive: true }) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +async function find (cache, key) { + const bucket = bucketPath(cache, key) + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rm(bucket, { recursive: true, force: true }) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await Promise.all(buckets.map(async (bucket) => { + const bucketPath = path.join(indexDir, bucket) + const subbuckets = await readdirOrEmpty(bucketPath) + await Promise.all(subbuckets.map(async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await Promise.all(subbucketEntries.map(async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess? + const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + })) + })) + })) + stream.end() + return stream + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.ls = ls + +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) +} + +module.exports.bucketEntries = bucketEntries + +async function bucketEntries (bucket, filter) { + const data = await readFile(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data, filter) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! 
+ // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (_) { + // eslint-ignore-next-line no-empty-block + } + // coverage disabled here, no need to test with an entry that parses to something falsey + // istanbul ignore else + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/get.js b/node_modules/node-gyp/node_modules/cacache/lib/get.js new file mode 100644 index 0000000000000..80ec206c7ecaa --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/get.js @@ -0,0 +1,170 @@ +'use strict' + +const Collect = require('minipass-collect') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +async function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} +module.exports = getData + +async function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return memoized + } + + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res +} +module.exports.byDigest = getDataByDigest + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream (cache, key, opts = {}) { + const { memoize, size 
} = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + return stream + }).catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} + +module.exports.copy = copy + +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key +} + +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/node_modules/node-gyp/node_modules/cacache/lib/index.js b/node_modules/node-gyp/node_modules/cacache/lib/index.js new file mode 100644 index 0000000000000..c9b0da5f3a271 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/index.js @@ -0,0 +1,42 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.stream = get.stream +module.exports.get.stream.byDigest = get.stream.byDigest +module.exports.get.copy = get.copy 
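For reference, a hedged usage sketch of the API surface assembled here (the cache directory and key are hypothetical; the calls mirror the exports this file wires up):

'use strict'

const cacache = require('cacache')

async function demo () {
  // put() resolves to the ssri integrity of the stored content
  const integrity = await cacache.put('/tmp/demo-cache', 'demo-key', 'hello world')
  // get() resolves to { data, metadata, size, integrity }
  const { data, size, metadata } = await cacache.get('/tmp/demo-cache', 'demo-key')
  console.log(data.toString(), size, metadata, integrity)
  // rm.entry() removes the index entry; verify() garbage-collects content
  await cacache.rm.entry('/tmp/demo-cache', 'demo-key')
  await cacache.verify('/tmp/demo-cache')
}

demo().catch(console.error)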
+module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/node-gyp/node_modules/cacache/lib/memoization.js b/node_modules/node-gyp/node_modules/cacache/lib/memoization.js new file mode 100644 index 0000000000000..0ff604a479c9c --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/memoization.js @@ -0,0 +1,72 @@ +'use strict' + +const LRU = require('lru-cache') + +const MEMOIZED = new LRU({ + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.clear() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/put.js b/node_modules/node-gyp/node_modules/cacache/lib/put.js new file mode 100644 index 0000000000000..9fc932d5f6dec --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/put.js @@ -0,0 +1,80 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +async function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return res.integrity +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + let error + + let memoData + 
const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + .on('error', (err) => { + error = err + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + async flush () { + if (!error) { + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) + } + }, + })) + + return pipeline +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/rm.js b/node_modules/node-gyp/node_modules/cacache/lib/rm.js new file mode 100644 index 0000000000000..a94760c7cf243 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const { rm } = require('fs/promises') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +async function all (cache) { + memo.clearMemoized() + const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) + return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js b/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js new file mode 100644 index 0000000000000..8500c1c16a429 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js @@ -0,0 +1,7 @@ +'use strict' + +const { glob } = require('glob') +const path = require('path') + +const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) +module.exports = (path, options) => glob(globify(path), options) diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 0000000000000..445599b503808 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js b/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 0000000000000..0bf5302136ebe --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,26 @@ +'use strict' + +const { withTempDir } = require('@npmcli/fs') +const fs = require('fs/promises') +const path = require('path') + +module.exports.mkdir = mktmpdir + +async function 
mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return withTempDir(path.join(cache, 'tmp'), cb, opts) +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/verify.js b/node_modules/node-gyp/node_modules/cacache/lib/verify.js new file mode 100644 index 0000000000000..62e85c946490f --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/verify.js @@ -0,0 +1,257 @@ +'use strict' + +const { + mkdir, + readFile, + rm, + stat, + truncate, + writeFile, +} = require('fs/promises') +const pMap = require('p-map') +const contentPath = require('./content/path') +const fsm = require('fs-minipass') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const path = require('path') +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +async function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats +} + +async function markStartTime (cache, opts) { + return { startTime: new Date() } +} + +async function markEndTime (cache, opts) { + return { endTime: new Date() } +} + +async function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + await mkdir(cache, { recursive: true }) + return null +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rm it. 
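+//
+// As a rough illustration (using only names that appear in this package):
+// an index entry whose `integrity` parses to 'sha512-<hex>' marks that exact
+// string live in step 2; step 3 then walks content-vX/<algo>/<aa>/<bb>/<rest>
+// (the segment layout from lib/util/hash-to-segments.js), recomputes the same
+// integrity string for each file, and steps 4-5 verify or reclaim accordingly.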
+// +async function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + // integrity is stringified, re-parse it so we can get each hash + const integrity = ssri.parse(entry.integrity) + for (const algo in integrity) { + liveContent.add(integrity[algo].toString()) + } + }) + await new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }) + const contentDir = contentPath.contentDir(cache) + const files = await glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await stat(f) + await rm(f, { recursive: true, force: true }) + stats.reclaimedSize += s.size + } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { + throw err + } + + await rm(filepath, { recursive: true, force: true }) + contentInfo.valid = false + } + return contentInfo +} + +async function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function rebuildBucket (cache, bucket, stats, opts) { + await truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
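+  // (truncate() empties the bucket file up front; the awaited index.insert
+  // calls in this plain for-loop then re-append surviving entries one at a
+  // time, which is what keeps the rebuild serialized)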
+ for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + time: entry.time, + }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) +} + +async function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + return writeFile(verifile, `${Date.now()}`) +} + +module.exports.lastRun = lastRun + +async function lastRun (cache) { + const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) +} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE new file mode 100644 index 0000000000000..de3226673c387 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js new file mode 100644 index 0000000000000..4af9ddee463f4 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js @@ -0,0 +1,203 @@ +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? 
parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m) return [str]; + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + if (/\$$/.test(m.pre)) { + for (var k = 0; k < post.length; k++) { + var expansion = pre+ '{' + m.body + '}' + post[k]; + expansions.push(expansion); + } + } else { + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = []; + + for (var j = 0; j < n.length; j++) { + N.push.apply(N, expand(n[j], false)); + } + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + } + + return expansions; +} + diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json new file mode 100644 index 0000000000000..7097d41e39de5 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json @@ -0,0 +1,46 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "2.0.1", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0" + }, + "devDependencies": { + "@c4312/matcha": "^1.3.1", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE new file mode 100644 index 0000000000000..ec7df93329abf --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2009-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json
new file mode 100644
index 0000000000000..c15df94a3582b
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json
@@ -0,0 +1,4 @@
+{
+  "version": "10.3.3",
+  "type": "commonjs"
+}
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js
new file mode 100755
index 0000000000000..4a8a88f2734d2
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js
@@ -0,0 +1,270 @@
+#!/usr/bin/env node
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const foreground_child_1 = require("foreground-child");
+const fs_1 = require("fs");
+const jackspeak_1 = require("jackspeak");
+const package_json_1 = require("../package.json");
+const index_js_1 = require("./index.js");
+const j = (0, jackspeak_1.jack)({
+    usage: 'glob [options] [<pattern> [<pattern> ...]]',
+})
+    .description(`
+    Glob v${package_json_1.version}
+
+    Expand the positional glob expression arguments into any matching file
+    system paths found.
+  `)
+    .opt({
+    cmd: {
+        short: 'c',
+        hint: 'command',
+        description: `Run the command provided, passing the glob expression
+                  matches as arguments.`,
+    },
+})
+    .opt({
+    default: {
+        short: 'p',
+        hint: 'pattern',
+        description: `If no positional arguments are provided, glob will use
+                  this pattern`,
+    },
+})
+    .flag({
+    all: {
+        short: 'A',
+        description: `By default, the glob cli command will not expand any
+                  arguments that are an exact match to a file on disk.
+
+                  This prevents double-expanding, in case the shell expands
+                  an argument whose filename is a glob expression.
+
+                  For example, if 'app/*.ts' would match 'app/[id].ts', then
+                  on Windows powershell or cmd.exe, 'glob app/*.ts' will
+                  expand to 'app/[id].ts', as expected. However, in posix
+                  shells such as bash or zsh, the shell will first expand
+                  'app/*.ts' to a list of filenames. Then glob will look
+                  for a file matching 'app/[id].ts' (ie, 'app/i.ts' or
+                  'app/d.ts'), which is unexpected.
+
+                  Setting '--all' prevents this behavior, causing glob
+                  to treat ALL patterns as glob expressions to be expanded,
+                  even if they are an exact match to a file on disk.
+
+                  When setting this option, be sure to enquote arguments
+                  so that the shell will not expand them prior to passing
+                  them to the glob command process.
+                  `,
+    },
+    absolute: {
+        short: 'a',
+        description: 'Expand to absolute paths',
+    },
+    'dot-relative': {
+        short: 'd',
+        description: `Prepend './' on relative matches`,
+    },
+    mark: {
+        short: 'm',
+        description: `Append a / on any directories matched`,
+    },
+    posix: {
+        short: 'x',
+        description: `Always resolve to posix style paths, using '/' as the
+                  directory separator, even on Windows. Drive letter
+                  absolute matches on Windows will be expanded to their
+                  full resolved UNC paths, eg instead of 'C:\\foo\\bar',
+                  it will expand to '//?/C:/foo/bar'.
+                  `,
+    },
+    follow: {
+        short: 'f',
+        description: `Follow symlinked directories when expanding '**'`,
+    },
+    realpath: {
+        short: 'R',
+        description: `Call 'fs.realpath' on all of the results. In the case
+                  of an entry that cannot be resolved, the entry is
+                  omitted. 
This incurs a slight performance penalty, of + course, because of the added system calls.`, + }, + stat: { + short: 's', + description: `Call 'fs.lstat' on all entries, whether required or not + to determine if it's a valid match.`, + }, + 'match-base': { + short: 'b', + description: `Perform a basename-only match if the pattern does not + contain any slash characters. That is, '*.js' would be + treated as equivalent to '**/*.js', matching js files + in all directories. + `, + }, + dot: { + description: `Allow patterns to match files/directories that start + with '.', even if the pattern does not start with '.' + `, + }, + nobrace: { + description: 'Do not expand {...} patterns', + }, + nocase: { + description: `Perform a case-insensitive match. This defaults to + 'true' on macOS and Windows platforms, and false on + all others. + + Note: 'nocase' should only be explicitly set when it is + known that the filesystem's case sensitivity differs + from the platform default. If set 'true' on + case-insensitive file systems, then the walk may return + more or less results than expected. + `, + }, + nodir: { + description: `Do not match directories, only files. + + Note: to *only* match directories, append a '/' at the + end of the pattern. + `, + }, + noext: { + description: `Do not expand extglob patterns, such as '+(a|b)'`, + }, + noglobstar: { + description: `Do not expand '**' against multiple path portions. + Ie, treat it as a normal '*' instead.`, + }, + 'windows-path-no-escape': { + description: `Use '\\' as a path separator *only*, and *never* as an + escape character. If set, all '\\' characters are + replaced with '/' in the pattern.`, + }, +}) + .num({ + 'max-depth': { + short: 'D', + description: `Maximum depth to traverse from the current + working directory`, + }, +}) + .opt({ + cwd: { + short: 'C', + description: 'Current working directory to execute/match in', + default: process.cwd(), + }, + root: { + short: 'r', + description: `A string path resolved against the 'cwd', which is + used as the starting point for absolute patterns that + start with '/' (but not drive letters or UNC paths + on Windows). + + Note that this *doesn't* necessarily limit the walk to + the 'root' directory, and doesn't affect the cwd + starting point for non-absolute patterns. A pattern + containing '..' will still be able to traverse out of + the root directory, if it is not an actual root directory + on the filesystem, and any non-absolute patterns will + still be matched in the 'cwd'. + + To start absolute and non-absolute patterns in the same + path, you can use '--root=' to set it to the empty + string. However, be aware that on Windows systems, a + pattern like 'x:/*' or '//host/share/*' will *always* + start in the 'x:/' or '//host/share/' directory, + regardless of the --root setting. + `, + }, + platform: { + description: `Defaults to the value of 'process.platform' if + available, or 'linux' if not. 
Setting --platform=win32 + on non-Windows systems may cause strange behavior!`, + validate: v => new Set([ + 'aix', + 'android', + 'darwin', + 'freebsd', + 'haiku', + 'linux', + 'openbsd', + 'sunos', + 'win32', + 'cygwin', + 'netbsd', + ]).has(v), + }, +}) + .optList({ + ignore: { + short: 'i', + description: `Glob patterns to ignore`, + }, +}) + .flag({ + debug: { + short: 'v', + description: `Output a huge amount of noisy debug information about + patterns as they are parsed and used to match files.`, + }, +}) + .flag({ + help: { + short: 'h', + description: 'Show this usage information', + }, +}); +try { + const { positionals, values } = j.parse(); + if (values.help) { + console.log(j.usage()); + process.exit(0); + } + if (positionals.length === 0 && !values.default) + throw 'No patterns provided'; + if (positionals.length === 0 && values.default) + positionals.push(values.default); + const patterns = values.all + ? positionals + : positionals.filter(p => !(0, fs_1.existsSync)(p)); + const matches = values.all ? [] : positionals.filter(p => (0, fs_1.existsSync)(p)); + const stream = (0, index_js_1.globStream)(patterns, { + absolute: values.absolute, + cwd: values.cwd, + dot: values.dot, + dotRelative: values['dot-relative'], + follow: values.follow, + ignore: values.ignore, + mark: values.mark, + matchBase: values['match-base'], + maxDepth: values['max-depth'], + nobrace: values.nobrace, + nocase: values.nocase, + nodir: values.nodir, + noext: values.noext, + noglobstar: values.noglobstar, + platform: values.platform, + realpath: values.realpath, + root: values.root, + stat: values.stat, + debug: values.debug, + posix: values.posix, + }); + const cmd = values.cmd; + if (!cmd) { + matches.forEach(m => console.log(m)); + stream.on('data', f => console.log(f)); + } + else { + stream.on('data', f => matches.push(f)); + stream.on('end', () => (0, foreground_child_1.foregroundChild)(cmd, matches, { shell: true })); + } +} +catch (e) { + console.error(j.usage()); + console.error(e instanceof Error ? e.message : String(e)); + process.exit(1); +} +//# sourceMappingURL=bin.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js new file mode 100644 index 0000000000000..eb37c6b9a6601 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js @@ -0,0 +1,238 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Glob = void 0; +const minimatch_1 = require("minimatch"); +const path_scurry_1 = require("path-scurry"); +const url_1 = require("url"); +const pattern_js_1 = require("./pattern.js"); +const walker_js_1 = require("./walker.js"); +// if no process global, just call it linux. +// so we default to case-sensitive, / separators +const defaultPlatform = typeof process === 'object' && + process && + typeof process.platform === 'string' + ? process.platform + : 'linux'; +/** + * An object that can perform glob pattern traversals. + */ +class Glob { + absolute; + cwd; + root; + dot; + dotRelative; + follow; + ignore; + magicalBraces; + mark; + matchBase; + maxDepth; + nobrace; + nocase; + nodir; + noext; + noglobstar; + pattern; + platform; + realpath; + scurry; + stat; + signal; + windowsPathsNoEscape; + withFileTypes; + /** + * The options provided to the constructor. + */ + opts; + /** + * An array of parsed immutable {@link Pattern} objects. 
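+     * (Roughly one per expanded pattern variant: a single input string can
+     * contribute several entries here, since minimatch's brace expansion
+     * may produce multiple match rows for it.)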
+ */ + patterns; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. + */ + constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ + this.withFileTypes = !!opts.withFileTypes; + this.signal = opts.signal; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.dotRelative = !!opts.dotRelative; + this.nodir = !!opts.nodir; + this.mark = !!opts.mark; + if (!opts.cwd) { + this.cwd = ''; + } + else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { + opts.cwd = (0, url_1.fileURLToPath)(opts.cwd); + } + this.cwd = opts.cwd || ''; + this.root = opts.root; + this.magicalBraces = !!opts.magicalBraces; + this.nobrace = !!opts.nobrace; + this.noext = !!opts.noext; + this.realpath = !!opts.realpath; + this.absolute = opts.absolute; + this.noglobstar = !!opts.noglobstar; + this.matchBase = !!opts.matchBase; + this.maxDepth = + typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity; + this.stat = !!opts.stat; + this.ignore = opts.ignore; + if (this.withFileTypes && this.absolute !== undefined) { + throw new Error('cannot set absolute and withFileTypes:true'); + } + if (typeof pattern === 'string') { + pattern = [pattern]; + } + this.windowsPathsNoEscape = + !!opts.windowsPathsNoEscape || + opts.allowWindowsEscape === false; + if (this.windowsPathsNoEscape) { + pattern = pattern.map(p => p.replace(/\\/g, '/')); + } + if (this.matchBase) { + if (opts.noglobstar) { + throw new TypeError('base matching requires globstar'); + } + pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`)); + } + this.pattern = pattern; + this.platform = opts.platform || defaultPlatform; + this.opts = { ...opts, platform: this.platform }; + if (opts.scurry) { + this.scurry = opts.scurry; + if (opts.nocase !== undefined && + opts.nocase !== opts.scurry.nocase) { + throw new Error('nocase option contradicts provided scurry option'); + } + } + else { + const Scurry = opts.platform === 'win32' + ? path_scurry_1.PathScurryWin32 + : opts.platform === 'darwin' + ? path_scurry_1.PathScurryDarwin + : opts.platform + ? path_scurry_1.PathScurryPosix + : path_scurry_1.PathScurry; + this.scurry = new Scurry(this.cwd, { + nocase: opts.nocase, + fs: opts.fs, + }); + } + this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. 
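+        // (illustration, not code used here: with nocase:true on a
+        // case-sensitive fs, a literal portion like 'abc' must match as
+        // something akin to /^[aA][bB][cC]$/ instead of by string equality;
+        // minimatch builds the actual regexp)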
+ const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; + const mmo = { + // default nocase based on platform + ...opts, + dot: this.dot, + matchBase: this.matchBase, + nobrace: this.nobrace, + nocase: this.nocase, + nocaseMagicOnly, + nocomment: true, + noext: this.noext, + nonegate: true, + optimizationLevel: 2, + platform: this.platform, + windowsPathsNoEscape: this.windowsPathsNoEscape, + debug: !!this.opts.debug, + }; + const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo)); + const [matchSet, globParts] = mms.reduce((set, m) => { + set[0].push(...m.set); + set[1].push(...m.globParts); + return set; + }, [[], []]); + this.patterns = matchSet.map((set, i) => { + return new pattern_js_1.Pattern(set, globParts[i], 0, this.platform); + }); + } + async walk() { + // Walkers always return array of Path objects, so we just have to + // coerce them into the right shape. It will have already called + // realpath() if the option was set to do so, so we know that's cached. + // start out knowing the cwd, at least + return [ + ...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).walk()), + ]; + } + walkSync() { + return [ + ...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).walkSync(), + ]; + } + stream() { + return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).stream(); + } + streamSync() { + return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).streamSync(); + } + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync() { + return this.streamSync()[Symbol.iterator](); + } + [Symbol.iterator]() { + return this.iterateSync(); + } + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. + */ + iterate() { + return this.stream()[Symbol.asyncIterator](); + } + [Symbol.asyncIterator]() { + return this.iterate(); + } +} +exports.Glob = Glob; +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js new file mode 100644 index 0000000000000..0918bd57e0f1c --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.hasMagic = void 0; +const minimatch_1 = require("minimatch"); +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. 
+ * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. + */ +const hasMagic = (pattern, options = {}) => { + if (!Array.isArray(pattern)) { + pattern = [pattern]; + } + for (const p of pattern) { + if (new minimatch_1.Minimatch(p, options).hasMagic()) + return true; + } + return false; +}; +exports.hasMagic = hasMagic; +//# sourceMappingURL=has-magic.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js new file mode 100644 index 0000000000000..0cbcca335e1cc --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js @@ -0,0 +1,103 @@ +"use strict"; +// give it a pattern, and it'll be able to tell you if +// a given path should be ignored. +// Ignoring a path ignores its children if the pattern ends in /** +// Ignores are always parsed in dot:true mode +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Ignore = void 0; +const minimatch_1 = require("minimatch"); +const pattern_js_1 = require("./pattern.js"); +const defaultPlatform = typeof process === 'object' && + process && + typeof process.platform === 'string' + ? process.platform + : 'linux'; +/** + * Class used to process ignored patterns + */ +class Ignore { + relative; + relativeChildren; + absolute; + absoluteChildren; + constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { + this.relative = []; + this.absolute = []; + this.relativeChildren = []; + this.absoluteChildren = []; + const mmopts = { + dot: true, + nobrace, + nocase, + noext, + noglobstar, + optimizationLevel: 2, + platform, + nocomment: true, + nonegate: true, + }; + // this is a little weird, but it gives us a clean set of optimized + // minimatch matchers, without getting tripped up if one of them + // ends in /** inside a brace section, and it's only inefficient at + // the start of the walk, not along it. + // It'd be nice if the Pattern class just had a .test() method, but + // handling globstars is a bit of a pita, and that code already lives + // in minimatch anyway. + // Another way would be if maybe Minimatch could take its set/globParts + // as an option, and then we could at least just use Pattern to test + // for absolute-ness. + // Yet another way, Minimatch could take an array of glob strings, and + // a cwd option, and do the right thing. 
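+        // (example: ignore: 'node_modules/**' yields a matcher pushed onto
+        // `relative`, and since its last glob part is '**' it also lands in
+        // `relativeChildren`, letting childrenIgnored() prune that whole
+        // subtree without walking it)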
+        for (const ign of ignored) {
+            const mm = new minimatch_1.Minimatch(ign, mmopts);
+            for (let i = 0; i < mm.set.length; i++) {
+                const parsed = mm.set[i];
+                const globParts = mm.globParts[i];
+                const p = new pattern_js_1.Pattern(parsed, globParts, 0, platform);
+                const m = new minimatch_1.Minimatch(p.globString(), mmopts);
+                const children = globParts[globParts.length - 1] === '**';
+                const absolute = p.isAbsolute();
+                if (absolute)
+                    this.absolute.push(m);
+                else
+                    this.relative.push(m);
+                if (children) {
+                    if (absolute)
+                        this.absoluteChildren.push(m);
+                    else
+                        this.relativeChildren.push(m);
+                }
+            }
+        }
+    }
+    ignored(p) {
+        const fullpath = p.fullpath();
+        const fullpaths = `${fullpath}/`;
+        const relative = p.relative() || '.';
+        const relatives = `${relative}/`;
+        for (const m of this.relative) {
+            if (m.match(relative) || m.match(relatives))
+                return true;
+        }
+        for (const m of this.absolute) {
+            if (m.match(fullpath) || m.match(fullpaths))
+                return true;
+        }
+        return false;
+    }
+    childrenIgnored(p) {
+        const fullpath = p.fullpath() + '/';
+        const relative = (p.relative() || '.') + '/';
+        for (const m of this.relativeChildren) {
+            if (m.match(relative))
+                return true;
+        }
+        for (const m of this.absoluteChildren) {
+            if (m.match(fullpath))
+                return true;
+        }
+        return false;
+    }
+}
+exports.Ignore = Ignore;
+//# sourceMappingURL=ignore.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js
new file mode 100644
index 0000000000000..71c31c03dd339
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js
@@ -0,0 +1,68 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.glob = exports.hasMagic = exports.Glob = exports.unescape = exports.escape = exports.sync = exports.iterate = exports.iterateSync = exports.stream = exports.streamSync = exports.globIterate = exports.globIterateSync = exports.globSync = exports.globStream = exports.globStreamSync = void 0;
+const minimatch_1 = require("minimatch");
+const glob_js_1 = require("./glob.js");
+const has_magic_js_1 = require("./has-magic.js");
+function globStreamSync(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).streamSync();
+}
+exports.globStreamSync = globStreamSync;
+function globStream(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).stream();
+}
+exports.globStream = globStream;
+function globSync(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).walkSync();
+}
+exports.globSync = globSync;
+async function glob_(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).walk();
+}
+function globIterateSync(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).iterateSync();
+}
+exports.globIterateSync = globIterateSync;
+function globIterate(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).iterate();
+}
+exports.globIterate = globIterate;
+// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc
+exports.streamSync = globStreamSync;
+exports.stream = Object.assign(globStream, { sync: globStreamSync });
+exports.iterateSync = globIterateSync;
+exports.iterate = Object.assign(globIterate, {
+    sync: globIterateSync,
+});
+exports.sync = Object.assign(globSync, {
+    stream: globStreamSync,
+    iterate: globIterateSync,
+});
+/* c8 ignore start */
+var minimatch_2 = require("minimatch"); 
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return minimatch_2.escape; } }); +Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return minimatch_2.unescape; } }); +var glob_js_2 = require("./glob.js"); +Object.defineProperty(exports, "Glob", { enumerable: true, get: function () { return glob_js_2.Glob; } }); +var has_magic_js_2 = require("./has-magic.js"); +Object.defineProperty(exports, "hasMagic", { enumerable: true, get: function () { return has_magic_js_2.hasMagic; } }); +/* c8 ignore stop */ +exports.glob = Object.assign(glob_, { + glob: glob_, + globSync, + sync: exports.sync, + globStream, + stream: exports.stream, + globStreamSync, + streamSync: exports.streamSync, + globIterate, + iterate: exports.iterate, + globIterateSync, + iterateSync: exports.iterateSync, + Glob: glob_js_1.Glob, + hasMagic: has_magic_js_1.hasMagic, + escape: minimatch_1.escape, + unescape: minimatch_1.unescape, +}); +exports.glob.glob = exports.glob; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js new file mode 100644 index 0000000000000..181371293d860 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js @@ -0,0 +1,219 @@ +"use strict"; +// this is just a very light wrapper around 2 arrays with an offset index +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pattern = void 0; +const minimatch_1 = require("minimatch"); +const isPatternList = (pl) => pl.length >= 1; +const isGlobList = (gl) => gl.length >= 1; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +class Pattern { + #patternList; + #globList; + #index; + length; + #platform; + #rest; + #globString; + #isDrive; + #isUNC; + #isAbsolute; + #followGlobstar = true; + constructor(patternList, globList, index, platform) { + if (!isPatternList(patternList)) { + throw new TypeError('empty pattern list'); + } + if (!isGlobList(globList)) { + throw new TypeError('empty glob list'); + } + if (globList.length !== patternList.length) { + throw new TypeError('mismatched pattern list and glob list lengths'); + } + this.length = patternList.length; + if (index < 0 || index >= this.length) { + throw new TypeError('index out of range'); + } + this.#patternList = patternList; + this.#globList = globList; + this.#index = index; + this.#platform = platform; + // normalize root entries of absolute patterns on initial creation. 
+        if (this.#index === 0) {
+            // c: => ['c:/']
+            // C:/ => ['C:/']
+            // C:/x => ['C:/', 'x']
+            // //host/share => ['//host/share/']
+            // //host/share/ => ['//host/share/']
+            // //host/share/x => ['//host/share/', 'x']
+            // /etc => ['/', 'etc']
+            // / => ['/']
+            if (this.isUNC()) {
+                // '' / '' / 'host' / 'share'
+                const [p0, p1, p2, p3, ...prest] = this.#patternList;
+                const [g0, g1, g2, g3, ...grest] = this.#globList;
+                if (prest[0] === '') {
+                    // ends in /
+                    prest.shift();
+                    grest.shift();
+                }
+                const p = [p0, p1, p2, p3, ''].join('/');
+                const g = [g0, g1, g2, g3, ''].join('/');
+                this.#patternList = [p, ...prest];
+                this.#globList = [g, ...grest];
+                this.length = this.#patternList.length;
+            }
+            else if (this.isDrive() || this.isAbsolute()) {
+                const [p1, ...prest] = this.#patternList;
+                const [g1, ...grest] = this.#globList;
+                if (prest[0] === '') {
+                    // ends in /
+                    prest.shift();
+                    grest.shift();
+                }
+                const p = p1 + '/';
+                const g = g1 + '/';
+                this.#patternList = [p, ...prest];
+                this.#globList = [g, ...grest];
+                this.length = this.#patternList.length;
+            }
+        }
+    }
+    /**
+     * The first entry in the parsed list of patterns
+     */
+    pattern() {
+        return this.#patternList[this.#index];
+    }
+    /**
+     * true if pattern() returns a string
+     */
+    isString() {
+        return typeof this.#patternList[this.#index] === 'string';
+    }
+    /**
+     * true if pattern() returns GLOBSTAR
+     */
+    isGlobstar() {
+        return this.#patternList[this.#index] === minimatch_1.GLOBSTAR;
+    }
+    /**
+     * true if pattern() returns a regexp
+     */
+    isRegExp() {
+        return this.#patternList[this.#index] instanceof RegExp;
+    }
+    /**
+     * The /-joined set of glob parts that make up this pattern
+     */
+    globString() {
+        return (this.#globString =
+            this.#globString ||
+                (this.#index === 0
+                    ? this.isAbsolute()
+                        ? this.#globList[0] + this.#globList.slice(1).join('/')
+                        : this.#globList.join('/')
+                    : this.#globList.slice(this.#index).join('/')));
+    }
+    /**
+     * true if there are more pattern parts after this one
+     */
+    hasMore() {
+        return this.length > this.#index + 1;
+    }
+    /**
+     * The rest of the pattern after this part, or null if this is the end
+     */
+    rest() {
+        if (this.#rest !== undefined)
+            return this.#rest;
+        if (!this.hasMore())
+            return (this.#rest = null);
+        this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
+        this.#rest.#isAbsolute = this.#isAbsolute;
+        this.#rest.#isUNC = this.#isUNC;
+        this.#rest.#isDrive = this.#isDrive;
+        return this.#rest;
+    }
+    /**
+     * true if the pattern represents a //unc/path/ on windows
+     */
+    isUNC() {
+        const pl = this.#patternList;
+        return this.#isUNC !== undefined
+            ? this.#isUNC
+            : (this.#isUNC =
+                this.#platform === 'win32' &&
+                    this.#index === 0 &&
+                    pl[0] === '' &&
+                    pl[1] === '' &&
+                    typeof pl[2] === 'string' &&
+                    !!pl[2] &&
+                    typeof pl[3] === 'string' &&
+                    !!pl[3]);
+    }
+    // pattern like C:/...
+    // split = ['C:', ...]
+    // XXX: would be nice to handle patterns like `c:*` to test the cwd
+    // in c: for *, but I don't know of a way to even figure out what that
+    // cwd is without actually chdir'ing into it?
+    /**
+     * True if the pattern starts with a drive letter on Windows
+     */
+    isDrive() {
+        const pl = this.#patternList;
+        return this.#isDrive !== undefined
+            ? this.#isDrive
+            : (this.#isDrive =
+                this.#platform === 'win32' &&
+                    this.#index === 0 &&
+                    this.length > 1 &&
+                    typeof pl[0] === 'string' &&
+                    /^[a-z]:$/i.test(pl[0]));
+    }
+    // pattern = '/' or '/...' or '/x/...'
+    // split = ['', ''] or ['', ...] or ['', 'x', ...] 
+ // Drive and UNC both considered absolute on windows + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute() { + const pl = this.#patternList; + return this.#isAbsolute !== undefined + ? this.#isAbsolute + : (this.#isAbsolute = + (pl[0] === '' && pl.length > 1) || + this.isDrive() || + this.isUNC()); + } + /** + * consume the root of the pattern, and return it + */ + root() { + const p = this.#patternList[0]; + return typeof p === 'string' && this.isAbsolute() && this.#index === 0 + ? p + : ''; + } + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. + */ + checkFollowGlobstar() { + return !(this.#index === 0 || + !this.isGlobstar() || + !this.#followGlobstar); + } + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar() { + if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) + return false; + this.#followGlobstar = false; + return true; + } +} +exports.Pattern = Pattern; +//# sourceMappingURL=pattern.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js new file mode 100644 index 0000000000000..bd067e9b9033d --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js @@ -0,0 +1,309 @@ +"use strict"; +// synchronous utility for filtering entries and calculating subwalks +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Processor = exports.SubWalks = exports.MatchRecord = exports.HasWalkedCache = void 0; +const minimatch_1 = require("minimatch"); +/** + * A cache of which patterns have been processed for a given Path + */ +class HasWalkedCache { + store; + constructor(store = new Map()) { + this.store = store; + } + copy() { + return new HasWalkedCache(new Map(this.store)); + } + hasWalked(target, pattern) { + return this.store.get(target.fullpath())?.has(pattern.globString()); + } + storeWalked(target, pattern) { + const fullpath = target.fullpath(); + const cached = this.store.get(fullpath); + if (cached) + cached.add(pattern.globString()); + else + this.store.set(fullpath, new Set([pattern.globString()])); + } +} +exports.HasWalkedCache = HasWalkedCache; +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +class MatchRecord { + store = new Map(); + add(target, absolute, ifDir) { + const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); + const current = this.store.get(target); + this.store.set(target, current === undefined ? n : n & current); + } + // match, absolute, ifdir + entries() { + return [...this.store.entries()].map(([path, n]) => [ + path, + !!(n & 2), + !!(n & 1), + ]); + } +} +exports.MatchRecord = MatchRecord; +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. 
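+ * (Concretely: a Map from each Path entry to the array of Patterns that
+ * still have to be matched beneath it.)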
+ */ +class SubWalks { + store = new Map(); + add(target, pattern) { + if (!target.canReaddir()) { + return; + } + const subs = this.store.get(target); + if (subs) { + if (!subs.find(p => p.globString() === pattern.globString())) { + subs.push(pattern); + } + } + else + this.store.set(target, [pattern]); + } + get(target) { + const subs = this.store.get(target); + /* c8 ignore start */ + if (!subs) { + throw new Error('attempting to walk unknown path'); + } + /* c8 ignore stop */ + return subs; + } + entries() { + return this.keys().map(k => [k, this.store.get(k)]); + } + keys() { + return [...this.store.keys()].filter(t => t.canReaddir()); + } +} +exports.SubWalks = SubWalks; +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. + */ +class Processor { + hasWalkedCache; + matches = new MatchRecord(); + subwalks = new SubWalks(); + patterns; + follow; + dot; + opts; + constructor(opts, hasWalkedCache) { + this.opts = opts; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.hasWalkedCache = hasWalkedCache + ? hasWalkedCache.copy() + : new HasWalkedCache(); + } + processPatterns(target, patterns) { + this.patterns = patterns; + const processingSet = patterns.map(p => [target, p]); + // map of paths to the magic-starting subwalks they need to walk + // first item in patterns is the filter + for (let [t, pattern] of processingSet) { + this.hasWalkedCache.storeWalked(t, pattern); + const root = pattern.root(); + const absolute = pattern.isAbsolute() && this.opts.absolute !== false; + // start absolute patterns at root + if (root) { + t = t.resolve(root === '/' && this.opts.root !== undefined + ? this.opts.root + : root); + const rest = pattern.rest(); + if (!rest) { + this.matches.add(t, true, false); + continue; + } + else { + pattern = rest; + } + } + if (t.isENOENT()) + continue; + let p; + let rest; + let changed = false; + while (typeof (p = pattern.pattern()) === 'string' && + (rest = pattern.rest())) { + const c = t.resolve(p); + // we can be reasonably sure that .. is a readable dir + if (c.isUnknown() && p !== '..') + break; + t = c; + pattern = rest; + changed = true; + } + p = pattern.pattern(); + rest = pattern.rest(); + if (changed) { + if (this.hasWalkedCache.hasWalked(t, pattern)) + continue; + this.hasWalkedCache.storeWalked(t, pattern); + } + // now we have either a final string for a known entry, + // more strings for an unknown entry, + // or a pattern starting with magic, mounted on t. + if (typeof p === 'string') { + // must be final entry + if (!rest) { + const ifDir = p === '..' || p === '' || p === '.'; + this.matches.add(t.resolve(p), absolute, ifDir); + } + else { + this.subwalks.add(t, pattern); + } + continue; + } + else if (p === minimatch_1.GLOBSTAR) { + // if no rest, match and subwalk pattern + // if rest, process rest and subwalk pattern + // if it's a symlink, but we didn't get here by way of a + // globstar match (meaning it's the first time THIS globstar + // has traversed a symlink), then we follow it. Otherwise, stop. + if (!t.isSymbolicLink() || + this.follow || + pattern.checkFollowGlobstar()) { + this.subwalks.add(t, pattern); + } + const rp = rest?.pattern(); + const rrest = rest?.rest(); + if (!rest || ((rp === '' || rp === '.') && !rrest)) { + // only HAS to be a dir if it ends in **/ or **/. + // but ending in ** will match files as well. 
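+                    // (e.g. 'src/**/' can only match directories, while
+                    // 'src/**' matches files too; hence ifDir is true only
+                    // for the trailing '' and '.' parts)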
+ this.matches.add(t, absolute, rp === '' || rp === '.'); + } + else { + if (rp === '..') { + // this would mean you're matching **/.. at the fs root, + // and no thanks, I'm not gonna test that specific case. + /* c8 ignore start */ + const tp = t.parent || t; + /* c8 ignore stop */ + if (!rrest) + this.matches.add(tp, absolute, true); + else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { + this.subwalks.add(tp, rrest); + } + } + } + } + else if (p instanceof RegExp) { + this.subwalks.add(t, pattern); + } + } + return this; + } + subwalkTargets() { + return this.subwalks.keys(); + } + child() { + return new Processor(this.opts, this.hasWalkedCache); + } + // return a new Processor containing the subwalks for each + // child entry, and a set of matches, and + // a hasWalkedCache that's a copy of this one + // then we're going to call + filterEntries(parent, entries) { + const patterns = this.subwalks.get(parent); + // put matches and entry walks into the results processor + const results = this.child(); + for (const e of entries) { + for (const pattern of patterns) { + const absolute = pattern.isAbsolute(); + const p = pattern.pattern(); + const rest = pattern.rest(); + if (p === minimatch_1.GLOBSTAR) { + results.testGlobstar(e, pattern, rest, absolute); + } + else if (p instanceof RegExp) { + results.testRegExp(e, p, rest, absolute); + } + else { + results.testString(e, p, rest, absolute); + } + } + } + return results; + } + testGlobstar(e, pattern, rest, absolute) { + if (this.dot || !e.name.startsWith('.')) { + if (!pattern.hasMore()) { + this.matches.add(e, absolute, false); + } + if (e.canReaddir()) { + // if we're in follow mode or it's not a symlink, just keep + // testing the same pattern. If there's more after the globstar, + // then this symlink consumes the globstar. If not, then we can + // follow at most ONE symlink along the way, so we mark it, which + // also checks to ensure that it wasn't already marked. + if (this.follow || !e.isSymbolicLink()) { + this.subwalks.add(e, pattern); + } + else if (e.isSymbolicLink()) { + if (rest && pattern.checkFollowGlobstar()) { + this.subwalks.add(e, rest); + } + else if (pattern.markFollowGlobstar()) { + this.subwalks.add(e, pattern); + } + } + } + } + // if the NEXT thing matches this entry, then also add + // the rest. + if (rest) { + const rp = rest.pattern(); + if (typeof rp === 'string' && + // dots and empty were handled already + rp !== '..' && + rp !== '' && + rp !== '.') { + this.testString(e, rp, rest.rest(), absolute); + } + else if (rp === '..') { + /* c8 ignore start */ + const ep = e.parent || e; + /* c8 ignore stop */ + this.subwalks.add(ep, rest); + } + else if (rp instanceof RegExp) { + this.testRegExp(e, rp, rest.rest(), absolute); + } + } + } + testRegExp(e, p, rest, absolute) { + if (!p.test(e.name)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } + testString(e, p, rest, absolute) { + // should never happen? 
+ if (!e.isNamed(p)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } +} +exports.Processor = Processor; +//# sourceMappingURL=processor.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js new file mode 100644 index 0000000000000..9651ce1164016 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js @@ -0,0 +1,358 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GlobStream = exports.GlobWalker = exports.GlobUtil = void 0; +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. + * + * @module + */ +const minipass_1 = require("minipass"); +const ignore_js_1 = require("./ignore.js"); +const processor_js_1 = require("./processor.js"); +const makeIgnore = (ignore, opts) => typeof ignore === 'string' + ? new ignore_js_1.Ignore([ignore], opts) + : Array.isArray(ignore) + ? new ignore_js_1.Ignore(ignore, opts) + : ignore; +/** + * basic walking utilities that all the glob walker types use + */ +class GlobUtil { + path; + patterns; + opts; + seen = new Set(); + paused = false; + aborted = false; + #onResume = []; + #ignore; + #sep; + signal; + maxDepth; + constructor(patterns, path, opts) { + this.patterns = patterns; + this.path = path; + this.opts = opts; + this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/'; + if (opts.ignore) { + this.#ignore = makeIgnore(opts.ignore, opts); + } + // ignore, always set with maxDepth, but it's optional on the + // GlobOptions type + /* c8 ignore start */ + this.maxDepth = opts.maxDepth || Infinity; + /* c8 ignore stop */ + if (opts.signal) { + this.signal = opts.signal; + this.signal.addEventListener('abort', () => { + this.#onResume.length = 0; + }); + } + } + #ignored(path) { + return this.seen.has(path) || !!this.#ignore?.ignored?.(path); + } + #childrenIgnored(path) { + return !!this.#ignore?.childrenIgnored?.(path); + } + // backpressure mechanism + pause() { + this.paused = true; + } + resume() { + /* c8 ignore start */ + if (this.signal?.aborted) + return; + /* c8 ignore stop */ + this.paused = false; + let fn = undefined; + while (!this.paused && (fn = this.#onResume.shift())) { + fn(); + } + } + onResume(fn) { + if (this.signal?.aborted) + return; + /* c8 ignore start */ + if (!this.paused) { + fn(); + } + else { + /* c8 ignore stop */ + this.#onResume.push(fn); + } + } + // do the requisite realpath/stat checking, and return the path + // to add or undefined to filter it out. + async matchCheck(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || (await e.realpath()); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + return this.matchCheckTest(needStat ? await e.lstat() : e, ifDir); + } + matchCheckTest(e, ifDir) { + return e && + (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && + (!ifDir || e.canReaddir()) && + (!this.opts.nodir || !e.isDirectory()) && + !this.#ignored(e) + ? 
e + : undefined; + } + matchCheckSync(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || e.realpathSync(); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + return this.matchCheckTest(needStat ? e.lstatSync() : e, ifDir); + } + matchFinish(e, absolute) { + if (this.#ignored(e)) + return; + const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute; + this.seen.add(e); + const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; + // ok, we have what we need! + if (this.opts.withFileTypes) { + this.matchEmit(e); + } + else if (abs) { + const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); + this.matchEmit(abs + mark); + } + else { + const rel = this.opts.posix ? e.relativePosix() : e.relative(); + const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) + ? '.' + this.#sep + : ''; + this.matchEmit(!rel ? '.' + mark : pre + rel + mark); + } + } + async match(e, absolute, ifDir) { + const p = await this.matchCheck(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + matchSync(e, absolute, ifDir) { + const p = this.matchCheckSync(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + walkCB(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2(target, patterns, new processor_js_1.Processor(this.opts), cb); + } + walkCB2(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
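+ // The completion logic below is a plain join counter: tasks starts at
+ // 1 for this call's own turn, each pending match or subwalk adds one,
+ // and cb() fires only when every addition has been paired with a
+ // completion. A standalone sketch of the same idiom (hypothetical
+ // helper, not part of this file):
+ //
+ //   const join = cb => {
+ //     let tasks = 1
+ //     const next = () => { if (--tasks === 0) cb() }
+ //     return { fork: () => (tasks++, next), done: next }
+ //   }
+ //
+ // Each async branch calls fork() before starting and invokes the
+ // returned next() when finished; done() retires the initial count.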
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const childrenCached = t.readdirCached(); + if (t.calledReaddir()) + this.walkCB3(t, childrenCached, processor, next); + else { + t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); + } + } + next(); + } + walkCB3(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2(target, patterns, processor.child(), next); + } + next(); + } + walkCBSync(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2Sync(target, patterns, new processor_js_1.Processor(this.opts), cb); + } + walkCB2Sync(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
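+ // (same join-counter shape as walkCB2 above, except matches run
+ // synchronously via matchSync and so never increment tasks; only the
+ // subwalks do)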
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const children = t.readdirSync(); + this.walkCB3Sync(t, children, processor, next); + } + next(); + } + walkCB3Sync(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2Sync(target, patterns, processor.child(), next); + } + next(); + } +} +exports.GlobUtil = GlobUtil; +class GlobWalker extends GlobUtil { + matches; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.matches = new Set(); + } + matchEmit(e) { + this.matches.add(e); + } + async walk() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + await this.path.lstat(); + } + await new Promise((res, rej) => { + this.walkCB(this.path, this.patterns, () => { + if (this.signal?.aborted) { + rej(this.signal.reason); + } + else { + res(this.matches); + } + }); + }); + return this.matches; + } + walkSync() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + // nothing for the callback to do, because this never pauses + this.walkCBSync(this.path, this.patterns, () => { + if (this.signal?.aborted) + throw this.signal.reason; + }); + return this.matches; + } +} +exports.GlobWalker = GlobWalker; +class GlobStream extends GlobUtil { + results; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.results = new minipass_1.Minipass({ + signal: this.signal, + objectMode: true, + }); + this.results.on('drain', () => this.resume()); + this.results.on('resume', () => this.resume()); + } + matchEmit(e) { + this.results.write(e); + if (!this.results.flowing) + this.pause(); + } + stream() { + const target = this.path; + if (target.isUnknown()) { + target.lstat().then(() => { + this.walkCB(target, this.patterns, () => this.results.end()); + }); + } + else { + this.walkCB(target, this.patterns, () => this.results.end()); + } + return this.results; + } + streamSync() { + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + this.walkCBSync(this.path, this.patterns, () => this.results.end()); + return this.results; + } +} +exports.GlobStream = GlobStream; +//# sourceMappingURL=walker.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js new file mode 100644 index 0000000000000..8ff26154427be --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js @@ -0,0 +1,234 @@ +import { Minimatch } from 'minimatch'; +import { PathScurry, PathScurryDarwin, PathScurryPosix, PathScurryWin32, } from 'path-scurry'; +import { fileURLToPath } from 'url'; +import { Pattern } from './pattern.js'; +import { GlobStream, GlobWalker } from './walker.js'; +// if no process global, just call it linux. 
+// so we default to case-sensitive, / separators +const defaultPlatform = typeof process === 'object' && + process && + typeof process.platform === 'string' + ? process.platform + : 'linux'; +/** + * An object that can perform glob pattern traversals. + */ +export class Glob { + absolute; + cwd; + root; + dot; + dotRelative; + follow; + ignore; + magicalBraces; + mark; + matchBase; + maxDepth; + nobrace; + nocase; + nodir; + noext; + noglobstar; + pattern; + platform; + realpath; + scurry; + stat; + signal; + windowsPathsNoEscape; + withFileTypes; + /** + * The options provided to the constructor. + */ + opts; + /** + * An array of parsed immutable {@link Pattern} objects. + */ + patterns; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. + */ + constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ + this.withFileTypes = !!opts.withFileTypes; + this.signal = opts.signal; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.dotRelative = !!opts.dotRelative; + this.nodir = !!opts.nodir; + this.mark = !!opts.mark; + if (!opts.cwd) { + this.cwd = ''; + } + else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { + opts.cwd = fileURLToPath(opts.cwd); + } + this.cwd = opts.cwd || ''; + this.root = opts.root; + this.magicalBraces = !!opts.magicalBraces; + this.nobrace = !!opts.nobrace; + this.noext = !!opts.noext; + this.realpath = !!opts.realpath; + this.absolute = opts.absolute; + this.noglobstar = !!opts.noglobstar; + this.matchBase = !!opts.matchBase; + this.maxDepth = + typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity; + this.stat = !!opts.stat; + this.ignore = opts.ignore; + if (this.withFileTypes && this.absolute !== undefined) { + throw new Error('cannot set absolute and withFileTypes:true'); + } + if (typeof pattern === 'string') { + pattern = [pattern]; + } + this.windowsPathsNoEscape = + !!opts.windowsPathsNoEscape || + opts.allowWindowsEscape === false; + if (this.windowsPathsNoEscape) { + pattern = pattern.map(p => p.replace(/\\/g, '/')); + } + if (this.matchBase) { + if (opts.noglobstar) { + throw new TypeError('base matching requires globstar'); + } + pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`)); + } + this.pattern = pattern; + this.platform = opts.platform || defaultPlatform; + this.opts = { ...opts, platform: this.platform }; + if (opts.scurry) { + this.scurry = opts.scurry; + if (opts.nocase !== undefined && + opts.nocase !== opts.scurry.nocase) { + throw new Error('nocase option contradicts provided scurry option'); + } + } + else { + const Scurry = opts.platform === 'win32' + ? PathScurryWin32 + : opts.platform === 'darwin' + ? PathScurryDarwin + : opts.platform + ? PathScurryPosix + : PathScurry; + this.scurry = new Scurry(this.cwd, { + nocase: opts.nocase, + fs: opts.fs, + }); + } + this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. 
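+ // e.g. (illustrative): nocase:true on linux compiles the non-magic
+ // portion 'aBc' to /^aBc$/i rather than a literal string, so an
+ // on-disk directory named 'AbC' is still traversed and matched.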
+ const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; + const mmo = { + // default nocase based on platform + ...opts, + dot: this.dot, + matchBase: this.matchBase, + nobrace: this.nobrace, + nocase: this.nocase, + nocaseMagicOnly, + nocomment: true, + noext: this.noext, + nonegate: true, + optimizationLevel: 2, + platform: this.platform, + windowsPathsNoEscape: this.windowsPathsNoEscape, + debug: !!this.opts.debug, + }; + const mms = this.pattern.map(p => new Minimatch(p, mmo)); + const [matchSet, globParts] = mms.reduce((set, m) => { + set[0].push(...m.set); + set[1].push(...m.globParts); + return set; + }, [[], []]); + this.patterns = matchSet.map((set, i) => { + return new Pattern(set, globParts[i], 0, this.platform); + }); + } + async walk() { + // Walkers always return array of Path objects, so we just have to + // coerce them into the right shape. It will have already called + // realpath() if the option was set to do so, so we know that's cached. + // start out knowing the cwd, at least + return [ + ...(await new GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).walk()), + ]; + } + walkSync() { + return [ + ...new GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).walkSync(), + ]; + } + stream() { + return new GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).stream(); + } + streamSync() { + return new GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).streamSync(); + } + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync() { + return this.streamSync()[Symbol.iterator](); + } + [Symbol.iterator]() { + return this.iterateSync(); + } + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. + */ + iterate() { + return this.stream()[Symbol.asyncIterator](); + } + [Symbol.asyncIterator]() { + return this.iterate(); + } +} +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js new file mode 100644 index 0000000000000..ba2321ab868d0 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js @@ -0,0 +1,23 @@ +import { Minimatch } from 'minimatch'; +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. + * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. 
When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. + */ +export const hasMagic = (pattern, options = {}) => { + if (!Array.isArray(pattern)) { + pattern = [pattern]; + } + for (const p of pattern) { + if (new Minimatch(p, options).hasMagic()) + return true; + } + return false; +}; +//# sourceMappingURL=has-magic.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js new file mode 100644 index 0000000000000..2dbaa16a11460 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js @@ -0,0 +1,99 @@ +// give it a pattern, and it'll be able to tell you if +// a given path should be ignored. +// Ignoring a path ignores its children if the pattern ends in /** +// Ignores are always parsed in dot:true mode +import { Minimatch } from 'minimatch'; +import { Pattern } from './pattern.js'; +const defaultPlatform = typeof process === 'object' && + process && + typeof process.platform === 'string' + ? process.platform + : 'linux'; +/** + * Class used to process ignored patterns + */ +export class Ignore { + relative; + relativeChildren; + absolute; + absoluteChildren; + constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { + this.relative = []; + this.absolute = []; + this.relativeChildren = []; + this.absoluteChildren = []; + const mmopts = { + dot: true, + nobrace, + nocase, + noext, + noglobstar, + optimizationLevel: 2, + platform, + nocomment: true, + nonegate: true, + }; + // this is a little weird, but it gives us a clean set of optimized + // minimatch matchers, without getting tripped up if one of them + // ends in /** inside a brace section, and it's only inefficient at + // the start of the walk, not along it. + // It'd be nice if the Pattern class just had a .test() method, but + // handling globstars is a bit of a pita, and that code already lives + // in minimatch anyway. + // Another way would be if maybe Minimatch could take its set/globParts + // as an option, and then we could at least just use Pattern to test + // for absolute-ness. + // Yet another way, Minimatch could take an array of glob strings, and + // a cwd option, and do the right thing. 
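+ // For example (illustrative): the single ignore 'node_modules/**'
+ // yields one optimized matcher pushed onto this.relative and, since
+ // its final glob part is '**', onto this.relativeChildren as well,
+ // so a matched directory prunes its whole subtree; '/tmp/**' would
+ // land in the absolute buckets instead.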
+ for (const ign of ignored) { + const mm = new Minimatch(ign, mmopts); + for (let i = 0; i < mm.set.length; i++) { + const parsed = mm.set[i]; + const globParts = mm.globParts[i]; + const p = new Pattern(parsed, globParts, 0, platform); + const m = new Minimatch(p.globString(), mmopts); + const children = globParts[globParts.length - 1] === '**'; + const absolute = p.isAbsolute(); + if (absolute) + this.absolute.push(m); + else + this.relative.push(m); + if (children) { + if (absolute) + this.absoluteChildren.push(m); + else + this.relativeChildren.push(m); + } + } + } + } + ignored(p) { + const fullpath = p.fullpath(); + const fullpaths = `${fullpath}/`; + const relative = p.relative() || '.'; + const relatives = `${relative}/`; + for (const m of this.relative) { + if (m.match(relative) || m.match(relatives)) + return true; + } + for (const m of this.absolute) { + if (m.match(fullpath) || m.match(fullpaths)) + return true; + } + return false; + } + childrenIgnored(p) { + const fullpath = p.fullpath() + '/'; + const relative = (p.relative() || '.') + '/'; + for (const m of this.relativeChildren) { + if (m.match(relative)) + return true; + } + for (const m of this.absoluteChildren) { + if (m.match(fullpath)) + return true; + } + return false; + } +} +//# sourceMappingURL=ignore.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js new file mode 100644 index 0000000000000..7b270117e740a --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js @@ -0,0 +1,56 @@ +import { escape, unescape } from 'minimatch'; +import { Glob } from './glob.js'; +import { hasMagic } from './has-magic.js'; +export function globStreamSync(pattern, options = {}) { + return new Glob(pattern, options).streamSync(); +} +export function globStream(pattern, options = {}) { + return new Glob(pattern, options).stream(); +} +export function globSync(pattern, options = {}) { + return new Glob(pattern, options).walkSync(); +} +async function glob_(pattern, options = {}) { + return new Glob(pattern, options).walk(); +} +export function globIterateSync(pattern, options = {}) { + return new Glob(pattern, options).iterateSync(); +} +export function globIterate(pattern, options = {}) { + return new Glob(pattern, options).iterate(); +} +// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc +export const streamSync = globStreamSync; +export const stream = Object.assign(globStream, { sync: globStreamSync }); +export const iterateSync = globIterateSync; +export const iterate = Object.assign(globIterate, { + sync: globIterateSync, +}); +export const sync = Object.assign(globSync, { + stream: globStreamSync, + iterate: globIterateSync, +}); +/* c8 ignore start */ +export { escape, unescape } from 'minimatch'; +export { Glob } from './glob.js'; +export { hasMagic } from './has-magic.js'; +/* c8 ignore stop */ +export const glob = Object.assign(glob_, { + glob: glob_, + globSync, + sync, + globStream, + stream, + globStreamSync, + streamSync, + globIterate, + iterate, + globIterateSync, + iterateSync, + Glob, + hasMagic, + escape, + unescape, +}); +glob.glob = glob; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/package.json new file mode 100644 index
0000000000000..5cc80943d565b --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/package.json @@ -0,0 +1,4 @@ +{ + "version": "10.3.3", + "type": "module" +} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js new file mode 100644 index 0000000000000..60aa415d92fd1 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js @@ -0,0 +1,215 @@ +// this is just a very light wrapper around 2 arrays with an offset index +import { GLOBSTAR } from 'minimatch'; +const isPatternList = (pl) => pl.length >= 1; +const isGlobList = (gl) => gl.length >= 1; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +export class Pattern { + #patternList; + #globList; + #index; + length; + #platform; + #rest; + #globString; + #isDrive; + #isUNC; + #isAbsolute; + #followGlobstar = true; + constructor(patternList, globList, index, platform) { + if (!isPatternList(patternList)) { + throw new TypeError('empty pattern list'); + } + if (!isGlobList(globList)) { + throw new TypeError('empty glob list'); + } + if (globList.length !== patternList.length) { + throw new TypeError('mismatched pattern list and glob list lengths'); + } + this.length = patternList.length; + if (index < 0 || index >= this.length) { + throw new TypeError('index out of range'); + } + this.#patternList = patternList; + this.#globList = globList; + this.#index = index; + this.#platform = platform; + // normalize root entries of absolute patterns on initial creation. + if (this.#index === 0) { + // c: => ['c:/'] + // C:/ => ['C:/'] + // C:/x => ['C:/', 'x'] + // //host/share => ['//host/share/'] + // //host/share/ => ['//host/share/'] + // //host/share/x => ['//host/share/', 'x'] + // /etc => ['/', 'etc'] + // / => ['/'] + if (this.isUNC()) { + // '' / '' / 'host' / 'share' + const [p0, p1, p2, p3, ...prest] = this.#patternList; + const [g0, g1, g2, g3, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = [p0, p1, p2, p3, ''].join('/'); + const g = [g0, g1, g2, g3, ''].join('/'); + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + else if (this.isDrive() || this.isAbsolute()) { + const [p1, ...prest] = this.#patternList; + const [g1, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = p1 + '/'; + const g = g1 + '/'; + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + } + } + /** + * The first entry in the parsed list of patterns + */ + pattern() { + return this.#patternList[this.#index]; + } + /** + * true if pattern() returns a string + */ + isString() { + return typeof this.#patternList[this.#index] === 'string'; + } + /** + * true if pattern() returns GLOBSTAR + */ + isGlobstar() { + return this.#patternList[this.#index] === GLOBSTAR; + } + /** + * true if pattern() returns a regexp + */ + isRegExp() { + return this.#patternList[this.#index] instanceof RegExp; + } + /** + * The /-joined set of glob parts that make up this pattern + */ + globString() { + return (this.#globString = + this.#globString || + (this.#index === 0 + ? this.isAbsolute() + ?
this.#globList[0] + this.#globList.slice(1).join('/') + : this.#globList.join('/') + : this.#globList.slice(this.#index).join('/'))); + } + /** + * true if there are more pattern parts after this one + */ + hasMore() { + return this.length > this.#index + 1; + } + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest() { + if (this.#rest !== undefined) + return this.#rest; + if (!this.hasMore()) + return (this.#rest = null); + this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform); + this.#rest.#isAbsolute = this.#isAbsolute; + this.#rest.#isUNC = this.#isUNC; + this.#rest.#isDrive = this.#isDrive; + return this.#rest; + } + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC() { + const pl = this.#patternList; + return this.#isUNC !== undefined + ? this.#isUNC + : (this.#isUNC = + this.#platform === 'win32' && + this.#index === 0 && + pl[0] === '' && + pl[1] === '' && + typeof pl[2] === 'string' && + !!pl[2] && + typeof pl[3] === 'string' && + !!pl[3]); + } + // pattern like C:/... + // split = ['C:', ...] + // XXX: would be nice to handle patterns like `c:*` to test the cwd + // in c: for *, but I don't know of a way to even figure out what that + // cwd is without actually chdir'ing into it? + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive() { + const pl = this.#patternList; + return this.#isDrive !== undefined + ? this.#isDrive + : (this.#isDrive = + this.#platform === 'win32' && + this.#index === 0 && + this.length > 1 && + typeof pl[0] === 'string' && + /^[a-z]:$/i.test(pl[0])); + } + // pattern = '/' or '/...' or '/x/...' + // split = ['', ''] or ['', ...] or ['', 'x', ...] + // Drive and UNC both considered absolute on windows + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute() { + const pl = this.#patternList; + return this.#isAbsolute !== undefined + ? this.#isAbsolute + : (this.#isAbsolute = + (pl[0] === '' && pl.length > 1) || + this.isDrive() || + this.isUNC()); + } + /** + * consume the root of the pattern, and return it + */ + root() { + const p = this.#patternList[0]; + return typeof p === 'string' && this.isAbsolute() && this.#index === 0 + ? p + : ''; + } + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. 
+ */ + checkFollowGlobstar() { + return !(this.#index === 0 || + !this.isGlobstar() || + !this.#followGlobstar); + } + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar() { + if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) + return false; + this.#followGlobstar = false; + return true; + } +} +//# sourceMappingURL=pattern.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js new file mode 100644 index 0000000000000..dd2228ad6761a --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js @@ -0,0 +1,302 @@ +// synchronous utility for filtering entries and calculating subwalks +import { GLOBSTAR } from 'minimatch'; +/** + * A cache of which patterns have been processed for a given Path + */ +export class HasWalkedCache { + store; + constructor(store = new Map()) { + this.store = store; + } + copy() { + return new HasWalkedCache(new Map(this.store)); + } + hasWalked(target, pattern) { + return this.store.get(target.fullpath())?.has(pattern.globString()); + } + storeWalked(target, pattern) { + const fullpath = target.fullpath(); + const cached = this.store.get(fullpath); + if (cached) + cached.add(pattern.globString()); + else + this.store.set(fullpath, new Set([pattern.globString()])); + } +} +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +export class MatchRecord { + store = new Map(); + add(target, absolute, ifDir) { + const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); + const current = this.store.get(target); + this.store.set(target, current === undefined ? n : n & current); + } + // match, absolute, ifdir + entries() { + return [...this.store.entries()].map(([path, n]) => [ + path, + !!(n & 2), + !!(n & 1), + ]); + } +} +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. + */ +export class SubWalks { + store = new Map(); + add(target, pattern) { + if (!target.canReaddir()) { + return; + } + const subs = this.store.get(target); + if (subs) { + if (!subs.find(p => p.globString() === pattern.globString())) { + subs.push(pattern); + } + } + else + this.store.set(target, [pattern]); + } + get(target) { + const subs = this.store.get(target); + /* c8 ignore start */ + if (!subs) { + throw new Error('attempting to walk unknown path'); + } + /* c8 ignore stop */ + return subs; + } + entries() { + return this.keys().map(k => [k, this.store.get(k)]); + } + keys() { + return [...this.store.keys()].filter(t => t.canReaddir()); + } +} +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. + */ +export class Processor { + hasWalkedCache; + matches = new MatchRecord(); + subwalks = new SubWalks(); + patterns; + follow; + dot; + opts; + constructor(opts, hasWalkedCache) { + this.opts = opts; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.hasWalkedCache = hasWalkedCache + ? 
hasWalkedCache.copy() + : new HasWalkedCache(); + } + processPatterns(target, patterns) { + this.patterns = patterns; + const processingSet = patterns.map(p => [target, p]); + // map of paths to the magic-starting subwalks they need to walk + // first item in patterns is the filter + for (let [t, pattern] of processingSet) { + this.hasWalkedCache.storeWalked(t, pattern); + const root = pattern.root(); + const absolute = pattern.isAbsolute() && this.opts.absolute !== false; + // start absolute patterns at root + if (root) { + t = t.resolve(root === '/' && this.opts.root !== undefined + ? this.opts.root + : root); + const rest = pattern.rest(); + if (!rest) { + this.matches.add(t, true, false); + continue; + } + else { + pattern = rest; + } + } + if (t.isENOENT()) + continue; + let p; + let rest; + let changed = false; + while (typeof (p = pattern.pattern()) === 'string' && + (rest = pattern.rest())) { + const c = t.resolve(p); + // we can be reasonably sure that .. is a readable dir + if (c.isUnknown() && p !== '..') + break; + t = c; + pattern = rest; + changed = true; + } + p = pattern.pattern(); + rest = pattern.rest(); + if (changed) { + if (this.hasWalkedCache.hasWalked(t, pattern)) + continue; + this.hasWalkedCache.storeWalked(t, pattern); + } + // now we have either a final string for a known entry, + // more strings for an unknown entry, + // or a pattern starting with magic, mounted on t. + if (typeof p === 'string') { + // must be final entry + if (!rest) { + const ifDir = p === '..' || p === '' || p === '.'; + this.matches.add(t.resolve(p), absolute, ifDir); + } + else { + this.subwalks.add(t, pattern); + } + continue; + } + else if (p === GLOBSTAR) { + // if no rest, match and subwalk pattern + // if rest, process rest and subwalk pattern + // if it's a symlink, but we didn't get here by way of a + // globstar match (meaning it's the first time THIS globstar + // has traversed a symlink), then we follow it. Otherwise, stop. + if (!t.isSymbolicLink() || + this.follow || + pattern.checkFollowGlobstar()) { + this.subwalks.add(t, pattern); + } + const rp = rest?.pattern(); + const rrest = rest?.rest(); + if (!rest || ((rp === '' || rp === '.') && !rrest)) { + // only HAS to be a dir if it ends in **/ or **/. + // but ending in ** will match files as well. + this.matches.add(t, absolute, rp === '' || rp === '.'); + } + else { + if (rp === '..') { + // this would mean you're matching **/.. at the fs root, + // and no thanks, I'm not gonna test that specific case. 
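+ // (illustrative: a pattern like '**/..' walked from '/' reaches this
+ // branch with t.parent undefined, hence the `|| t` fallback below)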
+ /* c8 ignore start */ + const tp = t.parent || t; + /* c8 ignore stop */ + if (!rrest) + this.matches.add(tp, absolute, true); + else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { + this.subwalks.add(tp, rrest); + } + } + } + } + else if (p instanceof RegExp) { + this.subwalks.add(t, pattern); + } + } + return this; + } + subwalkTargets() { + return this.subwalks.keys(); + } + child() { + return new Processor(this.opts, this.hasWalkedCache); + } + // return a new Processor containing the subwalks for each + // child entry, and a set of matches, and + // a hasWalkedCache that's a copy of this one + // then we're going to call + filterEntries(parent, entries) { + const patterns = this.subwalks.get(parent); + // put matches and entry walks into the results processor + const results = this.child(); + for (const e of entries) { + for (const pattern of patterns) { + const absolute = pattern.isAbsolute(); + const p = pattern.pattern(); + const rest = pattern.rest(); + if (p === GLOBSTAR) { + results.testGlobstar(e, pattern, rest, absolute); + } + else if (p instanceof RegExp) { + results.testRegExp(e, p, rest, absolute); + } + else { + results.testString(e, p, rest, absolute); + } + } + } + return results; + } + testGlobstar(e, pattern, rest, absolute) { + if (this.dot || !e.name.startsWith('.')) { + if (!pattern.hasMore()) { + this.matches.add(e, absolute, false); + } + if (e.canReaddir()) { + // if we're in follow mode or it's not a symlink, just keep + // testing the same pattern. If there's more after the globstar, + // then this symlink consumes the globstar. If not, then we can + // follow at most ONE symlink along the way, so we mark it, which + // also checks to ensure that it wasn't already marked. + if (this.follow || !e.isSymbolicLink()) { + this.subwalks.add(e, pattern); + } + else if (e.isSymbolicLink()) { + if (rest && pattern.checkFollowGlobstar()) { + this.subwalks.add(e, rest); + } + else if (pattern.markFollowGlobstar()) { + this.subwalks.add(e, pattern); + } + } + } + } + // if the NEXT thing matches this entry, then also add + // the rest. + if (rest) { + const rp = rest.pattern(); + if (typeof rp === 'string' && + // dots and empty were handled already + rp !== '..' && + rp !== '' && + rp !== '.') { + this.testString(e, rp, rest.rest(), absolute); + } + else if (rp === '..') { + /* c8 ignore start */ + const ep = e.parent || e; + /* c8 ignore stop */ + this.subwalks.add(ep, rest); + } + else if (rp instanceof RegExp) { + this.testRegExp(e, rp, rest.rest(), absolute); + } + } + } + testRegExp(e, p, rest, absolute) { + if (!p.test(e.name)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } + testString(e, p, rest, absolute) { + // should never happen? + if (!e.isNamed(p)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } +} +//# sourceMappingURL=processor.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js new file mode 100644 index 0000000000000..6f3358b0c39a3 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js @@ -0,0 +1,352 @@ +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. 
+ * + * @module + */ +import { Minipass } from 'minipass'; +import { Ignore } from './ignore.js'; +import { Processor } from './processor.js'; +const makeIgnore = (ignore, opts) => typeof ignore === 'string' + ? new Ignore([ignore], opts) + : Array.isArray(ignore) + ? new Ignore(ignore, opts) + : ignore; +/** + * basic walking utilities that all the glob walker types use + */ +export class GlobUtil { + path; + patterns; + opts; + seen = new Set(); + paused = false; + aborted = false; + #onResume = []; + #ignore; + #sep; + signal; + maxDepth; + constructor(patterns, path, opts) { + this.patterns = patterns; + this.path = path; + this.opts = opts; + this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/'; + if (opts.ignore) { + this.#ignore = makeIgnore(opts.ignore, opts); + } + // ignore, always set with maxDepth, but it's optional on the + // GlobOptions type + /* c8 ignore start */ + this.maxDepth = opts.maxDepth || Infinity; + /* c8 ignore stop */ + if (opts.signal) { + this.signal = opts.signal; + this.signal.addEventListener('abort', () => { + this.#onResume.length = 0; + }); + } + } + #ignored(path) { + return this.seen.has(path) || !!this.#ignore?.ignored?.(path); + } + #childrenIgnored(path) { + return !!this.#ignore?.childrenIgnored?.(path); + } + // backpressure mechanism + pause() { + this.paused = true; + } + resume() { + /* c8 ignore start */ + if (this.signal?.aborted) + return; + /* c8 ignore stop */ + this.paused = false; + let fn = undefined; + while (!this.paused && (fn = this.#onResume.shift())) { + fn(); + } + } + onResume(fn) { + if (this.signal?.aborted) + return; + /* c8 ignore start */ + if (!this.paused) { + fn(); + } + else { + /* c8 ignore stop */ + this.#onResume.push(fn); + } + } + // do the requisite realpath/stat checking, and return the path + // to add or undefined to filter it out. + async matchCheck(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || (await e.realpath()); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + return this.matchCheckTest(needStat ? await e.lstat() : e, ifDir); + } + matchCheckTest(e, ifDir) { + return e && + (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && + (!ifDir || e.canReaddir()) && + (!this.opts.nodir || !e.isDirectory()) && + !this.#ignored(e) + ? e + : undefined; + } + matchCheckSync(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || e.realpathSync(); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + return this.matchCheckTest(needStat ? e.lstatSync() : e, ifDir); + } + matchFinish(e, absolute) { + if (this.#ignored(e)) + return; + const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute; + this.seen.add(e); + const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; + // ok, we have what we need! + if (this.opts.withFileTypes) { + this.matchEmit(e); + } + else if (abs) { + const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); + this.matchEmit(abs + mark); + } + else { + const rel = this.opts.posix ? e.relativePosix() : e.relative(); + const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) + ? '.' + this.#sep + : ''; + this.matchEmit(!rel ? '.' 
+ mark : pre + rel + mark); + } + } + async match(e, absolute, ifDir) { + const p = await this.matchCheck(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + matchSync(e, absolute, ifDir) { + const p = this.matchCheckSync(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + walkCB(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2(target, patterns, new Processor(this.opts), cb); + } + walkCB2(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const childrenCached = t.readdirCached(); + if (t.calledReaddir()) + this.walkCB3(t, childrenCached, processor, next); + else { + t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); + } + } + next(); + } + walkCB3(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2(target, patterns, processor.child(), next); + } + next(); + } + walkCBSync(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2Sync(target, patterns, new Processor(this.opts), cb); + } + walkCB2Sync(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const children = t.readdirSync(); + this.walkCB3Sync(t, children, processor, next); + } + next(); + } + walkCB3Sync(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2Sync(target, patterns, processor.child(), next); + } + next(); + } +} +export class GlobWalker extends GlobUtil { + matches; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.matches = new Set(); + } + matchEmit(e) { + this.matches.add(e); + } + async walk() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + await this.path.lstat(); + } + await new Promise((res, rej) => { + this.walkCB(this.path, this.patterns, () => { + if (this.signal?.aborted) { + rej(this.signal.reason); + } + else { + res(this.matches); + } + }); + }); + return this.matches; + } + walkSync() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + // nothing for the callback to do, because this never pauses + this.walkCBSync(this.path, this.patterns, () => { + if (this.signal?.aborted) + throw this.signal.reason; + }); + return this.matches; + } +} +export class GlobStream extends GlobUtil { + results; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.results = new Minipass({ + signal: this.signal, + objectMode: true, + }); + this.results.on('drain', () => this.resume()); + this.results.on('resume', () => this.resume()); + } + matchEmit(e) { + this.results.write(e); + if (!this.results.flowing) + this.pause(); + } + stream() { + const target = this.path; + if (target.isUnknown()) { + target.lstat().then(() => { + this.walkCB(target, this.patterns, () => this.results.end()); + }); + } + else { + this.walkCB(target, this.patterns, () => this.results.end()); + } + return this.results; + } + streamSync() { + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + this.walkCBSync(this.path, this.patterns, () => this.results.end()); + return this.results; + } +} +//# sourceMappingURL=walker.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json new file mode 100644 index 0000000000000..2d25985d2bbb5 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json @@ -0,0 +1,98 @@ +{ + "author": "Isaac Z. 
Schlueter (https://blog.izs.me/)", + "name": "glob", + "description": "the most correct and second fastest glob implementation in JavaScript", + "version": "10.3.3", + "bin": "./dist/cjs/src/bin.js", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "./dist/cjs/src/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/mjs/index.d.ts", + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/src/index.d.ts", + "default": "./dist/cjs/src/index.js" + } + } + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", + "prepublish": "npm run benchclean", + "profclean": "rm -f v8.log profile.txt", + "test-regen": "npm run profclean && TEST_REGEN=1 node --no-warnings --loader ts-node/esm test/00-setup.ts", + "prebench": "npm run prepare", + "bench": "bash benchmark.sh", + "preprof": "npm run prepare", + "prof": "bash prof.sh", + "benchclean": "node benchclean.js" + }, + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^2.0.3", + "minimatch": "^9.0.1", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", + "path-scurry": "^1.10.1" + }, + "devDependencies": { + "@types/node": "^20.3.2", + "@types/tap": "^15.0.7", + "c8": "^7.12.0", + "memfs": "^3.4.13", + "mkdirp": "^2.1.4", + "prettier": "^2.8.3", + "rimraf": "^4.1.3", + "tap": "^16.3.4", + "ts-node": "^10.9.1", + "typedoc": "^0.23.24", + "typescript": "^4.9.4" + }, + "tap": { + "before": "test/00-setup.ts", + "coverage": false, + "node-arg": [ + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } +} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE new file mode 100644 index 0000000000000..1493534e60dce --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
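As context for the minimatch internals that follow, here is a short usage sketch of the glob@10 surface assembled by the dist files above (illustrative only, not part of the patch itself):

import { glob, globSync, hasMagic } from 'glob'

// async walk, with ignores handled by the Ignore class above
const js = await glob('src/**/*.js', { ignore: 'node_modules/**' })

// sync walk including dotfiles, directories filtered out
const files = globSync('**', { dot: true, nodir: true })

// brace expansion alone is not "magic" unless magicalBraces is set
hasMagic('x{a,b}y') // false
hasMagic('x{a,b}y', { magicalBraces: true }) // true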
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/assert-valid-pattern.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/assert-valid-pattern.js new file mode 100644 index 0000000000000..5fc86bbd0116c --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/assert-valid-pattern.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.assertValidPattern = void 0; +const MAX_PATTERN_LENGTH = 1024 * 64; +const assertValidPattern = (pattern) => { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern'); + } + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long'); + } +}; +exports.assertValidPattern = assertValidPattern; +//# sourceMappingURL=assert-valid-pattern.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/ast.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/ast.js new file mode 100644 index 0000000000000..a98ae79b503b1 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/ast.js @@ -0,0 +1,589 @@ +"use strict"; +// parse a single path portion +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AST = void 0; +const brace_expressions_js_1 = require("./brace-expressions.js"); +const unescape_js_1 = require("./unescape.js"); +const types = new Set(['!', '?', '+', '*', '@']); +const isExtglobType = (c) => types.has(c); +// Patterns that get prepended to bind to the start of either the +// entire string, or just a single path portion, to prevent dots +// and/or traversal patterns, when needed. +// Exts don't need the ^ or / bit, because the root binds that already. +const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))'; +const startNoDot = '(?!\\.)'; +// characters that indicate a start of pattern needs the "no dots" bit, +// because a dot *might* be matched. ( is not in the list, because in +// the case of a child extglob, it will handle the prevention itself. +const addPatternStart = new Set(['[', '.']); +// cases where traversal is A-OK, no dot prevention needed +const justDots = new Set(['..', '.']); +const reSpecials = new Set('().*{}+?[]^$\\!'); +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// any single thing other than / +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// use + when we need to ensure that *something* matches, because the * is +// the only thing in the path portion. +const starNoEmpty = qmark + '+?'; +// remove the \ chars that we added if we end up doing a nonmagic compare +// const deslash = (s: string) => s.replace(/\\(.)/g, '$1') +class AST { + type; + #root; + #hasMagic; + #uflag = false; + #parts = []; + #parent; + #parentIndex; + #negs; + #filledNegs = false; + #options; + #toString; + // set to true if it's an extglob with no children + // (which really means one child of '') + #emptyExt = false; + constructor(type, parent, options = {}) { + this.type = type; + // extglobs are inherently magical + if (type) + this.#hasMagic = true; + this.#parent = parent; + this.#root = this.#parent ? this.#parent.#root : this; + this.#options = this.#root === this ? options : this.#root.#options; + this.#negs = this.#root === this ? [] : this.#root.#negs; + if (type === '!' 
&& !this.#root.#filledNegs) + this.#negs.push(this); + this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0; + } + get hasMagic() { + /* c8 ignore start */ + if (this.#hasMagic !== undefined) + return this.#hasMagic; + /* c8 ignore stop */ + for (const p of this.#parts) { + if (typeof p === 'string') + continue; + if (p.type || p.hasMagic) + return (this.#hasMagic = true); + } + // note: will be undefined until we generate the regexp src and find out + return this.#hasMagic; + } + // reconstructs the pattern + toString() { + if (this.#toString !== undefined) + return this.#toString; + if (!this.type) { + return (this.#toString = this.#parts.map(p => String(p)).join('')); + } + else { + return (this.#toString = + this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')'); + } + } + #fillNegs() { + /* c8 ignore start */ + if (this !== this.#root) + throw new Error('should only call on root'); + if (this.#filledNegs) + return this; + /* c8 ignore stop */ + // call toString() once to fill this out + this.toString(); + this.#filledNegs = true; + let n; + while ((n = this.#negs.pop())) { + if (n.type !== '!') + continue; + // walk up the tree, appending everthing that comes AFTER parentIndex + let p = n; + let pp = p.#parent; + while (pp) { + for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) { + for (const part of n.#parts) { + /* c8 ignore start */ + if (typeof part === 'string') { + throw new Error('string part in extglob AST??'); + } + /* c8 ignore stop */ + part.copyIn(pp.#parts[i]); + } + } + p = pp; + pp = p.#parent; + } + } + return this; + } + push(...parts) { + for (const p of parts) { + if (p === '') + continue; + /* c8 ignore start */ + if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) { + throw new Error('invalid part: ' + p); + } + /* c8 ignore stop */ + this.#parts.push(p); + } + } + toJSON() { + const ret = this.type === null + ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON())) + : [this.type, ...this.#parts.map(p => p.toJSON())]; + if (this.isStart() && !this.type) + ret.unshift([]); + if (this.isEnd() && + (this === this.#root || + (this.#root.#filledNegs && this.#parent?.type === '!'))) { + ret.push({}); + } + return ret; + } + isStart() { + if (this.#root === this) + return true; + // if (this.type) return !!this.#parent?.isStart() + if (!this.#parent?.isStart()) + return false; + if (this.#parentIndex === 0) + return true; + // if everything AHEAD of this is a negation, then it's still the "start" + const p = this.#parent; + for (let i = 0; i < this.#parentIndex; i++) { + const pp = p.#parts[i]; + if (!(pp instanceof AST && pp.type === '!')) { + return false; + } + } + return true; + } + isEnd() { + if (this.#root === this) + return true; + if (this.#parent?.type === '!') + return true; + if (!this.#parent?.isEnd()) + return false; + if (!this.type) + return this.#parent?.isEnd(); + // if not root, it'll always have a parent + /* c8 ignore start */ + const pl = this.#parent ? 
this.#parent.#parts.length : 0; + /* c8 ignore stop */ + return this.#parentIndex === pl - 1; + } + copyIn(part) { + if (typeof part === 'string') + this.push(part); + else + this.push(part.clone(this)); + } + clone(parent) { + const c = new AST(this.type, parent); + for (const p of this.#parts) { + c.copyIn(p); + } + return c; + } + static #parseAST(str, ast, pos, opt) { + let escaping = false; + let inBrace = false; + let braceStart = -1; + let braceNeg = false; + if (ast.type === null) { + // outside of a extglob, append until we find a start + let i = pos; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') { + ast.push(acc); + acc = ''; + const ext = new AST(c, ast); + i = AST.#parseAST(str, ext, i, opt); + ast.push(ext); + continue; + } + acc += c; + } + ast.push(acc); + return i; + } + // some kind of extglob, pos is at the ( + // find the next | or ) + let i = pos + 1; + let part = new AST(null, ast); + const parts = []; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (isExtglobType(c) && str.charAt(i) === '(') { + part.push(acc); + acc = ''; + const ext = new AST(c, part); + part.push(ext); + i = AST.#parseAST(str, ext, i, opt); + continue; + } + if (c === '|') { + part.push(acc); + acc = ''; + parts.push(part); + part = new AST(null, ast); + continue; + } + if (c === ')') { + if (acc === '' && ast.#parts.length === 0) { + ast.#emptyExt = true; + } + part.push(acc); + acc = ''; + ast.push(...parts, part); + return i; + } + acc += c; + } + // unfinished extglob + // if we got here, it was a malformed extglob! not an extglob, but + // maybe something else in there. + ast.type = null; + ast.#hasMagic = undefined; + ast.#parts = [str.substring(pos - 1)]; + return i; + } + static fromGlob(pattern, options = {}) { + const ast = new AST(null, undefined, options); + AST.#parseAST(pattern, ast, 0, options); + return ast; + } + // returns the regular expression if there's magic, or the unescaped + // string if not. + toMMPattern() { + // should only be called on root + /* c8 ignore start */ + if (this !== this.#root) + return this.#root.toMMPattern(); + /* c8 ignore stop */ + const glob = this.toString(); + const [re, body, hasMagic, uflag] = this.toRegExpSource(); + // if we're in nocase mode, and not nocaseMagicOnly, then we do + // still need a regular expression if we have to case-insensitively + // match capital/lowercase characters. 
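+ // e.g. (illustrative): AST.fromGlob('foo', { nocase: true })
+ //   .toMMPattern() yields /^foo$/i rather than the string 'foo',
+ //   since 'FOO' must match too; with nocaseMagicOnly it stays a string.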
+ const anyMagic = hasMagic || + this.#hasMagic || + (this.#options.nocase && + !this.#options.nocaseMagicOnly && + glob.toUpperCase() !== glob.toLowerCase()); + if (!anyMagic) { + return body; + } + const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : ''); + return Object.assign(new RegExp(`^${re}$`, flags), { + _src: re, + _glob: glob, + }); + } + // returns the string match, the regexp source, whether there's magic + // in the regexp (so a regular expression is required) and whether or + // not the uflag is needed for the regular expression (for posix classes) + // TODO: instead of injecting the start/end at this point, just return + // the BODY of the regexp, along with the start/end portions suitable + // for binding the start/end in either a joined full-path makeRe context + // (where we bind to (^|/), or a standalone matchPart context (where + // we bind to ^, and not /). Otherwise slashes get duped! + // + // In part-matching mode, the start is: + // - if not isStart: nothing + // - if traversal possible, but not allowed: ^(?!\.\.?$) + // - if dots allowed or not possible: ^ + // - if dots possible and not allowed: ^(?!\.) + // end is: + // - if not isEnd(): nothing + // - else: $ + // + // In full-path matching mode, we put the slash at the START of the + // pattern, so start is: + // - if first pattern: same as part-matching mode + // - if not isStart(): nothing + // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/)) + // - if dots allowed or not possible: / + // - if dots possible and not allowed: /(?!\.) + // end is: + // - if last pattern, same as part-matching mode + // - else nothing + // + // Always put the (?:$|/) on negated tails, though, because that has to be + // there to bind the end of the negated pattern portion, and it's easier to + // just stick it in now rather than try to inject it later in the middle of + // the pattern. + // + // We can just always return the same end, and leave it up to the caller + // to know whether it's going to be used joined or in parts. + // And, if the start is adjusted slightly, can do the same there: + // - if not isStart: nothing + // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$) + // - if dots allowed or not possible: (?:/|^) + // - if dots possible and not allowed: (?:/|^)(?!\.) + // + // But it's better to have a simpler binding without a conditional, for + // performance, so probably better to return both start options. + // + // Then the caller just ignores the end if it's not the first pattern, + // and the start always gets applied. + // + // But that's always going to be $ if it's the ending pattern, or nothing, + // so the caller can just attach $ at the end of the pattern when building. + // + // So the todo is: + // - better detect what kind of start is needed + // - return both flavors of starting pattern + // - attach $ at the end of the pattern when creating the actual RegExp + // + // Ah, but wait, no, that all only applies to the root when the first pattern + // is not an extglob. If the first pattern IS an extglob, then we need all + // that dot prevention biz to live in the extglob portions, because eg + // +(*|.x*) can match .xy but not .yx. + // + // So, return the two flavors if it's #root and the first child is not an + // AST, otherwise leave it to the child AST to handle it, and there, + // use the (?:^|/) style of start binding. + // + // Even simplified further: + // - Since the start for a join is eg /(?!\.) 
and the start for a part + // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root + // or start or whatever) and prepend ^ or / at the Regexp construction. + toRegExpSource(allowDot) { + const dot = allowDot ?? !!this.#options.dot; + if (this.#root === this) + this.#fillNegs(); + if (!this.type) { + const noEmpty = this.isStart() && this.isEnd(); + const src = this.#parts + .map(p => { + const [re, _, hasMagic, uflag] = typeof p === 'string' + ? AST.#parseGlob(p, this.#hasMagic, noEmpty) + : p.toRegExpSource(allowDot); + this.#hasMagic = this.#hasMagic || hasMagic; + this.#uflag = this.#uflag || uflag; + return re; + }) + .join(''); + let start = ''; + if (this.isStart()) { + if (typeof this.#parts[0] === 'string') { + // this is the string that will match the start of the pattern, + // so we need to protect against dots and such. + // '.' and '..' cannot match unless the pattern is that exactly, + // even if it starts with . or dot:true is set. + const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]); + if (!dotTravAllowed) { + const aps = addPatternStart; + // check if we have a possibility of matching . or .., + // and prevent that. + const needNoTrav = + // dots are allowed, and the pattern starts with [ or . + (dot && aps.has(src.charAt(0))) || + // the pattern starts with \., and then [ or . + (src.startsWith('\\.') && aps.has(src.charAt(2))) || + // the pattern starts with \.\., and then [ or . + (src.startsWith('\\.\\.') && aps.has(src.charAt(4))); + // no need to prevent dots if it can't match a dot, or if a + // sub-pattern will be preventing it anyway. + const needNoDot = !dot && !allowDot && aps.has(src.charAt(0)); + start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : ''; + } + } + } + // append the "end of path portion" pattern to negation tails + let end = ''; + if (this.isEnd() && + this.#root.#filledNegs && + this.#parent?.type === '!') { + end = '(?:$|\\/)'; + } + const final = start + src + end; + return [ + final, + (0, unescape_js_1.unescape)(src), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + // We need to calculate the body *twice* if it's a repeat pattern + // at the start, once in nodot mode, then again in dot mode, so a + // pattern like *(?) can match 'x.y' + const repeated = this.type === '*' || this.type === '+'; + // some kind of extglob + const start = this.type === '!' ? '(?:(?!(?:' : '(?:'; + let body = this.#partsToRegExp(dot); + if (this.isStart() && this.isEnd() && !body && this.type !== '!') { + // invalid extglob, has to at least be *something* present, if it's + // the entire path portion. + const s = this.toString(); + this.#parts = [s]; + this.type = null; + this.#hasMagic = undefined; + return [s, (0, unescape_js_1.unescape)(this.toString()), false, false]; + } + // XXX abstract out this map method + let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot + ? '' + : this.#partsToRegExp(true); + if (bodyDotAllowed === body) { + bodyDotAllowed = ''; + } + if (bodyDotAllowed) { + body = `(?:${body})(?:${bodyDotAllowed})*?`; + } + // an empty !() is exactly equivalent to a starNoEmpty + let final = ''; + if (this.type === '!' && this.#emptyExt) { + final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty; + } + else { + const close = this.type === '!' + ? // !() must match something,but !(x) can match '' + '))' + + (this.isStart() && !dot && !allowDot ? startNoDot : '') + + star + + ')' + : this.type === '@' + ? ')' + : this.type === '?' + ? ')?' 
+ : this.type === '+' && bodyDotAllowed + ? ')' + : this.type === '*' && bodyDotAllowed + ? `)?` + : `)${this.type}`; + final = start + body + close; + } + return [ + final, + (0, unescape_js_1.unescape)(body), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + #partsToRegExp(dot) { + return this.#parts + .map(p => { + // extglob ASTs should only contain parent ASTs + /* c8 ignore start */ + if (typeof p === 'string') { + throw new Error('string type in extglob ast??'); + } + /* c8 ignore stop */ + // can ignore hasMagic, because extglobs are already always magic + const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot); + this.#uflag = this.#uflag || uflag; + return re; + }) + .filter(p => !(this.isStart() && this.isEnd()) || !!p) + .join('|'); + } + static #parseGlob(glob, hasMagic, noEmpty = false) { + let escaping = false; + let re = ''; + let uflag = false; + for (let i = 0; i < glob.length; i++) { + const c = glob.charAt(i); + if (escaping) { + escaping = false; + re += (reSpecials.has(c) ? '\\' : '') + c; + continue; + } + if (c === '\\') { + if (i === glob.length - 1) { + re += '\\\\'; + } + else { + escaping = true; + } + continue; + } + if (c === '[') { + const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i); + if (consumed) { + re += src; + uflag = uflag || needUflag; + i += consumed - 1; + hasMagic = hasMagic || magic; + continue; + } + } + if (c === '*') { + if (noEmpty && glob === '*') + re += starNoEmpty; + else + re += star; + hasMagic = true; + continue; + } + if (c === '?') { + re += qmark; + hasMagic = true; + continue; + } + re += regExpEscape(c); + } + return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag]; + } +} +exports.AST = AST; +//# sourceMappingURL=ast.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/brace-expressions.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/brace-expressions.js new file mode 100644 index 0000000000000..0e13eefc4cfee --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/brace-expressions.js @@ -0,0 +1,152 @@ +"use strict"; +// translate the various posix character classes into unicode properties +// this works across all unicode locales +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseClass = void 0; +// { : [, /u flag required, negated] +const posixClasses = { + '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true], + '[:alpha:]': ['\\p{L}\\p{Nl}', true], + '[:ascii:]': ['\\x' + '00-\\x' + '7f', false], + '[:blank:]': ['\\p{Zs}\\t', true], + '[:cntrl:]': ['\\p{Cc}', true], + '[:digit:]': ['\\p{Nd}', true], + '[:graph:]': ['\\p{Z}\\p{C}', true, true], + '[:lower:]': ['\\p{Ll}', true], + '[:print:]': ['\\p{C}', true], + '[:punct:]': ['\\p{P}', true], + '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true], + '[:upper:]': ['\\p{Lu}', true], + '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true], + '[:xdigit:]': ['A-Fa-f0-9', false], +}; +// only need to escape a few things inside of brace expressions +// escapes: [ \ ] - +const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&'); +// escape all regexp magic characters +const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// everything has already been escaped, we just have to join +const rangesToString = (ranges) => ranges.join(''); +// takes a glob string at a posix brace expression, and returns +// an equivalent regular expression source, and boolean 
indicating +// whether the /u flag needs to be applied, and the number of chars +// consumed to parse the character class. +// This also removes out of order ranges, and returns ($.) if the +// entire class just no good. +const parseClass = (glob, position) => { + const pos = position; + /* c8 ignore start */ + if (glob.charAt(pos) !== '[') { + throw new Error('not in a brace expression'); + } + /* c8 ignore stop */ + const ranges = []; + const negs = []; + let i = pos + 1; + let sawStart = false; + let uflag = false; + let escaping = false; + let negate = false; + let endPos = pos; + let rangeStart = ''; + WHILE: while (i < glob.length) { + const c = glob.charAt(i); + if ((c === '!' || c === '^') && i === pos + 1) { + negate = true; + i++; + continue; + } + if (c === ']' && sawStart && !escaping) { + endPos = i + 1; + break; + } + sawStart = true; + if (c === '\\') { + if (!escaping) { + escaping = true; + i++; + continue; + } + // escaped \ char, fall through and treat like normal char + } + if (c === '[' && !escaping) { + // either a posix class, a collation equivalent, or just a [ + for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) { + if (glob.startsWith(cls, i)) { + // invalid, [a-[] is fine, but not [a-[:alpha]] + if (rangeStart) { + return ['$.', false, glob.length - pos, true]; + } + i += cls.length; + if (neg) + negs.push(unip); + else + ranges.push(unip); + uflag = uflag || u; + continue WHILE; + } + } + } + // now it's just a normal character, effectively + escaping = false; + if (rangeStart) { + // throw this range away if it's not valid, but others + // can still match. + if (c > rangeStart) { + ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c)); + } + else if (c === rangeStart) { + ranges.push(braceEscape(c)); + } + rangeStart = ''; + i++; + continue; + } + // now might be the start of a range. + // can be either c-d or c-] or c] or c] at this point + if (glob.startsWith('-]', i + 1)) { + ranges.push(braceEscape(c + '-')); + i += 2; + continue; + } + if (glob.startsWith('-', i + 1)) { + rangeStart = c; + i += 2; + continue; + } + // not the start of a range, just a single character + ranges.push(braceEscape(c)); + i++; + } + if (endPos < i) { + // didn't see the end of the class, not a valid class, + // but might still be valid as a literal match. + return ['', false, 0, false]; + } + // if we got no ranges and no negates, then we have a range that + // cannot possibly match anything, and that poisons the whole glob + if (!ranges.length && !negs.length) { + return ['$.', false, glob.length - pos, true]; + } + // if we got one positive range, and it's a single character, then that's + // not actually a magic pattern, it's just that one literal character. + // we should not treat that as "magic", we should just return the literal + // character. [_] is a perfectly valid way to escape glob magic chars. + if (negs.length === 0 && + ranges.length === 1 && + /^\\?.$/.test(ranges[0]) && + !negate) { + const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0]; + return [regexpEscape(r), false, endPos - pos, false]; + } + const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']'; + const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']'; + const comb = ranges.length && negs.length + ? '(' + sranges + '|' + snegs + ')' + : ranges.length + ? 
sranges + : snegs; + return [comb, uflag, endPos - pos, true]; +}; +exports.parseClass = parseClass; +//# sourceMappingURL=brace-expressions.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/escape.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/escape.js new file mode 100644 index 0000000000000..02a4f8a8e0a58 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/escape.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.escape = void 0; +/** + * Escape all magic characters in a glob pattern. + * + * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape} + * option is used, then characters are escaped by wrapping in `[]`, because + * a magic character wrapped in a character class can only be satisfied by + * that exact character. In this mode, `\` is _not_ escaped, because it is + * not interpreted as a magic character, but instead as a path separator. + */ +const escape = (s, { windowsPathsNoEscape = false, } = {}) => { + // don't need to escape +@! because we escape the parens + // that make those magic, and escaping ! as [!] isn't valid, + // because [!]] is a valid glob class meaning not ']'. + return windowsPathsNoEscape + ? s.replace(/[?*()[\]]/g, '[$&]') + : s.replace(/[?*()[\]\\]/g, '\\$&'); +}; +exports.escape = escape; +//# sourceMappingURL=escape.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/index.js new file mode 100644 index 0000000000000..d70e681fef5d7 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/index.js @@ -0,0 +1,1011 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0; +const brace_expansion_1 = __importDefault(require("brace-expansion")); +const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js"); +const ast_js_1 = require("./ast.js"); +const escape_js_1 = require("./escape.js"); +const unescape_js_1 = require("./unescape.js"); +const minimatch = (p, pattern, options = {}) => { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false; + } + return new Minimatch(pattern, options).match(p); +}; +exports.minimatch = minimatch; +// Optimized checking for the most common glob patterns. 
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/; +const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext); +const starDotExtTestDot = (ext) => (f) => f.endsWith(ext); +const starDotExtTestNocase = (ext) => { + ext = ext.toLowerCase(); + return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext); +}; +const starDotExtTestNocaseDot = (ext) => { + ext = ext.toLowerCase(); + return (f) => f.toLowerCase().endsWith(ext); +}; +const starDotStarRE = /^\*+\.\*+$/; +const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.'); +const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.'); +const dotStarRE = /^\.\*+$/; +const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.'); +const starRE = /^\*+$/; +const starTest = (f) => f.length !== 0 && !f.startsWith('.'); +const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..'; +const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/; +const qmarksTestNocase = ([$0, ext = '']) => { + const noext = qmarksTestNoExt([$0]); + if (!ext) + return noext; + ext = ext.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext); +}; +const qmarksTestNocaseDot = ([$0, ext = '']) => { + const noext = qmarksTestNoExtDot([$0]); + if (!ext) + return noext; + ext = ext.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext); +}; +const qmarksTestDot = ([$0, ext = '']) => { + const noext = qmarksTestNoExtDot([$0]); + return !ext ? noext : (f) => noext(f) && f.endsWith(ext); +}; +const qmarksTest = ([$0, ext = '']) => { + const noext = qmarksTestNoExt([$0]); + return !ext ? noext : (f) => noext(f) && f.endsWith(ext); +}; +const qmarksTestNoExt = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && !f.startsWith('.'); +}; +const qmarksTestNoExtDot = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && f !== '.' && f !== '..'; +}; +/* c8 ignore start */ +const defaultPlatform = (typeof process === 'object' && process + ? (typeof process.env === 'object' && + process.env && + process.env.__MINIMATCH_TESTING_PLATFORM__) || + process.platform + : 'posix'); +const path = { + win32: { sep: '\\' }, + posix: { sep: '/' }, +}; +/* c8 ignore stop */ +exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep; +exports.minimatch.sep = exports.sep; +exports.GLOBSTAR = Symbol('globstar **'); +exports.minimatch.GLOBSTAR = exports.GLOBSTAR; +// any single thing other than / +// don't need to escape / when using new RegExp() +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?'; +// not a ^ or / followed by a dot, +// followed by anything, any number of times. 
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?'; +const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options); +exports.filter = filter; +exports.minimatch.filter = exports.filter; +const ext = (a, b = {}) => Object.assign({}, a, b); +const defaults = (def) => { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return exports.minimatch; + } + const orig = exports.minimatch; + const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options)); + return Object.assign(m, { + Minimatch: class Minimatch extends orig.Minimatch { + constructor(pattern, options = {}) { + super(pattern, ext(def, options)); + } + static defaults(options) { + return orig.defaults(ext(def, options)).Minimatch; + } + }, + AST: class AST extends orig.AST { + /* c8 ignore start */ + constructor(type, parent, options = {}) { + super(type, parent, ext(def, options)); + } + /* c8 ignore stop */ + static fromGlob(pattern, options = {}) { + return orig.AST.fromGlob(pattern, ext(def, options)); + } + }, + unescape: (s, options = {}) => orig.unescape(s, ext(def, options)), + escape: (s, options = {}) => orig.escape(s, ext(def, options)), + filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)), + defaults: (options) => orig.defaults(ext(def, options)), + makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)), + braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)), + match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)), + sep: orig.sep, + GLOBSTAR: exports.GLOBSTAR, + }); +}; +exports.defaults = defaults; +exports.minimatch.defaults = exports.defaults; +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +const braceExpand = (pattern, options = {}) => { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern]; + } + return (0, brace_expansion_1.default)(pattern); +}; +exports.braceExpand = braceExpand; +exports.minimatch.braceExpand = exports.braceExpand; +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. 
+const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe(); +exports.makeRe = makeRe; +exports.minimatch.makeRe = exports.makeRe; +const match = (list, pattern, options = {}) => { + const mm = new Minimatch(pattern, options); + list = list.filter(f => mm.match(f)); + if (mm.options.nonull && !list.length) { + list.push(pattern); + } + return list; +}; +exports.match = match; +exports.minimatch.match = exports.match; +// replace stuff like \* with * +const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/; +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +class Minimatch { + options; + set; + pattern; + windowsPathsNoEscape; + nonegate; + negate; + comment; + empty; + preserveMultipleSlashes; + partial; + globSet; + globParts; + nocase; + isWindows; + platform; + windowsNoMagicRoot; + regexp; + constructor(pattern, options = {}) { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + options = options || {}; + this.options = options; + this.pattern = pattern; + this.platform = options.platform || defaultPlatform; + this.isWindows = this.platform === 'win32'; + this.windowsPathsNoEscape = + !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; + if (this.windowsPathsNoEscape) { + this.pattern = this.pattern.replace(/\\/g, '/'); + } + this.preserveMultipleSlashes = !!options.preserveMultipleSlashes; + this.regexp = null; + this.negate = false; + this.nonegate = !!options.nonegate; + this.comment = false; + this.empty = false; + this.partial = !!options.partial; + this.nocase = !!this.options.nocase; + this.windowsNoMagicRoot = + options.windowsNoMagicRoot !== undefined + ? options.windowsNoMagicRoot + : !!(this.isWindows && this.nocase); + this.globSet = []; + this.globParts = []; + this.set = []; + // make the set of regexps etc. + this.make(); + } + hasMagic() { + if (this.options.magicalBraces && this.set.length > 1) { + return true; + } + for (const pattern of this.set) { + for (const part of pattern) { + if (typeof part !== 'string') + return true; + } + } + return false; + } + debug(..._) { } + make() { + const pattern = this.pattern; + const options = this.options; + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true; + return; + } + if (!pattern) { + this.empty = true; + return; + } + // step 1: figure out negation, etc. + this.parseNegate(); + // step 2: expand braces + this.globSet = [...new Set(this.braceExpand())]; + if (options.debug) { + this.debug = (...args) => console.error(...args); + } + this.debug(this.pattern, this.globSet); + // step 3: now we have a set, so turn each one into a series of + // path-portion matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + // + // First, we preprocess to make the glob pattern sets a bit simpler + // and deduped. There are some perf-killing patterns that can cause + // problems with a glob walk, but we can simplify them down a bit. + const rawGlobParts = this.globSet.map(s => this.slashSplit(s)); + this.globParts = this.preprocess(rawGlobParts); + this.debug(this.pattern, this.globParts); + // glob --> regexps + let set = this.globParts.map((s, _, __) => { + if (this.isWindows && this.windowsNoMagicRoot) { + // check if it's a drive or unc path. + const isUNC = s[0] === '' && + s[1] === '' && + (s[2] === '?' 
|| !globMagic.test(s[2])) && + !globMagic.test(s[3]); + const isDrive = /^[a-z]:/i.test(s[0]); + if (isUNC) { + return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))]; + } + else if (isDrive) { + return [s[0], ...s.slice(1).map(ss => this.parse(ss))]; + } + } + return s.map(ss => this.parse(ss)); + }); + this.debug(this.pattern, set); + // filter out everything that didn't compile properly. + this.set = set.filter(s => s.indexOf(false) === -1); + // do not treat the ? in UNC paths as magic + if (this.isWindows) { + for (let i = 0; i < this.set.length; i++) { + const p = this.set[i]; + if (p[0] === '' && + p[1] === '' && + this.globParts[i][2] === '?' && + typeof p[3] === 'string' && + /^[a-z]:$/i.test(p[3])) { + p[2] = '?'; + } + } + } + this.debug(this.pattern, this.set); + } + // various transforms to equivalent pattern sets that are + // faster to process in a filesystem walk. The goal is to + // eliminate what we can, and push all ** patterns as far + // to the right as possible, even if it increases the number + // of patterns that we have to process. + preprocess(globParts) { + // if we're not in globstar mode, then turn all ** into * + if (this.options.noglobstar) { + for (let i = 0; i < globParts.length; i++) { + for (let j = 0; j < globParts[i].length; j++) { + if (globParts[i][j] === '**') { + globParts[i][j] = '*'; + } + } + } + } + const { optimizationLevel = 1 } = this.options; + if (optimizationLevel >= 2) { + // aggressive optimization for the purpose of fs walking + globParts = this.firstPhasePreProcess(globParts); + globParts = this.secondPhasePreProcess(globParts); + } + else if (optimizationLevel >= 1) { + // just basic optimizations to remove some .. parts + globParts = this.levelOneOptimize(globParts); + } + else { + globParts = this.adjascentGlobstarOptimize(globParts); + } + return globParts; + } + // just get rid of adjascent ** portions + adjascentGlobstarOptimize(globParts) { + return globParts.map(parts => { + let gs = -1; + while (-1 !== (gs = parts.indexOf('**', gs + 1))) { + let i = gs; + while (parts[i + 1] === '**') { + i++; + } + if (i !== gs) { + parts.splice(gs, i - gs); + } + } + return parts; + }); + } + // get rid of adjascent ** and resolve .. portions + levelOneOptimize(globParts) { + return globParts.map(parts => { + parts = parts.reduce((set, part) => { + const prev = set[set.length - 1]; + if (part === '**' && prev === '**') { + return set; + } + if (part === '..') { + if (prev && prev !== '..' && prev !== '.' && prev !== '**') { + set.pop(); + return set; + } + } + set.push(part); + return set; + }, []); + return parts.length === 0 ? [''] : parts; + }); + } + levelTwoFileOptimize(parts) { + if (!Array.isArray(parts)) { + parts = this.slashSplit(parts); + } + let didSomething = false; + do { + didSomething = false; + //
<pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doesn't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (!matched)
+                    continue;
+                globParts[i] = matched;
+                globParts[j] = [];
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means the two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === exports.GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
+    braceExpand() {
+        return (0, exports.braceExpand)(this.pattern, this.options);
+    }
+    parse(pattern) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return exports.GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
+        return fastTest ? Object.assign(re, { test: fastTest }) : re;
+    }
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === exports.GLOBSTAR
+                        ? exports.GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== exports.GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== exports.GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = exports.GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return exports.minimatch.defaults(def).Minimatch;
+    }
+}
+exports.Minimatch = Minimatch;
+/* c8 ignore start */
+var ast_js_2 = require("./ast.js");
+Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
+var escape_js_2 = require("./escape.js");
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
+var unescape_js_2 = require("./unescape.js");
+Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
+/* c8 ignore stop */
+exports.minimatch.AST = ast_js_1.AST;
+exports.minimatch.Minimatch = Minimatch;
+exports.minimatch.escape = escape_js_1.escape;
+exports.minimatch.unescape = unescape_js_1.unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
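
For review context, a minimal sketch of the matcher API this vendored file exposes, assuming the exports defined in the hunk above (minimatch, Minimatch, makeRe); the patterns and paths are illustrative only:

    const { minimatch, Minimatch, makeRe } = require('minimatch')

    // one-off match; '#'-prefixed patterns are comments and match nothing
    minimatch('src/foo.js', 'src/*.js')      // true
    minimatch('src/.hidden.js', 'src/*.js')  // false unless { dot: true }

    // reusable matcher: braces expand first, ** spans directories
    const mm = new Minimatch('lib/**/*.{js,ts}', { nocase: true })
    mm.match('lib/a/b/C.TS')                 // true

    // compile the whole pattern set to one anchored RegExp (false on error)
    const re = makeRe('*.js')
    re.test('index.js')                      // true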
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
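
This nested three-line package.json is the usual dual-package marker, not an accident: it forces everything under dist/cjs/ to parse as CommonJS even if the package root declares "type": "module", while the dist/mjs/ tree added below carries the mirror-image { "type": "module" } marker. A sketch of the ESM side, assuming the package's exports map routes the "import" condition to dist/mjs (as minimatch's package.json does):

    // consumer.mjs — Node resolves the "import" condition to dist/mjs/index.js,
    // which dist/mjs/package.json marks as ESM
    import { minimatch } from 'minimatch'
    console.log(minimatch('a/b.js', '**/*.js')) // true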
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js
new file mode 100644
index 0000000000000..47c36bcee5a02
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unescape = void 0;
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+exports.unescape = unescape;
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
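
This unescape() and the escape() added above are inverses over glob magic, which matters when feeding untrusted file names into patterns. A short sketch with illustrative inputs:

    const { escape, unescape, minimatch } = require('minimatch')

    // neutralize magic so user input is matched literally
    const literal = escape('release?*.js')   // 'release\\?\\*.js'
    minimatch('release?*.js', literal)       // true
    minimatch('releaseXX.js', literal)       // false: ? and * are no longer magic

    // unescape reverses both backslash and [x] character-class escapes
    unescape('\\*')  // '*'
    unescape('[*]')  // '*'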
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js
new file mode 100644
index 0000000000000..7b534fc30200b
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js
@@ -0,0 +1,10 @@
+const MAX_PATTERN_LENGTH = 1024 * 64;
+export const assertValidPattern = (pattern) => {
+    if (typeof pattern !== 'string') {
+        throw new TypeError('invalid pattern');
+    }
+    if (pattern.length > MAX_PATTERN_LENGTH) {
+        throw new TypeError('pattern is too long');
+    }
+};
+//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
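
assertValidPattern is the shared guard at every public entry point: patterns must be strings of at most MAX_PATTERN_LENGTH (64 KiB) characters. A runnable sketch of both failure modes:

    const { minimatch } = require('minimatch')

    // non-string pattern
    try { minimatch('x', {}) } catch (e) { console.log(e.message) }
    // -> 'invalid pattern'

    // pattern longer than 1024 * 64 characters
    try { minimatch('x', 'a'.repeat(65537)) } catch (e) { console.log(e.message) }
    // -> 'pattern is too long'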
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js
new file mode 100644
index 0000000000000..9f9835e06a7d5
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js
@@ -0,0 +1,585 @@
+// parse a single path portion
+import { parseClass } from './brace-expressions.js';
+import { unescape } from './unescape.js';
+const types = new Set(['!', '?', '+', '*', '@']);
+const isExtglobType = (c) => types.has(c);
+// Patterns that get prepended to bind to the start of either the
+// entire string, or just a single path portion, to prevent dots
+// and/or traversal patterns, when needed.
+// Exts don't need the ^ or / bit, because the root binds that already.
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
+const startNoDot = '(?!\\.)';
+// characters that indicate a start of pattern needs the "no dots" bit,
+// because a dot *might* be matched. ( is not in the list, because in
+// the case of a child extglob, it will handle the prevention itself.
+const addPatternStart = new Set(['[', '.']);
+// cases where traversal is A-OK, no dot prevention needed
+const justDots = new Set(['..', '.']);
+const reSpecials = new Set('().*{}+?[]^$\\!');
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// any single thing other than /
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// use + when we need to ensure that *something* matches, because the * is
+// the only thing in the path portion.
+const starNoEmpty = qmark + '+?';
+// remove the \ chars that we added if we end up doing a nonmagic compare
+// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
+export class AST {
+    type;
+    #root;
+    #hasMagic;
+    #uflag = false;
+    #parts = [];
+    #parent;
+    #parentIndex;
+    #negs;
+    #filledNegs = false;
+    #options;
+    #toString;
+    // set to true if it's an extglob with no children
+    // (which really means one child of '')
+    #emptyExt = false;
+    constructor(type, parent, options = {}) {
+        this.type = type;
+        // extglobs are inherently magical
+        if (type)
+            this.#hasMagic = true;
+        this.#parent = parent;
+        this.#root = this.#parent ? this.#parent.#root : this;
+        this.#options = this.#root === this ? options : this.#root.#options;
+        this.#negs = this.#root === this ? [] : this.#root.#negs;
+        if (type === '!' && !this.#root.#filledNegs)
+            this.#negs.push(this);
+        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+    }
+    get hasMagic() {
+        /* c8 ignore start */
+        if (this.#hasMagic !== undefined)
+            return this.#hasMagic;
+        /* c8 ignore stop */
+        for (const p of this.#parts) {
+            if (typeof p === 'string')
+                continue;
+            if (p.type || p.hasMagic)
+                return (this.#hasMagic = true);
+        }
+        // note: will be undefined until we generate the regexp src and find out
+        return this.#hasMagic;
+    }
+    // reconstructs the pattern
+    toString() {
+        if (this.#toString !== undefined)
+            return this.#toString;
+        if (!this.type) {
+            return (this.#toString = this.#parts.map(p => String(p)).join(''));
+        }
+        else {
+            return (this.#toString =
+                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
+        }
+    }
+    #fillNegs() {
+        /* c8 ignore start */
+        if (this !== this.#root)
+            throw new Error('should only call on root');
+        if (this.#filledNegs)
+            return this;
+        /* c8 ignore stop */
+        // call toString() once to fill this out
+        this.toString();
+        this.#filledNegs = true;
+        let n;
+        while ((n = this.#negs.pop())) {
+            if (n.type !== '!')
+                continue;
+            // walk up the tree, appending everything that comes AFTER parentIndex
+            let p = n;
+            let pp = p.#parent;
+            while (pp) {
+                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+                    for (const part of n.#parts) {
+                        /* c8 ignore start */
+                        if (typeof part === 'string') {
+                            throw new Error('string part in extglob AST??');
+                        }
+                        /* c8 ignore stop */
+                        part.copyIn(pp.#parts[i]);
+                    }
+                }
+                p = pp;
+                pp = p.#parent;
+            }
+        }
+        return this;
+    }
+    push(...parts) {
+        for (const p of parts) {
+            if (p === '')
+                continue;
+            /* c8 ignore start */
+            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+                throw new Error('invalid part: ' + p);
+            }
+            /* c8 ignore stop */
+            this.#parts.push(p);
+        }
+    }
+    toJSON() {
+        const ret = this.type === null
+            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
+            : [this.type, ...this.#parts.map(p => p.toJSON())];
+        if (this.isStart() && !this.type)
+            ret.unshift([]);
+        if (this.isEnd() &&
+            (this === this.#root ||
+                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
+            ret.push({});
+        }
+        return ret;
+    }
+    isStart() {
+        if (this.#root === this)
+            return true;
+        // if (this.type) return !!this.#parent?.isStart()
+        if (!this.#parent?.isStart())
+            return false;
+        if (this.#parentIndex === 0)
+            return true;
+        // if everything AHEAD of this is a negation, then it's still the "start"
+        const p = this.#parent;
+        for (let i = 0; i < this.#parentIndex; i++) {
+            const pp = p.#parts[i];
+            if (!(pp instanceof AST && pp.type === '!')) {
+                return false;
+            }
+        }
+        return true;
+    }
+    isEnd() {
+        if (this.#root === this)
+            return true;
+        if (this.#parent?.type === '!')
+            return true;
+        if (!this.#parent?.isEnd())
+            return false;
+        if (!this.type)
+            return this.#parent?.isEnd();
+        // if not root, it'll always have a parent
+        /* c8 ignore start */
+        const pl = this.#parent ? this.#parent.#parts.length : 0;
+        /* c8 ignore stop */
+        return this.#parentIndex === pl - 1;
+    }
+    copyIn(part) {
+        if (typeof part === 'string')
+            this.push(part);
+        else
+            this.push(part.clone(this));
+    }
+    clone(parent) {
+        const c = new AST(this.type, parent);
+        for (const p of this.#parts) {
+            c.copyIn(p);
+        }
+        return c;
+    }
+    static #parseAST(str, ast, pos, opt) {
+        let escaping = false;
+        let inBrace = false;
+        let braceStart = -1;
+        let braceNeg = false;
+        if (ast.type === null) {
+            // outside of a extglob, append until we find a start
+            let i = pos;
+            let acc = '';
+            while (i < str.length) {
+                const c = str.charAt(i++);
+                // still accumulate escapes at this point, but we do ignore
+                // starts that are escaped
+                if (escaping || c === '\\') {
+                    escaping = !escaping;
+                    acc += c;
+                    continue;
+                }
+                if (inBrace) {
+                    if (i === braceStart + 1) {
+                        if (c === '^' || c === '!') {
+                            braceNeg = true;
+                        }
+                    }
+                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                        inBrace = false;
+                    }
+                    acc += c;
+                    continue;
+                }
+                else if (c === '[') {
+                    inBrace = true;
+                    braceStart = i;
+                    braceNeg = false;
+                    acc += c;
+                    continue;
+                }
+                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+                    ast.push(acc);
+                    acc = '';
+                    const ext = new AST(c, ast);
+                    i = AST.#parseAST(str, ext, i, opt);
+                    ast.push(ext);
+                    continue;
+                }
+                acc += c;
+            }
+            ast.push(acc);
+            return i;
+        }
+        // some kind of extglob, pos is at the (
+        // find the next | or )
+        let i = pos + 1;
+        let part = new AST(null, ast);
+        const parts = [];
+        let acc = '';
+        while (i < str.length) {
+            const c = str.charAt(i++);
+            // still accumulate escapes at this point, but we do ignore
+            // starts that are escaped
+            if (escaping || c === '\\') {
+                escaping = !escaping;
+                acc += c;
+                continue;
+            }
+            if (inBrace) {
+                if (i === braceStart + 1) {
+                    if (c === '^' || c === '!') {
+                        braceNeg = true;
+                    }
+                }
+                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                    inBrace = false;
+                }
+                acc += c;
+                continue;
+            }
+            else if (c === '[') {
+                inBrace = true;
+                braceStart = i;
+                braceNeg = false;
+                acc += c;
+                continue;
+            }
+            if (isExtglobType(c) && str.charAt(i) === '(') {
+                part.push(acc);
+                acc = '';
+                const ext = new AST(c, part);
+                part.push(ext);
+                i = AST.#parseAST(str, ext, i, opt);
+                continue;
+            }
+            if (c === '|') {
+                part.push(acc);
+                acc = '';
+                parts.push(part);
+                part = new AST(null, ast);
+                continue;
+            }
+            if (c === ')') {
+                if (acc === '' && ast.#parts.length === 0) {
+                    ast.#emptyExt = true;
+                }
+                part.push(acc);
+                acc = '';
+                ast.push(...parts, part);
+                return i;
+            }
+            acc += c;
+        }
+        // unfinished extglob
+        // if we got here, it was a malformed extglob! not an extglob, but
+        // maybe something else in there.
+        ast.type = null;
+        ast.#hasMagic = undefined;
+        ast.#parts = [str.substring(pos - 1)];
+        return i;
+    }
+    static fromGlob(pattern, options = {}) {
+        const ast = new AST(null, undefined, options);
+        AST.#parseAST(pattern, ast, 0, options);
+        return ast;
+    }
+    // returns the regular expression if there's magic, or the unescaped
+    // string if not.
+    toMMPattern() {
+        // should only be called on root
+        /* c8 ignore start */
+        if (this !== this.#root)
+            return this.#root.toMMPattern();
+        /* c8 ignore stop */
+        const glob = this.toString();
+        const [re, body, hasMagic, uflag] = this.toRegExpSource();
+        // if we're in nocase mode, and not nocaseMagicOnly, then we do
+        // still need a regular expression if we have to case-insensitively
+        // match capital/lowercase characters.
+        const anyMagic = hasMagic ||
+            this.#hasMagic ||
+            (this.#options.nocase &&
+                !this.#options.nocaseMagicOnly &&
+                glob.toUpperCase() !== glob.toLowerCase());
+        if (!anyMagic) {
+            return body;
+        }
+        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
+        return Object.assign(new RegExp(`^${re}$`, flags), {
+            _src: re,
+            _glob: glob,
+        });
+    }
+    // returns the string match, the regexp source, whether there's magic
+    // in the regexp (so a regular expression is required) and whether or
+    // not the uflag is needed for the regular expression (for posix classes)
+    // TODO: instead of injecting the start/end at this point, just return
+    // the BODY of the regexp, along with the start/end portions suitable
+    // for binding the start/end in either a joined full-path makeRe context
+    // (where we bind to (^|/), or a standalone matchPart context (where
+    // we bind to ^, and not /).  Otherwise slashes get duped!
+    //
+    // In part-matching mode, the start is:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: ^(?!\.\.?$)
+    // - if dots allowed or not possible: ^
+    // - if dots possible and not allowed: ^(?!\.)
+    // end is:
+    // - if not isEnd(): nothing
+    // - else: $
+    //
+    // In full-path matching mode, we put the slash at the START of the
+    // pattern, so start is:
+    // - if first pattern: same as part-matching mode
+    // - if not isStart(): nothing
+    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+    // - if dots allowed or not possible: /
+    // - if dots possible and not allowed: /(?!\.)
+    // end is:
+    // - if last pattern, same as part-matching mode
+    // - else nothing
+    //
+    // Always put the (?:$|/) on negated tails, though, because that has to be
+    // there to bind the end of the negated pattern portion, and it's easier to
+    // just stick it in now rather than try to inject it later in the middle of
+    // the pattern.
+    //
+    // We can just always return the same end, and leave it up to the caller
+    // to know whether it's going to be used joined or in parts.
+    // And, if the start is adjusted slightly, can do the same there:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+    // - if dots allowed or not possible: (?:/|^)
+    // - if dots possible and not allowed: (?:/|^)(?!\.)
+    //
+    // But it's better to have a simpler binding without a conditional, for
+    // performance, so probably better to return both start options.
+    //
+    // Then the caller just ignores the end if it's not the first pattern,
+    // and the start always gets applied.
+    //
+    // But that's always going to be $ if it's the ending pattern, or nothing,
+    // so the caller can just attach $ at the end of the pattern when building.
+    //
+    // So the todo is:
+    // - better detect what kind of start is needed
+    // - return both flavors of starting pattern
+    // - attach $ at the end of the pattern when creating the actual RegExp
+    //
+    // Ah, but wait, no, that all only applies to the root when the first pattern
+    // is not an extglob. If the first pattern IS an extglob, then we need all
+    // that dot prevention biz to live in the extglob portions, because eg
+    // +(*|.x*) can match .xy but not .yx.
+    //
+    // So, return the two flavors if it's #root and the first child is not an
+    // AST, otherwise leave it to the child AST to handle it, and there,
+    // use the (?:^|/) style of start binding.
+    //
+    // Even simplified further:
+    // - Since the start for a join is eg /(?!\.) and the start for a part
+    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+    // or start or whatever) and prepend ^ or / at the Regexp construction.
+    toRegExpSource(allowDot) {
+        const dot = allowDot ?? !!this.#options.dot;
+        if (this.#root === this)
+            this.#fillNegs();
+        if (!this.type) {
+            const noEmpty = this.isStart() && this.isEnd();
+            const src = this.#parts
+                .map(p => {
+                const [re, _, hasMagic, uflag] = typeof p === 'string'
+                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+                    : p.toRegExpSource(allowDot);
+                this.#hasMagic = this.#hasMagic || hasMagic;
+                this.#uflag = this.#uflag || uflag;
+                return re;
+            })
+                .join('');
+            let start = '';
+            if (this.isStart()) {
+                if (typeof this.#parts[0] === 'string') {
+                    // this is the string that will match the start of the pattern,
+                    // so we need to protect against dots and such.
+                    // '.' and '..' cannot match unless the pattern is that exactly,
+                    // even if it starts with . or dot:true is set.
+                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+                    if (!dotTravAllowed) {
+                        const aps = addPatternStart;
+                        // check if we have a possibility of matching . or ..,
+                        // and prevent that.
+                        const needNoTrav = 
+                        // dots are allowed, and the pattern starts with [ or .
+                        (dot && aps.has(src.charAt(0))) ||
+                            // the pattern starts with \., and then [ or .
+                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
+                            // the pattern starts with \.\., and then [ or .
+                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
+                        // no need to prevent dots if it can't match a dot, or if a
+                        // sub-pattern will be preventing it anyway.
+                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
+                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
+                    }
+                }
+            }
+            // append the "end of path portion" pattern to negation tails
+            let end = '';
+            if (this.isEnd() &&
+                this.#root.#filledNegs &&
+                this.#parent?.type === '!') {
+                end = '(?:$|\\/)';
+            }
+            const final = start + src + end;
+            return [
+                final,
+                unescape(src),
+                (this.#hasMagic = !!this.#hasMagic),
+                this.#uflag,
+            ];
+        }
+        // We need to calculate the body *twice* if it's a repeat pattern
+        // at the start, once in nodot mode, then again in dot mode, so a
+        // pattern like *(?) can match 'x.y'
+        const repeated = this.type === '*' || this.type === '+';
+        // some kind of extglob
+        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
+        let body = this.#partsToRegExp(dot);
+        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
+            // invalid extglob, has to at least be *something* present, if it's
+            // the entire path portion.
+            const s = this.toString();
+            this.#parts = [s];
+            this.type = null;
+            this.#hasMagic = undefined;
+            return [s, unescape(this.toString()), false, false];
+        }
+        // XXX abstract out this map method
+        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+            ? ''
+            : this.#partsToRegExp(true);
+        if (bodyDotAllowed === body) {
+            bodyDotAllowed = '';
+        }
+        if (bodyDotAllowed) {
+            body = `(?:${body})(?:${bodyDotAllowed})*?`;
+        }
+        // an empty !() is exactly equivalent to a starNoEmpty
+        let final = '';
+        if (this.type === '!' && this.#emptyExt) {
+            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
+        }
+        else {
+            const close = this.type === '!'
+                ? // !() must match something, but !(x) can match ''
+                    '))' +
+                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
+                        star +
+                        ')'
+                : this.type === '@'
+                    ? ')'
+                    : this.type === '?'
+                        ? ')?'
+                        : this.type === '+' && bodyDotAllowed
+                            ? ')'
+                            : this.type === '*' && bodyDotAllowed
+                                ? `)?`
+                                : `)${this.type}`;
+            final = start + body + close;
+        }
+        return [
+            final,
+            unescape(body),
+            (this.#hasMagic = !!this.#hasMagic),
+            this.#uflag,
+        ];
+    }
+    #partsToRegExp(dot) {
+        return this.#parts
+            .map(p => {
+            // extglob ASTs should only contain parent ASTs
+            /* c8 ignore start */
+            if (typeof p === 'string') {
+                throw new Error('string type in extglob ast??');
+            }
+            /* c8 ignore stop */
+            // can ignore hasMagic, because extglobs are already always magic
+            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+            this.#uflag = this.#uflag || uflag;
+            return re;
+        })
+            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+            .join('|');
+    }
+    static #parseGlob(glob, hasMagic, noEmpty = false) {
+        let escaping = false;
+        let re = '';
+        let uflag = false;
+        for (let i = 0; i < glob.length; i++) {
+            const c = glob.charAt(i);
+            if (escaping) {
+                escaping = false;
+                re += (reSpecials.has(c) ? '\\' : '') + c;
+                continue;
+            }
+            if (c === '\\') {
+                if (i === glob.length - 1) {
+                    re += '\\\\';
+                }
+                else {
+                    escaping = true;
+                }
+                continue;
+            }
+            if (c === '[') {
+                const [src, needUflag, consumed, magic] = parseClass(glob, i);
+                if (consumed) {
+                    re += src;
+                    uflag = uflag || needUflag;
+                    i += consumed - 1;
+                    hasMagic = hasMagic || magic;
+                    continue;
+                }
+            }
+            if (c === '*') {
+                if (noEmpty && glob === '*')
+                    re += starNoEmpty;
+                else
+                    re += star;
+                hasMagic = true;
+                continue;
+            }
+            if (c === '?') {
+                re += qmark;
+                hasMagic = true;
+                continue;
+            }
+            re += regExpEscape(c);
+        }
+        return [re, unescape(glob), !!hasMagic, uflag];
+    }
+}
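+// Editorial note, not part of upstream minimatch: as a rough illustration of
+// the API above (names and results assume default options), parsing a single
+// extglob path portion and compiling it might look like:
+//   AST.fromGlob('@(b|c)').toMMPattern();   // ~> /^(?:b|c)$/ (a RegExp, since
+//                                           //    the pattern contains magic)
+//   AST.fromGlob('plain').toMMPattern();    // ~> 'plain' (no magic, so the
+//                                           //    unescaped string is returned)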
+//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js
new file mode 100644
index 0000000000000..c629d6ae816e2
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js
@@ -0,0 +1,148 @@
+// translate the various posix character classes into unicode properties
+// this works across all unicode locales
+// { <posix class>: [<translation>, /u flag required, negated]
+const posixClasses = {
+    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
+    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
+    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
+    '[:blank:]': ['\\p{Zs}\\t', true],
+    '[:cntrl:]': ['\\p{Cc}', true],
+    '[:digit:]': ['\\p{Nd}', true],
+    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
+    '[:lower:]': ['\\p{Ll}', true],
+    '[:print:]': ['\\p{C}', true],
+    '[:punct:]': ['\\p{P}', true],
+    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
+    '[:upper:]': ['\\p{Lu}', true],
+    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
+    '[:xdigit:]': ['A-Fa-f0-9', false],
+};
+// only need to escape a few things inside of brace expressions
+// escapes: [ \ ] -
+const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
+// escape all regexp magic characters
+const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// everything has already been escaped, we just have to join
+const rangesToString = (ranges) => ranges.join('');
+// takes a glob string at a posix brace expression, and returns
+// an equivalent regular expression source, and boolean indicating
+// whether the /u flag needs to be applied, and the number of chars
+// consumed to parse the character class.
+// This also removes out of order ranges, and returns ($.) if the
+// entire class is just no good.
+export const parseClass = (glob, position) => {
+    const pos = position;
+    /* c8 ignore start */
+    if (glob.charAt(pos) !== '[') {
+        throw new Error('not in a brace expression');
+    }
+    /* c8 ignore stop */
+    const ranges = [];
+    const negs = [];
+    let i = pos + 1;
+    let sawStart = false;
+    let uflag = false;
+    let escaping = false;
+    let negate = false;
+    let endPos = pos;
+    let rangeStart = '';
+    WHILE: while (i < glob.length) {
+        const c = glob.charAt(i);
+        if ((c === '!' || c === '^') && i === pos + 1) {
+            negate = true;
+            i++;
+            continue;
+        }
+        if (c === ']' && sawStart && !escaping) {
+            endPos = i + 1;
+            break;
+        }
+        sawStart = true;
+        if (c === '\\') {
+            if (!escaping) {
+                escaping = true;
+                i++;
+                continue;
+            }
+            // escaped \ char, fall through and treat like normal char
+        }
+        if (c === '[' && !escaping) {
+            // either a posix class, a collation equivalent, or just a [
+            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+                if (glob.startsWith(cls, i)) {
+                    // invalid, [a-[] is fine, but not [a-[:alpha]]
+                    if (rangeStart) {
+                        return ['$.', false, glob.length - pos, true];
+                    }
+                    i += cls.length;
+                    if (neg)
+                        negs.push(unip);
+                    else
+                        ranges.push(unip);
+                    uflag = uflag || u;
+                    continue WHILE;
+                }
+            }
+        }
+        // now it's just a normal character, effectively
+        escaping = false;
+        if (rangeStart) {
+            // throw this range away if it's not valid, but others
+            // can still match.
+            if (c > rangeStart) {
+                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
+            }
+            else if (c === rangeStart) {
+                ranges.push(braceEscape(c));
+            }
+            rangeStart = '';
+            i++;
+            continue;
+        }
+        // now might be the start of a range.
+        // can be either c-d or c-] or c<more...>] or c] at this point
+        if (glob.startsWith('-]', i + 1)) {
+            ranges.push(braceEscape(c + '-'));
+            i += 2;
+            continue;
+        }
+        if (glob.startsWith('-', i + 1)) {
+            rangeStart = c;
+            i += 2;
+            continue;
+        }
+        // not the start of a range, just a single character
+        ranges.push(braceEscape(c));
+        i++;
+    }
+    if (endPos < i) {
+        // didn't see the end of the class, not a valid class,
+        // but might still be valid as a literal match.
+        return ['', false, 0, false];
+    }
+    // if we got no ranges and no negates, then we have a range that
+    // cannot possibly match anything, and that poisons the whole glob
+    if (!ranges.length && !negs.length) {
+        return ['$.', false, glob.length - pos, true];
+    }
+    // if we got one positive range, and it's a single character, then that's
+    // not actually a magic pattern, it's just that one literal character.
+    // we should not treat that as "magic", we should just return the literal
+    // character. [_] is a perfectly valid way to escape glob magic chars.
+    if (negs.length === 0 &&
+        ranges.length === 1 &&
+        /^\\?.$/.test(ranges[0]) &&
+        !negate) {
+        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+        return [regexpEscape(r), false, endPos - pos, false];
+    }
+    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
+    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
+    const comb = ranges.length && negs.length
+        ? '(' + sranges + '|' + snegs + ')'
+        : ranges.length
+            ? sranges
+            : snegs;
+    return [comb, uflag, endPos - pos, true];
+};
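+// Editorial note, not part of upstream minimatch: by way of example, one would
+// expect a posix class to translate roughly as follows:
+//   parseClass('[[:alpha:]]', 0)
+//   // ~> ['[\\p{L}\\p{Nl}]', true, 11, true]
+//   //    regexp source, /u flag needed, 11 chars consumed, pattern is magic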
+//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js
new file mode 100644
index 0000000000000..16f7c8c7bdc64
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js
@@ -0,0 +1,18 @@
+/**
+ * Escape all magic characters in a glob pattern.
+ *
+ * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
+ * option is used, then characters are escaped by wrapping in `[]`, because
+ * a magic character wrapped in a character class can only be satisfied by
+ * that exact character.  In this mode, `\` is _not_ escaped, because it is
+ * not interpreted as a magic character, but instead as a path separator.
+ */
+export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    // don't need to escape +@! because we escape the parens
+    // that make those magic, and escaping ! as [!] isn't valid,
+    // because [!]] is a valid glob class meaning not ']'.
+    return windowsPathsNoEscape
+        ? s.replace(/[?*()[\]]/g, '[$&]')
+        : s.replace(/[?*()[\]\\]/g, '\\$&');
+};
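+// Editorial note, not part of upstream minimatch: expected behavior of the two
+// escaping modes described above:
+//   escape('a*b?')                                  // ~> 'a\\*b\\?'
+//   escape('a*b?', { windowsPathsNoEscape: true })  // ~> 'a[*]b[?]'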
+//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js
new file mode 100644
index 0000000000000..831b6a67f63fb
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js
@@ -0,0 +1,995 @@
+import expand from 'brace-expansion';
+import { assertValidPattern } from './assert-valid-pattern.js';
+import { AST } from './ast.js';
+import { escape } from './escape.js';
+import { unescape } from './unescape.js';
+export const minimatch = (p, pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // shortcut: comments match nothing.
+    if (!options.nocomment && pattern.charAt(0) === '#') {
+        return false;
+    }
+    return new Minimatch(pattern, options).match(p);
+};
+// Optimized checking for the most common glob patterns.
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
+const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
+const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
+const starDotExtTestNocase = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
+};
+const starDotExtTestNocaseDot = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => f.toLowerCase().endsWith(ext);
+};
+const starDotStarRE = /^\*+\.\*+$/;
+const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
+const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
+const dotStarRE = /^\.\*+$/;
+const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
+const starRE = /^\*+$/;
+const starTest = (f) => f.length !== 0 && !f.startsWith('.');
+const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
+const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
+const qmarksTestNocase = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestNocaseDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTest = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTestNoExt = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && !f.startsWith('.');
+};
+const qmarksTestNoExtDot = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && f !== '.' && f !== '..';
+};
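+// Editorial note, not part of upstream minimatch: these helpers let common
+// patterns like '*.js', '.*', or '???' be tested without compiling a RegExp.
+// For instance, one would expect:
+//   starDotExtTest('.js')('a.js')    // ~> true
+//   starDotExtTest('.js')('.a.js')   // ~> false (dotfiles excluded by default)
+//   dotStarTest('.hidden')           // ~> true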
+/* c8 ignore start */
+const defaultPlatform = (typeof process === 'object' && process
+    ? (typeof process.env === 'object' &&
+        process.env &&
+        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
+        process.platform
+    : 'posix');
+const path = {
+    win32: { sep: '\\' },
+    posix: { sep: '/' },
+};
+/* c8 ignore stop */
+export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
+minimatch.sep = sep;
+export const GLOBSTAR = Symbol('globstar **');
+minimatch.GLOBSTAR = GLOBSTAR;
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
+export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
+minimatch.filter = filter;
+const ext = (a, b = {}) => Object.assign({}, a, b);
+export const defaults = (def) => {
+    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+        return minimatch;
+    }
+    const orig = minimatch;
+    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
+    return Object.assign(m, {
+        Minimatch: class Minimatch extends orig.Minimatch {
+            constructor(pattern, options = {}) {
+                super(pattern, ext(def, options));
+            }
+            static defaults(options) {
+                return orig.defaults(ext(def, options)).Minimatch;
+            }
+        },
+        AST: class AST extends orig.AST {
+            /* c8 ignore start */
+            constructor(type, parent, options = {}) {
+                super(type, parent, ext(def, options));
+            }
+            /* c8 ignore stop */
+            static fromGlob(pattern, options = {}) {
+                return orig.AST.fromGlob(pattern, ext(def, options));
+            }
+        },
+        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
+        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
+        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
+        defaults: (options) => orig.defaults(ext(def, options)),
+        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
+        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
+        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
+        sep: orig.sep,
+        GLOBSTAR: GLOBSTAR,
+    });
+};
+minimatch.defaults = defaults;
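+// Editorial note, not part of upstream minimatch: defaults() produces a
+// minimatch with baked-in options, e.g. (illustrative only):
+//   const mmi = minimatch.defaults({ nocase: true });
+//   mmi('FOO.txt', '*.txt');  // ~> true, nocase is applied automatically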
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+export const braceExpand = (pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // Thanks to Yeting Li <https://github.com/yetingli> for
+    // improving this regexp to avoid a ReDOS vulnerability.
+    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+        // shortcut. no need to expand.
+        return [pattern];
+    }
+    return expand(pattern);
+};
+minimatch.braceExpand = braceExpand;
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
+minimatch.makeRe = makeRe;
+export const match = (list, pattern, options = {}) => {
+    const mm = new Minimatch(pattern, options);
+    list = list.filter(f => mm.match(f));
+    if (mm.options.nonull && !list.length) {
+        list.push(pattern);
+    }
+    return list;
+};
+minimatch.match = match;
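+// Editorial note, not part of upstream minimatch: match() filters a list, and
+// with nonull it falls back to the pattern itself, e.g. (expected results):
+//   match(['a.js', 'b.txt'], '*.js')             // ~> ['a.js']
+//   match(['b.txt'], '*.js', { nonull: true })   // ~> ['*.js']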
+// replace stuff like \* with *
+const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+export class Minimatch {
+    options;
+    set;
+    pattern;
+    windowsPathsNoEscape;
+    nonegate;
+    negate;
+    comment;
+    empty;
+    preserveMultipleSlashes;
+    partial;
+    globSet;
+    globParts;
+    nocase;
+    isWindows;
+    platform;
+    windowsNoMagicRoot;
+    regexp;
+    constructor(pattern, options = {}) {
+        assertValidPattern(pattern);
+        options = options || {};
+        this.options = options;
+        this.pattern = pattern;
+        this.platform = options.platform || defaultPlatform;
+        this.isWindows = this.platform === 'win32';
+        this.windowsPathsNoEscape =
+            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
+        if (this.windowsPathsNoEscape) {
+            this.pattern = this.pattern.replace(/\\/g, '/');
+        }
+        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
+        this.regexp = null;
+        this.negate = false;
+        this.nonegate = !!options.nonegate;
+        this.comment = false;
+        this.empty = false;
+        this.partial = !!options.partial;
+        this.nocase = !!this.options.nocase;
+        this.windowsNoMagicRoot =
+            options.windowsNoMagicRoot !== undefined
+                ? options.windowsNoMagicRoot
+                : !!(this.isWindows && this.nocase);
+        this.globSet = [];
+        this.globParts = [];
+        this.set = [];
+        // make the set of regexps etc.
+        this.make();
+    }
+    hasMagic() {
+        if (this.options.magicalBraces && this.set.length > 1) {
+            return true;
+        }
+        for (const pattern of this.set) {
+            for (const part of pattern) {
+                if (typeof part !== 'string')
+                    return true;
+            }
+        }
+        return false;
+    }
+    debug(..._) { }
+    make() {
+        const pattern = this.pattern;
+        const options = this.options;
+        // empty patterns and comments match nothing.
+        if (!options.nocomment && pattern.charAt(0) === '#') {
+            this.comment = true;
+            return;
+        }
+        if (!pattern) {
+            this.empty = true;
+            return;
+        }
+        // step 1: figure out negation, etc.
+        this.parseNegate();
+        // step 2: expand braces
+        this.globSet = [...new Set(this.braceExpand())];
+        if (options.debug) {
+            this.debug = (...args) => console.error(...args);
+        }
+        this.debug(this.pattern, this.globSet);
+        // step 3: now we have a set, so turn each one into a series of
+        // path-portion matching patterns.
+        // These will be regexps, except in the case of "**", which is
+        // set to the GLOBSTAR object for globstar behavior,
+        // and will not contain any / characters
+        //
+        // First, we preprocess to make the glob pattern sets a bit simpler
+        // and deduped.  There are some perf-killing patterns that can cause
+        // problems with a glob walk, but we can simplify them down a bit.
+        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
+        this.globParts = this.preprocess(rawGlobParts);
+        this.debug(this.pattern, this.globParts);
+        // glob --> regexps
+        let set = this.globParts.map((s, _, __) => {
+            if (this.isWindows && this.windowsNoMagicRoot) {
+                // check if it's a drive or unc path.
+                const isUNC = s[0] === '' &&
+                    s[1] === '' &&
+                    (s[2] === '?' || !globMagic.test(s[2])) &&
+                    !globMagic.test(s[3]);
+                const isDrive = /^[a-z]:/i.test(s[0]);
+                if (isUNC) {
+                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
+                }
+                else if (isDrive) {
+                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
+                }
+            }
+            return s.map(ss => this.parse(ss));
+        });
+        this.debug(this.pattern, set);
+        // filter out everything that didn't compile properly.
+        this.set = set.filter(s => s.indexOf(false) === -1);
+        // do not treat the ? in UNC paths as magic
+        if (this.isWindows) {
+            for (let i = 0; i < this.set.length; i++) {
+                const p = this.set[i];
+                if (p[0] === '' &&
+                    p[1] === '' &&
+                    this.globParts[i][2] === '?' &&
+                    typeof p[3] === 'string' &&
+                    /^[a-z]:$/i.test(p[3])) {
+                    p[2] = '?';
+                }
+            }
+        }
+        this.debug(this.pattern, this.set);
+    }
+    // various transforms to equivalent pattern sets that are
+    // faster to process in a filesystem walk.  The goal is to
+    // eliminate what we can, and push all ** patterns as far
+    // to the right as possible, even if it increases the number
+    // of patterns that we have to process.
+    preprocess(globParts) {
+        // if we're not in globstar mode, then turn all ** into *
+        if (this.options.noglobstar) {
+            for (let i = 0; i < globParts.length; i++) {
+                for (let j = 0; j < globParts[i].length; j++) {
+                    if (globParts[i][j] === '**') {
+                        globParts[i][j] = '*';
+                    }
+                }
+            }
+        }
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            // aggressive optimization for the purpose of fs walking
+            globParts = this.firstPhasePreProcess(globParts);
+            globParts = this.secondPhasePreProcess(globParts);
+        }
+        else if (optimizationLevel >= 1) {
+            // just basic optimizations to remove some .. parts
+            globParts = this.levelOneOptimize(globParts);
+        }
+        else {
+            globParts = this.adjascentGlobstarOptimize(globParts);
+        }
+        return globParts;
+    }
+    // just get rid of adjacent ** portions
+    adjascentGlobstarOptimize(globParts) {
+        return globParts.map(parts => {
+            let gs = -1;
+            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                let i = gs;
+                while (parts[i + 1] === '**') {
+                    i++;
+                }
+                if (i !== gs) {
+                    parts.splice(gs, i - gs);
+                }
+            }
+            return parts;
+        });
+    }
+    // get rid of adjacent ** and resolve .. portions
+    levelOneOptimize(globParts) {
+        return globParts.map(parts => {
+            parts = parts.reduce((set, part) => {
+                const prev = set[set.length - 1];
+                if (part === '**' && prev === '**') {
+                    return set;
+                }
+                if (part === '..') {
+                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
+                        set.pop();
+                        return set;
+                    }
+                }
+                set.push(part);
+                return set;
+            }, []);
+            return parts.length === 0 ? [''] : parts;
+        });
+    }
+    levelTwoFileOptimize(parts) {
+        if (!Array.isArray(parts)) {
+            parts = this.slashSplit(parts);
+        }
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>// -> <pre>/
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>// -> <pre>/
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>// -> <pre>/
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doesn't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (!matched)
+                    continue;
+                globParts[i] = matched;
+                globParts[j] = [];
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means the two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
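+    // Editorial note, not part of upstream minimatch: parseNegate() toggles on
+    // each leading '!', so (expected behavior, default options):
+    //   new Minimatch('!*.js').negate    // ~> true, pattern becomes '*.js'
+    //   new Minimatch('!!*.js').negate   // ~> false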
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match a
+        // pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
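+    // A rough sketch of the "partial" behavior described above (illustrative
+    // comment, not part of the shipped file):
+    //
+    //   const mm = new Minimatch('/*/b/*/d')
+    //   mm.match('/a/b', true)  // => true: file ran out, every part matched
+    //   mm.match('/a/x', true)  // => false: 'x' does not match 'b'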
+    braceExpand() {
+        return braceExpand(this.pattern, this.options);
+    }
+    parse(pattern) {
+        assertValidPattern(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = AST.fromGlob(pattern, this.options).toMMPattern();
+        return fastTest ? Object.assign(re, { test: fastTest }) : re;
+    }
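+    // Quick sketch of the shortcut returns above (illustrative comment only;
+    // `m` stands for any Minimatch instance):
+    //
+    //   m.parse('**')    // => GLOBSTAR, the special globstar marker
+    //   m.parse('')      // => '', matches only an empty segment
+    //   m.parse('*.js')  // => a regex-like matcher with a fast test() attached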
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === GLOBSTAR
+                        ? GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== GLOBSTAR || prev === GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
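+    // Illustrative usage (comment only, not part of the shipped file):
+    // makeRe() folds the whole pattern set into one anchored RegExp.
+    //
+    //   const re = new Minimatch('*.js').makeRe()
+    //   re.test('foo.js')    // => true
+    //   re.test('foo/x.js')  // => false: '*' never crosses a '/'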
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
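+    // Sketch of the behavior described above (hypothetical inputs,
+    // illustrative comment only):
+    //
+    //   slashSplit('a//b/c')        // => ['a', 'b', 'c']: slashes coalesced
+    //   slashSplit('//host/share')  // on Windows => ['', '', 'host', 'share']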
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
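+    // Illustrative examples of the negate/matchBase handling above (comment
+    // only, not part of the shipped file):
+    //
+    //   new Minimatch('!*.js').match('a.js')  // => false: negated pattern hit
+    //   new Minimatch('!*.js').match('a.md')  // => true: no hit, negation wins
+    //   new Minimatch('*.js', { matchBase: true }).match('x/y/a.js')  // => true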
+    static defaults(def) {
+        return minimatch.defaults(def).Minimatch;
+    }
+}
+/* c8 ignore start */
+export { AST } from './ast.js';
+export { escape } from './escape.js';
+export { unescape } from './unescape.js';
+/* c8 ignore stop */
+minimatch.AST = AST;
+minimatch.Minimatch = Minimatch;
+minimatch.escape = escape;
+minimatch.unescape = unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js
new file mode 100644
index 0000000000000..0faf9a2b7306f
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js
@@ -0,0 +1,20 @@
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
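+// A couple of illustrative calls (comment only, not part of the shipped file):
+//
+//   unescape('[*]')   // => '*' in both modes
+//   unescape('\\*')   // => '*' by default, but left as-is when
+//                     //    windowsPathsNoEscape is true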
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json
new file mode 100644
index 0000000000000..061c3b9f34330
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json
@@ -0,0 +1,86 @@
+{
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
+  "name": "minimatch",
+  "description": "a glob matcher in javascript",
+  "version": "9.0.3",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/minimatch.git"
+  },
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/cjs/index.d.ts",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
+    "postprepare": "bash fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn",
+    "benchmark": "node benchmark/index.js",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "engines": {
+    "node": ">=16 || 14 >=14.17"
+  },
+  "dependencies": {
+    "brace-expansion": "^2.0.1"
+  },
+  "devDependencies": {
+    "@types/brace-expansion": "^1.1.0",
+    "@types/node": "^18.15.11",
+    "@types/tap": "^15.0.8",
+    "c8": "^7.12.0",
+    "eslint-config-prettier": "^8.6.0",
+    "mkdirp": "1",
+    "prettier": "^2.8.2",
+    "tap": "^16.3.7",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.23.21",
+    "typescript": "^4.9.3"
+  },
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "license": "ISC"
+}
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE
new file mode 100644
index 0000000000000..97f8e32ed82e4
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
new file mode 100644
index 0000000000000..b6cdae8eb514b
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
@@ -0,0 +1,1028 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0;
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+const events_1 = require("events");
+const stream_1 = __importDefault(require("stream"));
+const string_decoder_1 = require("string_decoder");
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof stream_1.default ||
+        (0, exports.isReadable)(s) ||
+        (0, exports.isWritable)(s));
+exports.isStream = isStream;
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof events_1.EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== stream_1.default.Writable.prototype.pipe;
+exports.isReadable = isReadable;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof events_1.EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
+exports.isWritable = isWritable;
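+// Rough usage sketch for the three guards above (comment only, not part of
+// the shipped file):
+//
+//   isStream(new Minipass())   // => true
+//   isStream(process.stdout)   // => true: a node core stream
+//   isStream({})               // => false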
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+class Minipass extends events_1.EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new string_decoder_1.StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer streams, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior.  Ie, a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything else is only allowed in objectMode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of the same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
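+    // Illustrative backpressure sketch (comment only, not part of the shipped
+    // file): write() returns the flowing state, so it reports false until a
+    // consumer attaches.
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.write('hello')           // => false: buffered, nobody reading yet
+    //   mp.on('data', () => {})     // attaching a listener starts the flow
+    //   mp.write('world')           // => true: emitted immediately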
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
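+    // Example of the read(n) semantics above (comment only, not shipped code):
+    //
+    //   const mp = new Minipass()
+    //   mp.write(Buffer.from('abcdef'))
+    //   mp.read(3)   // => Buffer for 'abc'
+    //   mp.read(10)  // => null: asked for more than the 3 bytes remaining
+    //   mp.read()    // => Buffer for 'def'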
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes MP more suitable to write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
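+    // Minimal pipe sketch (comment only, not shipped code). Note that stdout
+    // and stderr destinations are never end()ed by the pipe.
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.pipe(process.stdout)  // starts the flow
+    //   mp.end('hello\n')        // written through; stdout itself stays open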
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+     *   already passed will cause the event to be emitted immediately and all
+     *   handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
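+    // Sketch of the 'error' re-emit behavior described above (comment only,
+    // not shipped code):
+    //
+    //   const mp = new Minipass()
+    //   mp.on('error', () => {})             // a first handler consumes the emit
+    //   mp.emit('error', new Error('oops'))
+    //   mp.on('error', er => console.error(er.message))  // fires immediately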
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
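+    // Illustrative end-to-end sketch (comment only, not shipped code; run
+    // inside an async function):
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.end('hello')
+    //   await mp.concat()   // => 'hello': collect() plus a join for encodings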
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
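+    // Illustrative consumption sketch (comment only, not shipped code):
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.end('a')
+    //   for await (const chunk of mp) console.log(chunk)  // logs 'a', then ends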
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
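+    // Illustrative (comment only, not shipped code): sync iteration drains
+    // only what is already buffered.
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.write('abc')
+    //   [...mp]  // => ['abc']; iteration stops once the buffer is empty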
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data, it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return exports.isStream;
+    }
+}
+exports.Minipass = Minipass;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
new file mode 100644
index 0000000000000..b65fafbae43a4
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
@@ -0,0 +1,1018 @@
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+import { EventEmitter } from 'events';
+import Stream from 'stream';
+import { StringDecoder } from 'string_decoder';
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+export const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof Stream ||
+        isReadable(s) ||
+        isWritable(s));
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+export const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== Stream.Writable.prototype.pipe;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+export const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+export class Minipass extends EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer streams, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior.  Ie, a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything else is only allowed in object mode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes MP more suitable to write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
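+        // shift chunks out of the buffer and emit them until a 'data' emit
+        // reports not-flowing (consumer backpressure) or the buffer empties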
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+     *   already passed will cause the event to be emitted immediately and all
+     *   handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data, it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return isStream;
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
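For reference, a minimal usage sketch of the Minipass class vendored above, assuming the package resolves as `minipass`; the chunk values and the `main` wrapper are illustrative only:

    import { Minipass } from 'minipass'

    const main = async () => {
      // encoding streams buffer and emit decoded strings
      const mp = new Minipass({ encoding: 'utf8' })
      mp.write('hello, ')
      mp.end('world')
      // concat() resolves once 'end' fires (it throws on objectMode streams)
      console.log(await mp.concat()) // 'hello, world'

      // async iteration consumes chunks as they arrive
      const src = new Minipass({ objectMode: true })
      src.write({ a: 1 })
      src.end()
      for await (const obj of src) {
        console.log(obj) // { a: 1 }
      }
    }

    main()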
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json
new file mode 100644
index 0000000000000..6faaa247a5bc6
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "minipass",
+  "version": "7.0.3",
+  "description": "minimal implementation of a PassThrough stream",
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/cjs/index.js",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--enable-source-maps",
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@types/node": "^20.1.2",
+    "@types/tap": "^15.0.8",
+    "c8": "^7.13.0",
+    "prettier": "^2.6.2",
+    "tap": "^16.3.0",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.24.8",
+    "typescript": "^5.1.3",
+    "end-of-stream": "^1.4.0",
+    "node-abort-controller": "^3.1.1",
+    "sync-content": "^1.0.2",
+    "through2": "^2.0.3"
+  },
+  "repository": "https://github.com/isaacs/minipass",
+  "keywords": [
+    "passthrough",
+    "stream"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "engines": {
+    "node": ">=16 || 14 >=14.17"
+  }
+}
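The "exports" map above is what makes the dual build work: require() resolves the CommonJS build under dist/cjs, import resolves the ESM build under dist/mjs, and the two dist/*/package.json stubs pin the matching "type". A sketch of the two resolution paths (consumer file names are illustrative):

    // consumer.cjs: resolves the "require" condition to ./dist/cjs/index.js
    const { Minipass } = require('minipass')

    // consumer.mjs: resolves the "import" condition to ./dist/mjs/index.js
    import { Minipass } from 'minipass'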
diff --git a/node_modules/node-gyp/node_modules/cacache/package.json b/node_modules/node-gyp/node_modules/cacache/package.json
new file mode 100644
index 0000000000000..ab58cb8b7c50f
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "cacache",
+  "version": "17.1.4",
+  "cache-version": {
+    "content": "2",
+    "index": "5"
+  },
+  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "coverage": "tap",
+    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
+    "lint": "eslint \"**/*.js\"",
+    "npmclilint": "npmcli-lint",
+    "lintfix": "npm run lint -- --fix",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "posttest": "npm run lint",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/cacache.git"
+  },
+  "keywords": [
+    "cache",
+    "caching",
+    "content-addressable",
+    "sri",
+    "sri hash",
+    "subresource integrity",
+    "cache",
+    "storage",
+    "store",
+    "file store",
+    "filesystem",
+    "disk cache",
+    "disk storage"
+  ],
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/fs": "^3.1.0",
+    "fs-minipass": "^3.0.0",
+    "glob": "^10.2.2",
+    "lru-cache": "^7.7.1",
+    "minipass": "^7.0.3",
+    "minipass-collect": "^1.0.2",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "p-map": "^4.0.0",
+    "ssri": "^10.0.0",
+    "tar": "^6.1.11",
+    "unique-filename": "^3.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "windowsCI": false,
+    "version": "4.18.0",
+    "publish": "true"
+  },
+  "author": "GitHub Inc.",
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/node_modules/pacote/node_modules/cacache/LICENSE.md b/node_modules/pacote/node_modules/cacache/LICENSE.md
new file mode 100644
index 0000000000000..8d28acf866d93
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/LICENSE.md
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/path.js b/node_modules/pacote/node_modules/cacache/lib/content/path.js
new file mode 100644
index 0000000000000..ad5a76a4f73f2
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/content/path.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const contentVer = require('../../package.json')['cache-version'].content
+const hashToSegments = require('../util/hash-to-segments')
+const path = require('path')
+const ssri = require('ssri')
+
+// Current format of content file path:
+//
+// sha512-BaSE64Hex= ->
+// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
+//
+module.exports = contentPath
+
+function contentPath (cache, integrity) {
+  const sri = ssri.parse(integrity, { single: true })
+  // contentPath is the *strongest* algo given
+  return path.join(
+    contentDir(cache),
+    sri.algorithm,
+    ...hashToSegments(sri.hexDigest())
+  )
+}
+
+module.exports.contentDir = contentDir
+
+function contentDir (cache) {
+  return path.join(cache, `content-v${contentVer}`)
+}
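A self-contained sketch of the mapping implemented above; hashToSegments is re-derived inline for illustration (the real helper lives in lib/util/hash-to-segments.js), and the digest value is a placeholder:

    const path = require('path')
    // split a hex digest into two 2-char directory segments plus the rest
    const hashToSegments = hex => [hex.slice(0, 2), hex.slice(2, 4), hex.slice(4)]
    const sketchContentPath = (cache, algorithm, hexDigest) =>
      path.join(cache, 'content-v2', algorithm, ...hashToSegments(hexDigest))

    console.log(sketchContentPath('/tmp/my-cache', 'sha512', 'bada55deadbeefc0ffee'))
    // => /tmp/my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee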
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/read.js b/node_modules/pacote/node_modules/cacache/lib/content/read.js
new file mode 100644
index 0000000000000..f41b539df65dc
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/content/read.js
@@ -0,0 +1,166 @@
+'use strict'
+
+const fs = require('fs/promises')
+const fsm = require('fs-minipass')
+const ssri = require('ssri')
+const contentPath = require('./path')
+const Pipeline = require('minipass-pipeline')
+
+module.exports = read
+
+const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
+async function read (cache, integrity, opts = {}) {
+  const { size } = opts
+  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+    // get size
+    const stat = await fs.stat(cpath)
+    return { stat, cpath, sri }
+  })
+  if (typeof size === 'number' && stat.size !== size) {
+    throw sizeError(size, stat.size)
+  }
+
+  if (stat.size > MAX_SINGLE_READ_SIZE) {
+    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
+  }
+
+  const data = await fs.readFile(cpath, { encoding: null })
+  if (!ssri.checkData(data, sri)) {
+    throw integrityError(sri, cpath)
+  }
+
+  return data
+}
+
+const readPipeline = (cpath, size, sri, stream) => {
+  stream.push(
+    new fsm.ReadStream(cpath, {
+      size,
+      readSize: MAX_SINGLE_READ_SIZE,
+    }),
+    ssri.integrityStream({
+      integrity: sri,
+      size,
+    })
+  )
+  return stream
+}
+
+module.exports.stream = readStream
+module.exports.readStream = readStream
+
+function readStream (cache, integrity, opts = {}) {
+  const { size } = opts
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+      // just stat to ensure it exists
+      const stat = await fs.stat(cpath)
+      return { stat, cpath, sri }
+    })
+    if (typeof size === 'number' && size !== stat.size) {
+      return stream.emit('error', sizeError(size, stat.size))
+    }
+
+    return readPipeline(cpath, stat.size, sri, stream)
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.copy = copy
+
+function copy (cache, integrity, dest) {
+  return withContentSri(cache, integrity, (cpath, sri) => {
+    return fs.copyFile(cpath, dest)
+  })
+}
+
+module.exports.hasContent = hasContent
+
+async function hasContent (cache, integrity) {
+  if (!integrity) {
+    return false
+  }
+
+  try {
+    return await withContentSri(cache, integrity, async (cpath, sri) => {
+      const stat = await fs.stat(cpath)
+      return { size: stat.size, sri, stat }
+    })
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return false
+    }
+
+    if (err.code === 'EPERM') {
+      /* istanbul ignore else */
+      if (process.platform !== 'win32') {
+        throw err
+      } else {
+        return false
+      }
+    }
+  }
+}
+
+async function withContentSri (cache, integrity, fn) {
+  const sri = ssri.parse(integrity)
+  // If `integrity` has multiple entries, pick the first digest
+  // with available local data.
+  const algo = sri.pickAlgorithm()
+  const digests = sri[algo]
+
+  if (digests.length <= 1) {
+    const cpath = contentPath(cache, digests[0])
+    return fn(cpath, digests[0])
+  } else {
+    // Can't use race here because a generic error can happen before
+    // an ENOENT error, and can happen before a valid result
+    const results = await Promise.all(digests.map(async (meta) => {
+      try {
+        return await withContentSri(cache, meta, fn)
+      } catch (err) {
+        if (err.code === 'ENOENT') {
+          return Object.assign(
+            new Error('No matching content found for ' + sri.toString()),
+            { code: 'ENOENT' }
+          )
+        }
+        return err
+      }
+    }))
+    // Return the first non-error result, if one is found
+    const result = results.find((r) => !(r instanceof Error))
+    if (result) {
+      return result
+    }
+
+    // Throw the No matching content found error
+    const enoentError = results.find((r) => r.code === 'ENOENT')
+    if (enoentError) {
+      throw enoentError
+    }
+
+    // Throw generic error
+    throw results.find((r) => r instanceof Error)
+  }
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function integrityError (sri, path) {
+  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
+  err.code = 'EINTEGRITY'
+  err.sri = sri
+  err.path = path
+  return err
+}
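In practice this module is reached through cacache's public API; a hedged sketch, with the cache path and integrity value as placeholders:

    const cacache = require('cacache')

    const main = async () => {
      const integrity = 'sha512-...' // placeholder; a real sri comes from put()
      // get.byDigest() funnels through read() above: stat, optional size
      // check, then ssri verification before the buffer is returned
      const data = await cacache.get.byDigest('/tmp/my-cache', integrity)
      console.log(data.length)
    }

    main().catch(console.error)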
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/rm.js b/node_modules/pacote/node_modules/cacache/lib/content/rm.js
new file mode 100644
index 0000000000000..ce58d679e4cb2
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/content/rm.js
@@ -0,0 +1,18 @@
+'use strict'
+
+const fs = require('fs/promises')
+const contentPath = require('./path')
+const { hasContent } = require('./read')
+
+module.exports = rm
+
+async function rm (cache, integrity) {
+  const content = await hasContent(cache, integrity)
+  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
+  if (content && content.sri) {
+    await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
+    return true
+  } else {
+    return false
+  }
+}
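The public entry point for this helper is cacache.rm.content; a minimal sketch (placeholders as above):

    const cacache = require('cacache')
    const integrity = 'sha512-...' // placeholder
    // resolves true if the content was removed, false if it was not present
    cacache.rm.content('/tmp/my-cache', integrity).then(console.log)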
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/write.js b/node_modules/pacote/node_modules/cacache/lib/content/write.js
new file mode 100644
index 0000000000000..7146146581287
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/content/write.js
@@ -0,0 +1,205 @@
+'use strict'
+
+const events = require('events')
+
+const contentPath = require('./path')
+const fs = require('fs/promises')
+const { moveFile } = require('@npmcli/fs')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+const Flush = require('minipass-flush')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+const fsm = require('fs-minipass')
+
+module.exports = write
+
+// Cache of move operations in progress so we don't duplicate them
+const moveOperations = new Map()
+
+async function write (cache, data, opts = {}) {
+  const { algorithms, size, integrity } = opts
+
+  if (typeof size === 'number' && data.length !== size) {
+    throw sizeError(size, data.length)
+  }
+
+  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
+  if (integrity && !ssri.checkData(data, integrity, opts)) {
+    throw checksumError(integrity, sri)
+  }
+
+  for (const algo in sri) {
+    const tmp = await makeTmp(cache, opts)
+    const hash = sri[algo].toString()
+    try {
+      await fs.writeFile(tmp.target, data, { flag: 'wx' })
+      await moveToDestination(tmp, cache, hash, opts)
+    } finally {
+      if (!tmp.moved) {
+        await fs.rm(tmp.target, { recursive: true, force: true })
+      }
+    }
+  }
+  return { integrity: sri, size: data.length }
+}
+
+module.exports.stream = writeStream
+
+// writes are proxied to the 'inputStream', which is handed off to
+// handleContent(); 'end' is deferred until the content is handled.
+class CacacheWriteStream extends Flush {
+  constructor (cache, opts) {
+    super()
+    this.opts = opts
+    this.cache = cache
+    this.inputStream = new Minipass()
+    this.inputStream.on('error', er => this.emit('error', er))
+    this.inputStream.on('drain', () => this.emit('drain'))
+    this.handleContentP = null
+  }
+
+  write (chunk, encoding, cb) {
+    if (!this.handleContentP) {
+      this.handleContentP = handleContent(
+        this.inputStream,
+        this.cache,
+        this.opts
+      )
+    }
+    return this.inputStream.write(chunk, encoding, cb)
+  }
+
+  flush (cb) {
+    this.inputStream.end(() => {
+      if (!this.handleContentP) {
+        const e = new Error('Cache input stream was empty')
+        e.code = 'ENODATA'
+        // empty streams are probably emitting end right away.
+        // defer this one tick by rejecting a promise on it.
+        return Promise.reject(e).catch(cb)
+      }
+      // eslint-disable-next-line promise/catch-or-return
+      this.handleContentP.then(
+        (res) => {
+          res.integrity && this.emit('integrity', res.integrity)
+          // eslint-disable-next-line promise/always-return
+          res.size !== null && this.emit('size', res.size)
+          cb()
+        },
+        (er) => cb(er)
+      )
+    })
+  }
+}
+
+function writeStream (cache, opts = {}) {
+  return new CacacheWriteStream(cache, opts)
+}
+
+async function handleContent (inputStream, cache, opts) {
+  const tmp = await makeTmp(cache, opts)
+  try {
+    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
+    await moveToDestination(
+      tmp,
+      cache,
+      res.integrity,
+      opts
+    )
+    return res
+  } finally {
+    if (!tmp.moved) {
+      await fs.rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+}
+
+async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
+  const outStream = new fsm.WriteStream(tmpTarget, {
+    flags: 'wx',
+  })
+
+  if (opts.integrityEmitter) {
+    // we need to create these all simultaneously since they can fire in any order
+    const [integrity, size] = await Promise.all([
+      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
+      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
+      new Pipeline(inputStream, outStream).promise(),
+    ])
+    return { integrity, size }
+  }
+
+  let integrity
+  let size
+  const hashStream = ssri.integrityStream({
+    integrity: opts.integrity,
+    algorithms: opts.algorithms,
+    size: opts.size,
+  })
+  hashStream.on('integrity', i => {
+    integrity = i
+  })
+  hashStream.on('size', s => {
+    size = s
+  })
+
+  const pipeline = new Pipeline(inputStream, hashStream, outStream)
+  await pipeline.promise()
+  return { integrity, size }
+}
+
+async function makeTmp (cache, opts) {
+  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
+  return {
+    target: tmpTarget,
+    moved: false,
+  }
+}
+
+async function moveToDestination (tmp, cache, sri, opts) {
+  const destination = contentPath(cache, sri)
+  const destDir = path.dirname(destination)
+  if (moveOperations.has(destination)) {
+    return moveOperations.get(destination)
+  }
+  moveOperations.set(
+    destination,
+    fs.mkdir(destDir, { recursive: true })
+      .then(async () => {
+        await moveFile(tmp.target, destination, { overwrite: false })
+        tmp.moved = true
+        return tmp.moved
+      })
+      .catch(err => {
+        if (!err.message.startsWith('The destination file exists')) {
+          throw Object.assign(err, { code: 'EEXIST' })
+        }
+      }).finally(() => {
+        moveOperations.delete(destination)
+      })
+
+  )
+  return moveOperations.get(destination)
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function checksumError (expected, found) {
+  const err = new Error(`Integrity check failed:
+  Wanted: ${expected}
+   Found: ${found}`)
+  err.code = 'EINTEGRITY'
+  err.expected = expected
+  err.found = found
+  return err
+}
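A round-trip sketch through cacache's public API, which funnels writes through this module; the cache path and key are placeholders:

    const cacache = require('cacache')

    const main = async () => {
      // put() resolves to the integrity it computed via write() above
      const integrity = await cacache.put('/tmp/my-cache', 'my-key', Buffer.from('hello'))
      const { data } = await cacache.get('/tmp/my-cache', 'my-key')
      console.log(data.toString(), String(integrity))
    }

    main().catch(console.error)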
diff --git a/node_modules/pacote/node_modules/cacache/lib/entry-index.js b/node_modules/pacote/node_modules/cacache/lib/entry-index.js
new file mode 100644
index 0000000000000..722a37af5ce15
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/entry-index.js
@@ -0,0 +1,330 @@
+'use strict'
+
+const crypto = require('crypto')
+const {
+  appendFile,
+  mkdir,
+  readFile,
+  readdir,
+  rm,
+  writeFile,
+} = require('fs/promises')
+const { Minipass } = require('minipass')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+
+const contentPath = require('./content/path')
+const hashToSegments = require('./util/hash-to-segments')
+const indexV = require('../package.json')['cache-version'].index
+const { moveFile } = require('@npmcli/fs')
+
+module.exports.NotFoundError = class NotFoundError extends Error {
+  constructor (cache, key) {
+    super(`No cache entry for ${key} found in ${cache}`)
+    this.code = 'ENOENT'
+    this.cache = cache
+    this.key = key
+  }
+}
+
+module.exports.compact = compact
+
+async function compact (cache, key, matchFn, opts = {}) {
+  const bucket = bucketPath(cache, key)
+  const entries = await bucketEntries(bucket)
+  const newEntries = []
+  // we loop backwards because the bottom-most result is the newest
+  // since we add new entries with appendFile
+  for (let i = entries.length - 1; i >= 0; --i) {
+    const entry = entries[i]
+    // a null integrity could mean either a delete was appended
+    // or the user has simply stored an index that does not map
+    // to any content. we determine if the user wants to keep the
+    // null integrity based on the validateEntry function passed in options.
+    // if the integrity is null and no validateEntry is provided, we break
+    // as we consider the null integrity to be a deletion of everything
+    // that came before it.
+    if (entry.integrity === null && !opts.validateEntry) {
+      break
+    }
+
+    // if this entry is valid, and it is either the first entry or
+    // the newEntries array doesn't already include an entry that
+    // matches this one based on the provided matchFn, then we add
+    // it to the beginning of our list
+    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
+      (newEntries.length === 0 ||
+        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
+      newEntries.unshift(entry)
+    }
+  }
+
+  const newIndex = '\n' + newEntries.map((entry) => {
+    const stringified = JSON.stringify(entry)
+    const hash = hashEntry(stringified)
+    return `${hash}\t${stringified}`
+  }).join('\n')
+
+  const setup = async () => {
+    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+    await mkdir(path.dirname(target), { recursive: true })
+    return {
+      target,
+      moved: false,
+    }
+  }
+
+  const teardown = async (tmp) => {
+    if (!tmp.moved) {
+      return rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+
+  const write = async (tmp) => {
+    await writeFile(tmp.target, newIndex, { flag: 'wx' })
+    await mkdir(path.dirname(bucket), { recursive: true })
+    // we use @npmcli/move-file directly here because we
+    // want to overwrite the existing file
+    await moveFile(tmp.target, bucket)
+    tmp.moved = true
+  }
+
+  // write the file atomically
+  const tmp = await setup()
+  try {
+    await write(tmp)
+  } finally {
+    await teardown(tmp)
+  }
+
+  // we reverse the list we generated so that the newest entries come
+  // first, which makes looping through them easier. the true passed to
+  // formatEntry tells it to keep null integrity values: if they made it
+  // this far, it's because validateEntry returned true, so we should
+  // return them
+  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
+}
+
+module.exports.insert = insert
+
+async function insert (cache, key, integrity, opts = {}) {
+  const { metadata, size, time } = opts
+  const bucket = bucketPath(cache, key)
+  const entry = {
+    key,
+    integrity: integrity && ssri.stringify(integrity),
+    time: time || Date.now(),
+    size,
+    metadata,
+  }
+  try {
+    await mkdir(path.dirname(bucket), { recursive: true })
+    const stringified = JSON.stringify(entry)
+    // NOTE - Cleverness ahoy!
+    //
+    // This works because it's tremendously unlikely for an entry to corrupt
+    // another while still producing a matching checksum. So, we prefix each
+    // entry with a hash of its JSON and verify it on read.
+    //
+    // Thanks to @isaacs for the whiteboarding session that ended up with
+    // this.
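+    // each bucket line has the form: "<sha1 of entry JSON>\t<entry JSON>"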
+    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return undefined
+    }
+
+    throw err
+  }
+  return formatEntry(cache, entry)
+}
+
+module.exports.find = find
+
+async function find (cache, key) {
+  const bucket = bucketPath(cache, key)
+  try {
+    const entries = await bucketEntries(bucket)
+    return entries.reduce((latest, next) => {
+      if (next && next.key === key) {
+        return formatEntry(cache, next)
+      } else {
+        return latest
+      }
+    }, null)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return null
+    } else {
+      throw err
+    }
+  }
+}
+
+module.exports.delete = del
+
+function del (cache, key, opts = {}) {
+  if (!opts.removeFully) {
+    return insert(cache, key, null, opts)
+  }
+
+  const bucket = bucketPath(cache, key)
+  return rm(bucket, { recursive: true, force: true })
+}
+
+module.exports.lsStream = lsStream
+
+function lsStream (cache) {
+  const indexDir = bucketDir(cache)
+  const stream = new Minipass({ objectMode: true })
+
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const buckets = await readdirOrEmpty(indexDir)
+    await Promise.all(buckets.map(async (bucket) => {
+      const bucketPath = path.join(indexDir, bucket)
+      const subbuckets = await readdirOrEmpty(bucketPath)
+      await Promise.all(subbuckets.map(async (subbucket) => {
+        const subbucketPath = path.join(bucketPath, subbucket)
+
+        // "/cachename//./*"
+        const subbucketEntries = await readdirOrEmpty(subbucketPath)
+        await Promise.all(subbucketEntries.map(async (entry) => {
+          const entryPath = path.join(subbucketPath, entry)
+          try {
+            const entries = await bucketEntries(entryPath)
+            // using a Map here means that if the same key appears more
+            // than once, only the newest entry for it survives
+            const reduced = entries.reduce((acc, entry) => {
+              acc.set(entry.key, entry)
+              return acc
+            }, new Map())
+            // reduced is a map of key => entry
+            for (const entry of reduced.values()) {
+              const formatted = formatEntry(cache, entry)
+              if (formatted) {
+                stream.write(formatted)
+              }
+            }
+          } catch (err) {
+            if (err.code === 'ENOENT') {
+              return undefined
+            }
+            throw err
+          }
+        }))
+      }))
+    }))
+    stream.end()
+    return stream
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.ls = ls
+
+async function ls (cache) {
+  const entries = await lsStream(cache).collect()
+  return entries.reduce((acc, xs) => {
+    acc[xs.key] = xs
+    return acc
+  }, {})
+}
+
+module.exports.bucketEntries = bucketEntries
+
+async function bucketEntries (bucket, filter) {
+  const data = await readFile(bucket, 'utf8')
+  return _bucketEntries(data, filter)
+}
+
+function _bucketEntries (data, filter) {
+  const entries = []
+  data.split('\n').forEach((entry) => {
+    if (!entry) {
+      return
+    }
+
+    const pieces = entry.split('\t')
+    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
+      // Hash is no good! Corruption or malice? Doesn't matter!
+      // EJECT EJECT
+      return
+    }
+    let obj
+    try {
+      obj = JSON.parse(pieces[1])
+    } catch (_) {
+      // malformed JSON is treated like a missing entry and skipped
+    }
+    // coverage disabled here, no need to test with an entry that parses to something falsy
+    // istanbul ignore else
+    if (obj) {
+      entries.push(obj)
+    }
+  })
+  return entries
+}
+
+module.exports.bucketDir = bucketDir
+
+function bucketDir (cache) {
+  return path.join(cache, `index-v${indexV}`)
+}
+
+module.exports.bucketPath = bucketPath
+
+function bucketPath (cache, key) {
+  const hashed = hashKey(key)
+  return path.join.apply(
+    path,
+    [bucketDir(cache)].concat(hashToSegments(hashed))
+  )
+}
+
+module.exports.hashKey = hashKey
+
+function hashKey (key) {
+  return hash(key, 'sha256')
+}
+
+module.exports.hashEntry = hashEntry
+
+function hashEntry (str) {
+  return hash(str, 'sha1')
+}
+
+function hash (str, digest) {
+  return crypto
+    .createHash(digest)
+    .update(str)
+    .digest('hex')
+}
+
+function formatEntry (cache, entry, keepAll) {
+  // Treat null digests as deletions. They'll shadow any previous entries.
+  if (!entry.integrity && !keepAll) {
+    return null
+  }
+
+  return {
+    key: entry.key,
+    integrity: entry.integrity,
+    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
+    size: entry.size,
+    time: entry.time,
+    metadata: entry.metadata,
+  }
+}
+
+function readdirOrEmpty (dir) {
+  return readdir(dir).catch((err) => {
+    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
+      return []
+    }
+
+    throw err
+  })
+}
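The index files written above are append-only text: each line is the sha1 of an entry's JSON, a tab, then the JSON itself, stored in a bucket path derived from the sha256 of the key. A minimal sketch of that layout, mirroring hashEntry/hashKey/bucketPath (the cache path and key are hypothetical):

```js
const crypto = require('crypto')
const path = require('path')

const hash = (str, algo) => crypto.createHash(algo).update(str).digest('hex')

// hypothetical key and entry
const key = 'make-fetch-happen:request-cache:https://registry.example/pkg'
const entry = { key, integrity: 'sha512-deadbeef', time: Date.now() }
const json = JSON.stringify(entry)

// what insert() appends to the bucket: "<sha1 of json>\t<json>"
const line = `${hash(json, 'sha1')}\t${json}`

// where the bucket lives: sha256 of the key, fanned out 2/2/rest
const hashed = hash(key, 'sha256')
const bucket = path.join('/tmp/my-cache', 'index-v5',
  hashed.slice(0, 2), hashed.slice(2, 4), hashed.slice(4))

console.log(bucket)
console.log(line)
```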
diff --git a/node_modules/pacote/node_modules/cacache/lib/get.js b/node_modules/pacote/node_modules/cacache/lib/get.js
new file mode 100644
index 0000000000000..80ec206c7ecaa
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/get.js
@@ -0,0 +1,170 @@
+'use strict'
+
+const Collect = require('minipass-collect')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const read = require('./content/read')
+
+async function getData (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return {
+      metadata: memoized.entry.metadata,
+      data: memoized.data,
+      integrity: memoized.entry.integrity,
+      size: memoized.entry.size,
+    }
+  }
+
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  const data = await read(cache, entry.integrity, { integrity, size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return {
+    data,
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+module.exports = getData
+
+async function getDataByDigest (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get.byDigest(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return memoized
+  }
+
+  const res = await read(cache, key, { integrity, size })
+  if (memoize) {
+    memo.put.byDigest(cache, key, res, opts)
+  }
+  return res
+}
+module.exports.byDigest = getDataByDigest
+
+const getMemoizedStream = (memoized) => {
+  const stream = new Minipass()
+  stream.on('newListener', function (ev, cb) {
+    ev === 'metadata' && cb(memoized.entry.metadata)
+    ev === 'integrity' && cb(memoized.entry.integrity)
+    ev === 'size' && cb(memoized.entry.size)
+  })
+  stream.end(memoized.data)
+  return stream
+}
+
+function getStream (cache, key, opts = {}) {
+  const { memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return getMemoizedStream(memoized)
+  }
+
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const entry = await index.find(cache, key)
+    if (!entry) {
+      throw new index.NotFoundError(cache, key)
+    }
+
+    stream.emit('metadata', entry.metadata)
+    stream.emit('integrity', entry.integrity)
+    stream.emit('size', entry.size)
+    stream.on('newListener', function (ev, cb) {
+      ev === 'metadata' && cb(entry.metadata)
+      ev === 'integrity' && cb(entry.integrity)
+      ev === 'size' && cb(entry.size)
+    })
+
+    const src = read.readStream(
+      cache,
+      entry.integrity,
+      { ...opts, size: typeof size !== 'number' ? entry.size : size }
+    )
+
+    if (memoize) {
+      const memoStream = new Collect.PassThrough()
+      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
+      stream.unshift(memoStream)
+    }
+    stream.unshift(src)
+    return stream
+  }).catch((err) => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.stream = getStream
+
+function getStreamDigest (cache, integrity, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get.byDigest(cache, integrity, opts)
+  if (memoized && memoize !== false) {
+    const stream = new Minipass()
+    stream.end(memoized)
+    return stream
+  } else {
+    const stream = read.readStream(cache, integrity, opts)
+    if (!memoize) {
+      return stream
+    }
+
+    const memoStream = new Collect.PassThrough()
+    memoStream.on('collect', data => memo.put.byDigest(
+      cache,
+      integrity,
+      data,
+      opts
+    ))
+    return new Pipeline(stream, memoStream)
+  }
+}
+
+module.exports.stream.byDigest = getStreamDigest
+
+function info (cache, key, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return Promise.resolve(memoized.entry)
+  } else {
+    return index.find(cache, key)
+  }
+}
+module.exports.info = info
+
+async function copy (cache, key, dest, opts = {}) {
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  await read.copy(cache, entry.integrity, dest, opts)
+  return {
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+
+module.exports.copy = copy
+
+async function copyByDigest (cache, key, dest, opts = {}) {
+  await read.copy(cache, key, dest, opts)
+  return key
+}
+
+module.exports.copy.byDigest = copyByDigest
+
+module.exports.hasContent = read.hasContent
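For reference, a short usage sketch of the get API exported above; the cache path and key are hypothetical:

```js
const cacache = require('cacache')

async function main () {
  const { data, metadata, integrity, size } = await cacache.get('/tmp/my-cache', 'my-key')
  console.log(integrity, size, metadata, data.length)

  // streaming variant: 'metadata', 'integrity' and 'size' fire before any
  // data, and are replayed to late subscribers via the 'newListener' hook
  const stream = cacache.get.stream('/tmp/my-cache', 'my-key')
  stream.on('integrity', (i) => console.log('stream integrity', i))
  stream.on('data', () => {})
}

main().catch(console.error)
```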
diff --git a/node_modules/pacote/node_modules/cacache/lib/index.js b/node_modules/pacote/node_modules/cacache/lib/index.js
new file mode 100644
index 0000000000000..c9b0da5f3a271
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/index.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const get = require('./get.js')
+const put = require('./put.js')
+const rm = require('./rm.js')
+const verify = require('./verify.js')
+const { clearMemoized } = require('./memoization.js')
+const tmp = require('./util/tmp.js')
+const index = require('./entry-index.js')
+
+module.exports.index = {}
+module.exports.index.compact = index.compact
+module.exports.index.insert = index.insert
+
+module.exports.ls = index.ls
+module.exports.ls.stream = index.lsStream
+
+module.exports.get = get
+module.exports.get.byDigest = get.byDigest
+module.exports.get.stream = get.stream
+module.exports.get.stream.byDigest = get.stream.byDigest
+module.exports.get.copy = get.copy
+module.exports.get.copy.byDigest = get.copy.byDigest
+module.exports.get.info = get.info
+module.exports.get.hasContent = get.hasContent
+
+module.exports.put = put
+module.exports.put.stream = put.stream
+
+module.exports.rm = rm.entry
+module.exports.rm.all = rm.all
+module.exports.rm.entry = module.exports.rm
+module.exports.rm.content = rm.content
+
+module.exports.clearMemoized = clearMemoized
+
+module.exports.tmp = {}
+module.exports.tmp.mkdir = tmp.mkdir
+module.exports.tmp.withTmp = tmp.withTmp
+
+module.exports.verify = verify
+module.exports.verify.lastRun = verify.lastRun
diff --git a/node_modules/pacote/node_modules/cacache/lib/memoization.js b/node_modules/pacote/node_modules/cacache/lib/memoization.js
new file mode 100644
index 0000000000000..0ff604a479c9c
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/memoization.js
@@ -0,0 +1,72 @@
+'use strict'
+
+const LRU = require('lru-cache')
+
+const MEMOIZED = new LRU({
+  max: 500,
+  maxSize: 50 * 1024 * 1024, // 50MB
+  ttl: 3 * 60 * 1000, // 3 minutes
+  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
+})
+
+module.exports.clearMemoized = clearMemoized
+
+function clearMemoized () {
+  const old = {}
+  MEMOIZED.forEach((v, k) => {
+    old[k] = v
+  })
+  MEMOIZED.clear()
+  return old
+}
+
+module.exports.put = put
+
+function put (cache, entry, data, opts) {
+  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
+  putDigest(cache, entry.integrity, data, opts)
+}
+
+module.exports.put.byDigest = putDigest
+
+function putDigest (cache, integrity, data, opts) {
+  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
+}
+
+module.exports.get = get
+
+function get (cache, key, opts) {
+  return pickMem(opts).get(`key:${cache}:${key}`)
+}
+
+module.exports.get.byDigest = getDigest
+
+function getDigest (cache, integrity, opts) {
+  return pickMem(opts).get(`digest:${cache}:${integrity}`)
+}
+
+class ObjProxy {
+  constructor (obj) {
+    this.obj = obj
+  }
+
+  get (key) {
+    return this.obj[key]
+  }
+
+  set (key, val) {
+    this.obj[key] = val
+  }
+}
+
+function pickMem (opts) {
+  if (!opts || !opts.memoize) {
+    return MEMOIZED
+  } else if (opts.memoize.get && opts.memoize.set) {
+    return opts.memoize
+  } else if (typeof opts.memoize === 'object') {
+    return new ObjProxy(opts.memoize)
+  } else {
+    return MEMOIZED
+  }
+}
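The opts.memoize handling in pickMem accepts three shapes: falsy (use the global LRU), an object with get/set (used as-is), or any other object (wrapped in ObjProxy). A sketch of the third case through the public put API, with hypothetical paths:

```js
const cacache = require('cacache')

async function main () {
  const scratch = {} // plain object, so pickMem wraps it in ObjProxy
  await cacache.put('/tmp/my-cache', 'my-key', Buffer.from('hi'), { memoize: scratch })
  // scratch now holds one 'key:...' entry and one 'digest:...' entry
  console.log(Object.keys(scratch))
}

main().catch(console.error)
```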
diff --git a/node_modules/pacote/node_modules/cacache/lib/put.js b/node_modules/pacote/node_modules/cacache/lib/put.js
new file mode 100644
index 0000000000000..9fc932d5f6dec
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/put.js
@@ -0,0 +1,80 @@
+'use strict'
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const write = require('./content/write')
+const Flush = require('minipass-flush')
+const { PassThrough } = require('minipass-collect')
+const Pipeline = require('minipass-pipeline')
+
+const putOpts = (opts) => ({
+  algorithms: ['sha512'],
+  ...opts,
+})
+
+module.exports = putData
+
+async function putData (cache, key, data, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  const res = await write(cache, data, opts)
+  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return res.integrity
+}
+
+module.exports.stream = putStream
+
+function putStream (cache, key, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  let integrity
+  let size
+  let error
+
+  let memoData
+  const pipeline = new Pipeline()
+  // first item in the pipeline is the memoizer, because we need
+  // that to end first and get the collected data.
+  if (memoize) {
+    const memoizer = new PassThrough().on('collect', data => {
+      memoData = data
+    })
+    pipeline.push(memoizer)
+  }
+
+  // contentStream is a write-only, not a passthrough
+  // no data comes out of it.
+  const contentStream = write.stream(cache, opts)
+    .on('integrity', (int) => {
+      integrity = int
+    })
+    .on('size', (s) => {
+      size = s
+    })
+    .on('error', (err) => {
+      error = err
+    })
+
+  pipeline.push(contentStream)
+
+  // last but not least, we write the index and emit hash and size,
+  // and memoize if we're doing that
+  pipeline.push(new Flush({
+    async flush () {
+      if (!error) {
+        const entry = await index.insert(cache, key, integrity, { ...opts, size })
+        if (memoize && memoData) {
+          memo.put(cache, entry, memoData, opts)
+        }
+        pipeline.emit('integrity', integrity)
+        pipeline.emit('size', size)
+      }
+    },
+  }))
+
+  return pipeline
+}
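A usage sketch for the two write paths above, with a hypothetical cache path and keys; put.stream emits 'integrity' and 'size' from the final Flush stage once the index entry has been written:

```js
const cacache = require('cacache')
const fs = require('fs')

async function main () {
  const integrity = await cacache.put('/tmp/my-cache', 'my-key', Buffer.from('hello'))
  console.log('stored as', integrity.toString())

  const dest = cacache.put.stream('/tmp/my-cache', 'my-file')
    .on('integrity', (i) => console.log('stream integrity', i.toString()))
  fs.createReadStream('package.json').pipe(dest)
  await dest.promise()
}

main().catch(console.error)
```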
diff --git a/node_modules/pacote/node_modules/cacache/lib/rm.js b/node_modules/pacote/node_modules/cacache/lib/rm.js
new file mode 100644
index 0000000000000..a94760c7cf243
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/rm.js
@@ -0,0 +1,31 @@
+'use strict'
+
+const { rm } = require('fs/promises')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const memo = require('./memoization')
+const path = require('path')
+const rmContent = require('./content/rm')
+
+module.exports = entry
+module.exports.entry = entry
+
+function entry (cache, key, opts) {
+  memo.clearMemoized()
+  return index.delete(cache, key, opts)
+}
+
+module.exports.content = content
+
+function content (cache, integrity) {
+  memo.clearMemoized()
+  return rmContent(cache, integrity)
+}
+
+module.exports.all = all
+
+async function all (cache) {
+  memo.clearMemoized()
+  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
+  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
+}
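The removal flavors above differ in what they touch; a sketch with hypothetical values:

```js
const cacache = require('cacache')

async function main () {
  await cacache.rm.entry('/tmp/my-cache', 'my-key') // appends a null-integrity tombstone
  await cacache.rm.entry('/tmp/my-cache', 'my-key', { removeFully: true }) // unlinks the bucket file
  await cacache.rm.content('/tmp/my-cache', 'sha512-deadbeef') // deletes one content blob
  await cacache.rm.all('/tmp/my-cache') // removes content-* and index-* wholesale
}

main().catch(console.error)
```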
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/glob.js b/node_modules/pacote/node_modules/cacache/lib/util/glob.js
new file mode 100644
index 0000000000000..8500c1c16a429
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/util/glob.js
@@ -0,0 +1,7 @@
+'use strict'
+
+const { glob } = require('glob')
+const path = require('path')
+
+const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
+module.exports = (path, options) => glob(globify(path), options)
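globify exists because glob treats backslashes as escape characters, so patterns built with path.join() on win32 must be converted to posix separators first. For example:

```js
const path = require('path')
const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
console.log(globify('C:\\cache\\content-v2\\**')) // -> C:/cache/content-v2/**
```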
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js
new file mode 100644
index 0000000000000..445599b503808
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js
@@ -0,0 +1,7 @@
+'use strict'
+
+module.exports = hashToSegments
+
+function hashToSegments (hash) {
+  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
+}
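The 2/2/rest split keeps any single directory from accumulating an unbounded number of children; both index buckets and content files use it:

```js
const hashToSegments = (hash) => [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
console.log(hashToSegments('badc0ffee0ddf00d')) // -> [ 'ba', 'dc', '0ffee0ddf00d' ]
```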
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/tmp.js b/node_modules/pacote/node_modules/cacache/lib/util/tmp.js
new file mode 100644
index 0000000000000..0bf5302136ebe
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/util/tmp.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const { withTempDir } = require('@npmcli/fs')
+const fs = require('fs/promises')
+const path = require('path')
+
+module.exports.mkdir = mktmpdir
+
+async function mktmpdir (cache, opts = {}) {
+  const { tmpPrefix } = opts
+  const tmpDir = path.join(cache, 'tmp')
+  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
+  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
+  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
+  return fs.mkdtemp(target, { owner: 'inherit' })
+}
+
+module.exports.withTmp = withTmp
+
+function withTmp (cache, opts, cb) {
+  if (!cb) {
+    cb = opts
+    opts = {}
+  }
+  return withTempDir(path.join(cache, 'tmp'), cb, opts)
+}
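A usage sketch for withTmp above; the directory lives under the cache's tmp dir and is removed by @npmcli/fs withTempDir when the callback settles (hypothetical cache path):

```js
const cacache = require('cacache')
const fs = require('fs/promises')
const path = require('path')

async function main () {
  await cacache.tmp.withTmp('/tmp/my-cache', { tmpPrefix: 'unpack-' }, async (dir) => {
    await fs.writeFile(path.join(dir, 'scratch.bin'), Buffer.alloc(16))
    // work with the file here; the whole directory is cleaned up afterwards
  })
}

main().catch(console.error)
```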
diff --git a/node_modules/pacote/node_modules/cacache/lib/verify.js b/node_modules/pacote/node_modules/cacache/lib/verify.js
new file mode 100644
index 0000000000000..62e85c946490f
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/verify.js
@@ -0,0 +1,257 @@
+'use strict'
+
+const {
+  mkdir,
+  readFile,
+  rm,
+  stat,
+  truncate,
+  writeFile,
+} = require('fs/promises')
+const pMap = require('p-map')
+const contentPath = require('./content/path')
+const fsm = require('fs-minipass')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const path = require('path')
+const ssri = require('ssri')
+
+const hasOwnProperty = (obj, key) =>
+  Object.prototype.hasOwnProperty.call(obj, key)
+
+const verifyOpts = (opts) => ({
+  concurrency: 20,
+  log: { silly () {} },
+  ...opts,
+})
+
+module.exports = verify
+
+async function verify (cache, opts) {
+  opts = verifyOpts(opts)
+  opts.log.silly('verify', 'verifying cache at', cache)
+
+  const steps = [
+    markStartTime,
+    fixPerms,
+    garbageCollect,
+    rebuildIndex,
+    cleanTmp,
+    writeVerifile,
+    markEndTime,
+  ]
+
+  const stats = {}
+  for (const step of steps) {
+    const label = step.name
+    const start = new Date()
+    const s = await step(cache, opts)
+    if (s) {
+      Object.keys(s).forEach((k) => {
+        stats[k] = s[k]
+      })
+    }
+    const end = new Date()
+    if (!stats.runTime) {
+      stats.runTime = {}
+    }
+    stats.runTime[label] = end - start
+  }
+  stats.runTime.total = stats.endTime - stats.startTime
+  opts.log.silly(
+    'verify',
+    'verification finished for',
+    cache,
+    'in',
+    `${stats.runTime.total}ms`
+  )
+  return stats
+}
+
+async function markStartTime (cache, opts) {
+  return { startTime: new Date() }
+}
+
+async function markEndTime (cache, opts) {
+  return { endTime: new Date() }
+}
+
+async function fixPerms (cache, opts) {
+  opts.log.silly('verify', 'fixing cache permissions')
+  await mkdir(cache, { recursive: true })
+  return null
+}
+
+// Implements a naive mark-and-sweep tracing garbage collector.
+//
+// The algorithm is basically as follows:
+// 1. Read (and filter) all index entries ("pointers")
+// 2. Mark each integrity value as "live"
+// 3. Read entire filesystem tree in `content-vX/` dir
+// 4. If content is live, verify its checksum and delete it if it fails
+// 5. If content is not marked as live, rm it.
+//
+async function garbageCollect (cache, opts) {
+  opts.log.silly('verify', 'garbage collecting content')
+  const indexStream = index.lsStream(cache)
+  const liveContent = new Set()
+  indexStream.on('data', (entry) => {
+    if (opts.filter && !opts.filter(entry)) {
+      return
+    }
+
+    // integrity is stringified, re-parse it so we can get each hash
+    const integrity = ssri.parse(entry.integrity)
+    for (const algo in integrity) {
+      liveContent.add(integrity[algo].toString())
+    }
+  })
+  await new Promise((resolve, reject) => {
+    indexStream.on('end', resolve).on('error', reject)
+  })
+  const contentDir = contentPath.contentDir(cache)
+  const files = await glob(path.join(contentDir, '**'), {
+    follow: false,
+    nodir: true,
+    nosort: true,
+  })
+  const stats = {
+    verifiedContent: 0,
+    reclaimedCount: 0,
+    reclaimedSize: 0,
+    badContentCount: 0,
+    keptSize: 0,
+  }
+  await pMap(
+    files,
+    async (f) => {
+      const split = f.split(/[/\\]/)
+      const digest = split.slice(split.length - 3).join('')
+      const algo = split[split.length - 4]
+      const integrity = ssri.fromHex(digest, algo)
+      if (liveContent.has(integrity.toString())) {
+        const info = await verifyContent(f, integrity)
+        if (!info.valid) {
+          stats.reclaimedCount++
+          stats.badContentCount++
+          stats.reclaimedSize += info.size
+        } else {
+          stats.verifiedContent++
+          stats.keptSize += info.size
+        }
+      } else {
+        // No entries refer to this content. We can delete.
+        stats.reclaimedCount++
+        const s = await stat(f)
+        await rm(f, { recursive: true, force: true })
+        stats.reclaimedSize += s.size
+      }
+      return stats
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function verifyContent (filepath, sri) {
+  const contentInfo = {}
+  try {
+    const { size } = await stat(filepath)
+    contentInfo.size = size
+    contentInfo.valid = true
+    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return { size: 0, valid: false }
+    }
+    if (err.code !== 'EINTEGRITY') {
+      throw err
+    }
+
+    await rm(filepath, { recursive: true, force: true })
+    contentInfo.valid = false
+  }
+  return contentInfo
+}
+
+async function rebuildIndex (cache, opts) {
+  opts.log.silly('verify', 'rebuilding index')
+  const entries = await index.ls(cache)
+  const stats = {
+    missingContent: 0,
+    rejectedEntries: 0,
+    totalEntries: 0,
+  }
+  const buckets = {}
+  for (const k in entries) {
+    /* istanbul ignore else */
+    if (hasOwnProperty(entries, k)) {
+      const hashed = index.hashKey(k)
+      const entry = entries[k]
+      const excluded = opts.filter && !opts.filter(entry)
+      excluded && stats.rejectedEntries++
+      if (buckets[hashed] && !excluded) {
+        buckets[hashed].push(entry)
+      } else if (buckets[hashed] && excluded) {
+        // skip
+      } else if (excluded) {
+        buckets[hashed] = []
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      } else {
+        buckets[hashed] = [entry]
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      }
+    }
+  }
+  await pMap(
+    Object.keys(buckets),
+    (key) => {
+      return rebuildBucket(cache, buckets[key], stats, opts)
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function rebuildBucket (cache, bucket, stats, opts) {
+  await truncate(bucket._path)
+  // This needs to be serialized because cacache explicitly
+  // lets very racy bucket conflicts clobber each other.
+  for (const entry of bucket) {
+    const content = contentPath(cache, entry.integrity)
+    try {
+      await stat(content)
+      await index.insert(cache, entry.key, entry.integrity, {
+        metadata: entry.metadata,
+        size: entry.size,
+        time: entry.time,
+      })
+      stats.totalEntries++
+    } catch (err) {
+      if (err.code === 'ENOENT') {
+        stats.rejectedEntries++
+        stats.missingContent++
+      } else {
+        throw err
+      }
+    }
+  }
+}
+
+function cleanTmp (cache, opts) {
+  opts.log.silly('verify', 'cleaning tmp directory')
+  return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
+}
+
+async function writeVerifile (cache, opts) {
+  const verifile = path.join(cache, '_lastverified')
+  opts.log.silly('verify', 'writing verifile to ' + verifile)
+  return writeFile(verifile, `${Date.now()}`)
+}
+
+module.exports.lastRun = lastRun
+
+async function lastRun (cache) {
+  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
+  return new Date(+data)
+}
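A usage sketch for verify above, with a hypothetical cache path; the stats object aggregates whatever each step returns plus per-step runtimes:

```js
const cacache = require('cacache')

async function main () {
  const stats = await cacache.verify('/tmp/my-cache')
  console.log(`reclaimed ${stats.reclaimedCount} items (${stats.reclaimedSize} bytes)`)
  console.log(`verified ${stats.verifiedContent} items in ${stats.runTime.total}ms`)
  console.log('last verified at', await cacache.verify.lastRun('/tmp/my-cache'))
}

main().catch(console.error)
```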
diff --git a/node_modules/pacote/node_modules/cacache/package.json b/node_modules/pacote/node_modules/cacache/package.json
new file mode 100644
index 0000000000000..ab58cb8b7c50f
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "cacache",
+  "version": "17.1.4",
+  "cache-version": {
+    "content": "2",
+    "index": "5"
+  },
+  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "coverage": "tap",
+    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
+    "lint": "eslint \"**/*.js\"",
+    "npmclilint": "npmcli-lint",
+    "lintfix": "npm run lint -- --fix",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "posttest": "npm run lint",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/cacache.git"
+  },
+  "keywords": [
+    "cache",
+    "caching",
+    "content-addressable",
+    "sri",
+    "sri hash",
+    "subresource integrity",
+    "cache",
+    "storage",
+    "store",
+    "file store",
+    "filesystem",
+    "disk cache",
+    "disk storage"
+  ],
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/fs": "^3.1.0",
+    "fs-minipass": "^3.0.0",
+    "glob": "^10.2.2",
+    "lru-cache": "^7.7.1",
+    "minipass": "^7.0.3",
+    "minipass-collect": "^1.0.2",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "p-map": "^4.0.0",
+    "ssri": "^10.0.0",
+    "tar": "^6.1.11",
+    "unique-filename": "^3.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "windowsCI": false,
+    "version": "4.18.0",
+    "publish": "true"
+  },
+  "author": "GitHub Inc.",
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/node_modules/sigstore/node_modules/cacache/LICENSE.md b/node_modules/sigstore/node_modules/cacache/LICENSE.md
new file mode 100644
index 0000000000000..8d28acf866d93
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/LICENSE.md
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/cacache/lib/content/path.js b/node_modules/sigstore/node_modules/cacache/lib/content/path.js
new file mode 100644
index 0000000000000..ad5a76a4f73f2
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/content/path.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const contentVer = require('../../package.json')['cache-version'].content
+const hashToSegments = require('../util/hash-to-segments')
+const path = require('path')
+const ssri = require('ssri')
+
+// Current format of content file path:
+//
+// sha512-BaSE64Hex= ->
+// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
+//
+module.exports = contentPath
+
+function contentPath (cache, integrity) {
+  const sri = ssri.parse(integrity, { single: true })
+  // contentPath is the *strongest* algo given
+  return path.join(
+    contentDir(cache),
+    sri.algorithm,
+    ...hashToSegments(sri.hexDigest())
+  )
+}
+
+module.exports.contentDir = contentDir
+
+function contentDir (cache) {
+  return path.join(cache, `content-v${contentVer}`)
+}
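A sketch of the content-v2 path computed above; the integrity string is a hypothetical sha512:

```js
const path = require('path')
const ssri = require('ssri')

const sri = ssri.parse('sha512-MDEyMzQ1Njc4OWFiY2RlZg==', { single: true })
const hex = sri.hexDigest()
console.log(path.join('/tmp/my-cache', 'content-v2', sri.algorithm,
  hex.slice(0, 2), hex.slice(2, 4), hex.slice(4)))
```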
diff --git a/node_modules/sigstore/node_modules/cacache/lib/content/read.js b/node_modules/sigstore/node_modules/cacache/lib/content/read.js
new file mode 100644
index 0000000000000..f41b539df65dc
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/content/read.js
@@ -0,0 +1,166 @@
+'use strict'
+
+const fs = require('fs/promises')
+const fsm = require('fs-minipass')
+const ssri = require('ssri')
+const contentPath = require('./path')
+const Pipeline = require('minipass-pipeline')
+
+module.exports = read
+
+const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
+async function read (cache, integrity, opts = {}) {
+  const { size } = opts
+  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+    // get size
+    const stat = await fs.stat(cpath)
+    return { stat, cpath, sri }
+  })
+  if (typeof size === 'number' && stat.size !== size) {
+    throw sizeError(size, stat.size)
+  }
+
+  if (stat.size > MAX_SINGLE_READ_SIZE) {
+    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
+  }
+
+  const data = await fs.readFile(cpath, { encoding: null })
+  if (!ssri.checkData(data, sri)) {
+    throw integrityError(sri, cpath)
+  }
+
+  return data
+}
+
+const readPipeline = (cpath, size, sri, stream) => {
+  stream.push(
+    new fsm.ReadStream(cpath, {
+      size,
+      readSize: MAX_SINGLE_READ_SIZE,
+    }),
+    ssri.integrityStream({
+      integrity: sri,
+      size,
+    })
+  )
+  return stream
+}
+
+module.exports.stream = readStream
+module.exports.readStream = readStream
+
+function readStream (cache, integrity, opts = {}) {
+  const { size } = opts
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+      // just stat to ensure it exists
+      const stat = await fs.stat(cpath)
+      return { stat, cpath, sri }
+    })
+    if (typeof size === 'number' && size !== stat.size) {
+      return stream.emit('error', sizeError(size, stat.size))
+    }
+
+    return readPipeline(cpath, stat.size, sri, stream)
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.copy = copy
+
+function copy (cache, integrity, dest) {
+  return withContentSri(cache, integrity, (cpath, sri) => {
+    return fs.copyFile(cpath, dest)
+  })
+}
+
+module.exports.hasContent = hasContent
+
+async function hasContent (cache, integrity) {
+  if (!integrity) {
+    return false
+  }
+
+  try {
+    return await withContentSri(cache, integrity, async (cpath, sri) => {
+      const stat = await fs.stat(cpath)
+      return { size: stat.size, sri, stat }
+    })
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return false
+    }
+
+    if (err.code === 'EPERM') {
+      /* istanbul ignore else */
+      if (process.platform !== 'win32') {
+        throw err
+      } else {
+        return false
+      }
+    }
+  }
+}
+
+async function withContentSri (cache, integrity, fn) {
+  const sri = ssri.parse(integrity)
+  // If `integrity` has multiple entries, pick the first digest
+  // with available local data.
+  const algo = sri.pickAlgorithm()
+  const digests = sri[algo]
+
+  if (digests.length <= 1) {
+    const cpath = contentPath(cache, digests[0])
+    return fn(cpath, digests[0])
+  } else {
+    // Can't use race here because a generic error can happen before
+    // an ENOENT error, and can happen before a valid result
+    const results = await Promise.all(digests.map(async (meta) => {
+      try {
+        return await withContentSri(cache, meta, fn)
+      } catch (err) {
+        if (err.code === 'ENOENT') {
+          return Object.assign(
+            new Error('No matching content found for ' + sri.toString()),
+            { code: 'ENOENT' }
+          )
+        }
+        return err
+      }
+    }))
+    // Return the first non error if it is found
+    const result = results.find((r) => !(r instanceof Error))
+    if (result) {
+      return result
+    }
+
+    // Throw the No matching content found error
+    const enoentError = results.find((r) => r.code === 'ENOENT')
+    if (enoentError) {
+      throw enoentError
+    }
+
+    // Throw generic error
+    throw results.find((r) => r instanceof Error)
+  }
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function integrityError (sri, path) {
+  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
+  err.code = 'EINTEGRITY'
+  err.sri = sri
+  err.path = path
+  return err
+}
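A usage sketch for the digest-oriented read path above, reached through cacache's public get.hasContent and get.byDigest (hypothetical cache path and integrity):

```js
const cacache = require('cacache')

async function main () {
  const info = await cacache.get.hasContent('/tmp/my-cache', 'sha512-deadbeef')
  if (info) {
    const data = await cacache.get.byDigest('/tmp/my-cache', 'sha512-deadbeef')
    console.log('found', info.size, 'bytes; read', data.length)
  }
}

main().catch(console.error)
```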
diff --git a/node_modules/sigstore/node_modules/cacache/lib/content/rm.js b/node_modules/sigstore/node_modules/cacache/lib/content/rm.js
new file mode 100644
index 0000000000000..ce58d679e4cb2
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/content/rm.js
@@ -0,0 +1,18 @@
+'use strict'
+
+const fs = require('fs/promises')
+const contentPath = require('./path')
+const { hasContent } = require('./read')
+
+module.exports = rm
+
+async function rm (cache, integrity) {
+  const content = await hasContent(cache, integrity)
+  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
+  if (content && content.sri) {
+    await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
+    return true
+  } else {
+    return false
+  }
+}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/content/write.js b/node_modules/sigstore/node_modules/cacache/lib/content/write.js
new file mode 100644
index 0000000000000..7146146581287
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/content/write.js
@@ -0,0 +1,205 @@
+'use strict'
+
+const events = require('events')
+
+const contentPath = require('./path')
+const fs = require('fs/promises')
+const { moveFile } = require('@npmcli/fs')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+const Flush = require('minipass-flush')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+const fsm = require('fs-minipass')
+
+module.exports = write
+
+// Cache of move operations in process so we don't duplicate
+const moveOperations = new Map()
+
+async function write (cache, data, opts = {}) {
+  const { algorithms, size, integrity } = opts
+
+  if (typeof size === 'number' && data.length !== size) {
+    throw sizeError(size, data.length)
+  }
+
+  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
+  if (integrity && !ssri.checkData(data, integrity, opts)) {
+    throw checksumError(integrity, sri)
+  }
+
+  for (const algo in sri) {
+    const tmp = await makeTmp(cache, opts)
+    const hash = sri[algo].toString()
+    try {
+      await fs.writeFile(tmp.target, data, { flag: 'wx' })
+      await moveToDestination(tmp, cache, hash, opts)
+    } finally {
+      if (!tmp.moved) {
+        await fs.rm(tmp.target, { recursive: true, force: true })
+      }
+    }
+  }
+  return { integrity: sri, size: data.length }
+}
+
+module.exports.stream = writeStream
+
+// writes are proxied to the 'inputStream' that is passed to the
+// content-handling promise; 'end' is deferred until content is handled.
+class CacacheWriteStream extends Flush {
+  constructor (cache, opts) {
+    super()
+    this.opts = opts
+    this.cache = cache
+    this.inputStream = new Minipass()
+    this.inputStream.on('error', er => this.emit('error', er))
+    this.inputStream.on('drain', () => this.emit('drain'))
+    this.handleContentP = null
+  }
+
+  write (chunk, encoding, cb) {
+    if (!this.handleContentP) {
+      this.handleContentP = handleContent(
+        this.inputStream,
+        this.cache,
+        this.opts
+      )
+    }
+    return this.inputStream.write(chunk, encoding, cb)
+  }
+
+  flush (cb) {
+    this.inputStream.end(() => {
+      if (!this.handleContentP) {
+        const e = new Error('Cache input stream was empty')
+        e.code = 'ENODATA'
+        // empty streams are probably emitting end right away.
+        // defer this one tick by rejecting a promise on it.
+        return Promise.reject(e).catch(cb)
+      }
+      // eslint-disable-next-line promise/catch-or-return
+      this.handleContentP.then(
+        (res) => {
+          res.integrity && this.emit('integrity', res.integrity)
+          // eslint-disable-next-line promise/always-return
+          res.size !== null && this.emit('size', res.size)
+          cb()
+        },
+        (er) => cb(er)
+      )
+    })
+  }
+}
+
+function writeStream (cache, opts = {}) {
+  return new CacacheWriteStream(cache, opts)
+}
+
+async function handleContent (inputStream, cache, opts) {
+  const tmp = await makeTmp(cache, opts)
+  try {
+    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
+    await moveToDestination(
+      tmp,
+      cache,
+      res.integrity,
+      opts
+    )
+    return res
+  } finally {
+    if (!tmp.moved) {
+      await fs.rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+}
+
+async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
+  const outStream = new fsm.WriteStream(tmpTarget, {
+    flags: 'wx',
+  })
+
+  if (opts.integrityEmitter) {
+    // we need to create these all simultaneously since they can fire in any order
+    const [integrity, size] = await Promise.all([
+      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
+      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
+      new Pipeline(inputStream, outStream).promise(),
+    ])
+    return { integrity, size }
+  }
+
+  let integrity
+  let size
+  const hashStream = ssri.integrityStream({
+    integrity: opts.integrity,
+    algorithms: opts.algorithms,
+    size: opts.size,
+  })
+  hashStream.on('integrity', i => {
+    integrity = i
+  })
+  hashStream.on('size', s => {
+    size = s
+  })
+
+  const pipeline = new Pipeline(inputStream, hashStream, outStream)
+  await pipeline.promise()
+  return { integrity, size }
+}
+
+async function makeTmp (cache, opts) {
+  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
+  return {
+    target: tmpTarget,
+    moved: false,
+  }
+}
+
+async function moveToDestination (tmp, cache, sri, opts) {
+  const destination = contentPath(cache, sri)
+  const destDir = path.dirname(destination)
+  if (moveOperations.has(destination)) {
+    return moveOperations.get(destination)
+  }
+  moveOperations.set(
+    destination,
+    fs.mkdir(destDir, { recursive: true })
+      .then(async () => {
+        await moveFile(tmp.target, destination, { overwrite: false })
+        tmp.moved = true
+        return tmp.moved
+      })
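+      // a 'The destination file exists' error means another writer already
+      // landed identical content-addressed data; it is swallowed below and
+      // the caller's cleanup reaps the unused tmp file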
+      .catch(err => {
+        if (!err.message.startsWith('The destination file exists')) {
+          throw Object.assign(err, { code: 'EEXIST' })
+        }
+      }).finally(() => {
+        moveOperations.delete(destination)
+      })
+
+  )
+  return moveOperations.get(destination)
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function checksumError (expected, found) {
+  const err = new Error(`Integrity check failed:
+  Wanted: ${expected}
+   Found: ${found}`)
+  err.code = 'EINTEGRITY'
+  err.expected = expected
+  err.found = found
+  return err
+}
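A sketch of the checked write above: when opts.integrity is given, the data is verified against it before being committed to the content store. The deep require path is an assumption for illustration; npm normally reaches this through cacache.put:

```js
const ssri = require('ssri')
const write = require('cacache/lib/content/write') // assumed deep require

async function main () {
  const data = Buffer.from('hello')
  const expected = ssri.fromData(data) // stand-in for a digest from a registry
  const { integrity, size } = await write('/tmp/my-cache', data, { integrity: expected })
  console.log(size, integrity.toString())
}

main().catch(console.error)
```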
diff --git a/node_modules/sigstore/node_modules/cacache/lib/entry-index.js b/node_modules/sigstore/node_modules/cacache/lib/entry-index.js
new file mode 100644
index 0000000000000..722a37af5ce15
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/entry-index.js
@@ -0,0 +1,330 @@
+'use strict'
+
+const crypto = require('crypto')
+const {
+  appendFile,
+  mkdir,
+  readFile,
+  readdir,
+  rm,
+  writeFile,
+} = require('fs/promises')
+const { Minipass } = require('minipass')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+
+const contentPath = require('./content/path')
+const hashToSegments = require('./util/hash-to-segments')
+const indexV = require('../package.json')['cache-version'].index
+const { moveFile } = require('@npmcli/fs')
+
+module.exports.NotFoundError = class NotFoundError extends Error {
+  constructor (cache, key) {
+    super(`No cache entry for ${key} found in ${cache}`)
+    this.code = 'ENOENT'
+    this.cache = cache
+    this.key = key
+  }
+}
+
+module.exports.compact = compact
+
+async function compact (cache, key, matchFn, opts = {}) {
+  const bucket = bucketPath(cache, key)
+  const entries = await bucketEntries(bucket)
+  const newEntries = []
+  // we loop backwards because the bottom-most result is the newest
+  // since we add new entries with appendFile
+  for (let i = entries.length - 1; i >= 0; --i) {
+    const entry = entries[i]
+    // a null integrity could mean either a delete was appended
+    // or the user has simply stored an index that does not map
+    // to any content. we determine if the user wants to keep the
+    // null integrity based on the validateEntry function passed in options.
+    // if the integrity is null and no validateEntry is provided, we break
+    // as we consider the null integrity to be a deletion of everything
+    // that came before it.
+    if (entry.integrity === null && !opts.validateEntry) {
+      break
+    }
+
+    // if this entry is valid, and it is either the first entry or
+    // the newEntries array doesn't already include an entry that
+    // matches this one based on the provided matchFn, then we add
+    // it to the beginning of our list
+    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
+      (newEntries.length === 0 ||
+        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
+      newEntries.unshift(entry)
+    }
+  }
+
+  const newIndex = '\n' + newEntries.map((entry) => {
+    const stringified = JSON.stringify(entry)
+    const hash = hashEntry(stringified)
+    return `${hash}\t${stringified}`
+  }).join('\n')
+
+  const setup = async () => {
+    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+    await mkdir(path.dirname(target), { recursive: true })
+    return {
+      target,
+      moved: false,
+    }
+  }
+
+  const teardown = async (tmp) => {
+    if (!tmp.moved) {
+      return rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+
+  const write = async (tmp) => {
+    await writeFile(tmp.target, newIndex, { flag: 'wx' })
+    await mkdir(path.dirname(bucket), { recursive: true })
+    // we use moveFile from @npmcli/fs directly here because we
+    // want to overwrite the existing file
+    await moveFile(tmp.target, bucket)
+    tmp.moved = true
+  }
+
+  // write the file atomically
+  const tmp = await setup()
+  try {
+    await write(tmp)
+  } finally {
+    await teardown(tmp)
+  }
+
+  // we reverse the list we generated such that the newest
+  // entries come first in order to make looping through them easier
+  // the true passed to formatEntry tells it to keep null
+  // integrity values, if they made it this far it's because
+  // validateEntry returned true, and as such we should return it
+  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
+}
+
+module.exports.insert = insert
+
+async function insert (cache, key, integrity, opts = {}) {
+  const { metadata, size, time } = opts
+  const bucket = bucketPath(cache, key)
+  const entry = {
+    key,
+    integrity: integrity && ssri.stringify(integrity),
+    time: time || Date.now(),
+    size,
+    metadata,
+  }
+  try {
+    await mkdir(path.dirname(bucket), { recursive: true })
+    const stringified = JSON.stringify(entry)
+    // NOTE - Cleverness ahoy!
+    //
+    // This works because it's tremendously unlikely for an entry to corrupt
+    // another while still producing a matching checksum for the JSON in
+    // question. So, we prefix each line with a hash of it and verify on read.
+    //
+    // Thanks to @isaacs for the whiteboarding session that ended up with
+    // this.
+    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return undefined
+    }
+
+    throw err
+  }
+  return formatEntry(cache, entry)
+}
+
+module.exports.find = find
+
+async function find (cache, key) {
+  const bucket = bucketPath(cache, key)
+  try {
+    const entries = await bucketEntries(bucket)
+    return entries.reduce((latest, next) => {
+      if (next && next.key === key) {
+        return formatEntry(cache, next)
+      } else {
+        return latest
+      }
+    }, null)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return null
+    } else {
+      throw err
+    }
+  }
+}
+
+module.exports.delete = del
+
+function del (cache, key, opts = {}) {
+  if (!opts.removeFully) {
+    return insert(cache, key, null, opts)
+  }
+
+  const bucket = bucketPath(cache, key)
+  return rm(bucket, { recursive: true, force: true })
+}
+
+module.exports.lsStream = lsStream
+
+function lsStream (cache) {
+  const indexDir = bucketDir(cache)
+  const stream = new Minipass({ objectMode: true })
+
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const buckets = await readdirOrEmpty(indexDir)
+    await Promise.all(buckets.map(async (bucket) => {
+      const bucketPath = path.join(indexDir, bucket)
+      const subbuckets = await readdirOrEmpty(bucketPath)
+      await Promise.all(subbuckets.map(async (subbucket) => {
+        const subbucketPath = path.join(bucketPath, subbucket)
+
+        // "/cachename//./*"
+        const subbucketEntries = await readdirOrEmpty(subbucketPath)
+        await Promise.all(subbucketEntries.map(async (entry) => {
+          const entryPath = path.join(subbucketPath, entry)
+          try {
+            const entries = await bucketEntries(entryPath)
+            // using a Map here means that if the same key appears more
+            // than once, only the newest entry for it survives
+            const reduced = entries.reduce((acc, entry) => {
+              acc.set(entry.key, entry)
+              return acc
+            }, new Map())
+            // reduced is a map of key => entry
+            for (const entry of reduced.values()) {
+              const formatted = formatEntry(cache, entry)
+              if (formatted) {
+                stream.write(formatted)
+              }
+            }
+          } catch (err) {
+            if (err.code === 'ENOENT') {
+              return undefined
+            }
+            throw err
+          }
+        }))
+      }))
+    }))
+    stream.end()
+    return stream
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.ls = ls
+
+async function ls (cache) {
+  const entries = await lsStream(cache).collect()
+  return entries.reduce((acc, xs) => {
+    acc[xs.key] = xs
+    return acc
+  }, {})
+}
+
+module.exports.bucketEntries = bucketEntries
+
+async function bucketEntries (bucket, filter) {
+  const data = await readFile(bucket, 'utf8')
+  return _bucketEntries(data, filter)
+}
+
+function _bucketEntries (data, filter) {
+  const entries = []
+  data.split('\n').forEach((entry) => {
+    if (!entry) {
+      return
+    }
+
+    const pieces = entry.split('\t')
+    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
+      // Hash is no good! Corruption or malice? Doesn't matter!
+      // EJECT EJECT
+      return
+    }
+    let obj
+    try {
+      obj = JSON.parse(pieces[1])
+    } catch (_) {
+      // malformed JSON is treated like a missing entry and skipped
+    }
+    // coverage disabled here, no need to test with an entry that parses to something falsy
+    // istanbul ignore else
+    if (obj) {
+      entries.push(obj)
+    }
+  })
+  return entries
+}
+
+module.exports.bucketDir = bucketDir
+
+function bucketDir (cache) {
+  return path.join(cache, `index-v${indexV}`)
+}
+
+module.exports.bucketPath = bucketPath
+
+function bucketPath (cache, key) {
+  const hashed = hashKey(key)
+  return path.join.apply(
+    path,
+    [bucketDir(cache)].concat(hashToSegments(hashed))
+  )
+}
+
+module.exports.hashKey = hashKey
+
+function hashKey (key) {
+  return hash(key, 'sha256')
+}
+
+module.exports.hashEntry = hashEntry
+
+function hashEntry (str) {
+  return hash(str, 'sha1')
+}
+
+function hash (str, digest) {
+  return crypto
+    .createHash(digest)
+    .update(str)
+    .digest('hex')
+}
+
+function formatEntry (cache, entry, keepAll) {
+  // Treat null digests as deletions. They'll shadow any previous entries.
+  if (!entry.integrity && !keepAll) {
+    return null
+  }
+
+  return {
+    key: entry.key,
+    integrity: entry.integrity,
+    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
+    size: entry.size,
+    time: entry.time,
+    metadata: entry.metadata,
+  }
+}
+
+function readdirOrEmpty (dir) {
+  return readdir(dir).catch((err) => {
+    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
+      return []
+    }
+
+    throw err
+  })
+}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/get.js b/node_modules/sigstore/node_modules/cacache/lib/get.js
new file mode 100644
index 0000000000000..80ec206c7ecaa
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/get.js
@@ -0,0 +1,170 @@
+'use strict'
+
+const Collect = require('minipass-collect')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const read = require('./content/read')
+
+async function getData (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return {
+      metadata: memoized.entry.metadata,
+      data: memoized.data,
+      integrity: memoized.entry.integrity,
+      size: memoized.entry.size,
+    }
+  }
+
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  const data = await read(cache, entry.integrity, { integrity, size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return {
+    data,
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+module.exports = getData
+
+async function getDataByDigest (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get.byDigest(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return memoized
+  }
+
+  const res = await read(cache, key, { integrity, size })
+  if (memoize) {
+    memo.put.byDigest(cache, key, res, opts)
+  }
+  return res
+}
+module.exports.byDigest = getDataByDigest
+
+const getMemoizedStream = (memoized) => {
+  const stream = new Minipass()
+  stream.on('newListener', function (ev, cb) {
+    ev === 'metadata' && cb(memoized.entry.metadata)
+    ev === 'integrity' && cb(memoized.entry.integrity)
+    ev === 'size' && cb(memoized.entry.size)
+  })
+  stream.end(memoized.data)
+  return stream
+}
+
+function getStream (cache, key, opts = {}) {
+  const { memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return getMemoizedStream(memoized)
+  }
+
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const entry = await index.find(cache, key)
+    if (!entry) {
+      throw new index.NotFoundError(cache, key)
+    }
+
+    stream.emit('metadata', entry.metadata)
+    stream.emit('integrity', entry.integrity)
+    stream.emit('size', entry.size)
+    stream.on('newListener', function (ev, cb) {
+      ev === 'metadata' && cb(entry.metadata)
+      ev === 'integrity' && cb(entry.integrity)
+      ev === 'size' && cb(entry.size)
+    })
+
+    const src = read.readStream(
+      cache,
+      entry.integrity,
+      { ...opts, size: typeof size !== 'number' ? entry.size : size }
+    )
+
+    if (memoize) {
+      const memoStream = new Collect.PassThrough()
+      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
+      stream.unshift(memoStream)
+    }
+    stream.unshift(src)
+    return stream
+  }).catch((err) => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.stream = getStream
+
+function getStreamDigest (cache, integrity, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get.byDigest(cache, integrity, opts)
+  if (memoized && memoize !== false) {
+    const stream = new Minipass()
+    stream.end(memoized)
+    return stream
+  } else {
+    const stream = read.readStream(cache, integrity, opts)
+    if (!memoize) {
+      return stream
+    }
+
+    const memoStream = new Collect.PassThrough()
+    memoStream.on('collect', data => memo.put.byDigest(
+      cache,
+      integrity,
+      data,
+      opts
+    ))
+    return new Pipeline(stream, memoStream)
+  }
+}
+
+module.exports.stream.byDigest = getStreamDigest
+
+function info (cache, key, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return Promise.resolve(memoized.entry)
+  } else {
+    return index.find(cache, key)
+  }
+}
+module.exports.info = info
+
+async function copy (cache, key, dest, opts = {}) {
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  await read.copy(cache, entry.integrity, dest, opts)
+  return {
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+
+module.exports.copy = copy
+
+async function copyByDigest (cache, key, dest, opts = {}) {
+  await read.copy(cache, key, dest, opts)
+  return key
+}
+
+module.exports.copy.byDigest = copyByDigest
+
+module.exports.hasContent = read.hasContent
diff --git a/node_modules/sigstore/node_modules/cacache/lib/index.js b/node_modules/sigstore/node_modules/cacache/lib/index.js
new file mode 100644
index 0000000000000..c9b0da5f3a271
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/index.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const get = require('./get.js')
+const put = require('./put.js')
+const rm = require('./rm.js')
+const verify = require('./verify.js')
+const { clearMemoized } = require('./memoization.js')
+const tmp = require('./util/tmp.js')
+const index = require('./entry-index.js')
+
+module.exports.index = {}
+module.exports.index.compact = index.compact
+module.exports.index.insert = index.insert
+
+module.exports.ls = index.ls
+module.exports.ls.stream = index.lsStream
+
+module.exports.get = get
+module.exports.get.byDigest = get.byDigest
+module.exports.get.stream = get.stream
+module.exports.get.stream.byDigest = get.stream.byDigest
+module.exports.get.copy = get.copy
+module.exports.get.copy.byDigest = get.copy.byDigest
+module.exports.get.info = get.info
+module.exports.get.hasContent = get.hasContent
+
+module.exports.put = put
+module.exports.put.stream = put.stream
+
+module.exports.rm = rm.entry
+module.exports.rm.all = rm.all
+module.exports.rm.entry = module.exports.rm
+module.exports.rm.content = rm.content
+
+module.exports.clearMemoized = clearMemoized
+
+module.exports.tmp = {}
+module.exports.tmp.mkdir = tmp.mkdir
+module.exports.tmp.withTmp = tmp.withTmp
+
+module.exports.verify = verify
+module.exports.verify.lastRun = verify.lastRun
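+
+// A minimal usage sketch of the exports above; the cache path and key are
+// hypothetical example values, not anything this module defines:
+//
+//   const cacache = require('cacache')
+//   const data = Buffer.from('hello')
+//   const integrity = await cacache.put('/tmp/example-cache', 'example-key', data)
+//   const { data: buf, metadata } = await cacache.get('/tmp/example-cache', 'example-key')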
diff --git a/node_modules/sigstore/node_modules/cacache/lib/memoization.js b/node_modules/sigstore/node_modules/cacache/lib/memoization.js
new file mode 100644
index 0000000000000..0ff604a479c9c
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/memoization.js
@@ -0,0 +1,72 @@
+'use strict'
+
+const LRU = require('lru-cache')
+
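+// default in-memory store, shared process-wide unless opts.memoize supplies
+// a custom one (see pickMem below)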
+const MEMOIZED = new LRU({
+  max: 500,
+  maxSize: 50 * 1024 * 1024, // 50MB
+  ttl: 3 * 60 * 1000, // 3 minutes
+  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
+})
+
+module.exports.clearMemoized = clearMemoized
+
+function clearMemoized () {
+  const old = {}
+  MEMOIZED.forEach((v, k) => {
+    old[k] = v
+  })
+  MEMOIZED.clear()
+  return old
+}
+
+module.exports.put = put
+
+function put (cache, entry, data, opts) {
+  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
+  putDigest(cache, entry.integrity, data, opts)
+}
+
+module.exports.put.byDigest = putDigest
+
+function putDigest (cache, integrity, data, opts) {
+  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
+}
+
+module.exports.get = get
+
+function get (cache, key, opts) {
+  return pickMem(opts).get(`key:${cache}:${key}`)
+}
+
+module.exports.get.byDigest = getDigest
+
+function getDigest (cache, integrity, opts) {
+  return pickMem(opts).get(`digest:${cache}:${integrity}`)
+}
+
+class ObjProxy {
+  constructor (obj) {
+    this.obj = obj
+  }
+
+  get (key) {
+    return this.obj[key]
+  }
+
+  set (key, val) {
+    this.obj[key] = val
+  }
+}
+
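+// choose where memoized entries live: an object with get/set methods is used
+// directly, any other object is wrapped in ObjProxy, and everything else
+// falls back to the shared LRU above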
+function pickMem (opts) {
+  if (!opts || !opts.memoize) {
+    return MEMOIZED
+  } else if (opts.memoize.get && opts.memoize.set) {
+    return opts.memoize
+  } else if (typeof opts.memoize === 'object') {
+    return new ObjProxy(opts.memoize)
+  } else {
+    return MEMOIZED
+  }
+}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/put.js b/node_modules/sigstore/node_modules/cacache/lib/put.js
new file mode 100644
index 0000000000000..9fc932d5f6dec
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/put.js
@@ -0,0 +1,80 @@
+'use strict'
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const write = require('./content/write')
+const Flush = require('minipass-flush')
+const { PassThrough } = require('minipass-collect')
+const Pipeline = require('minipass-pipeline')
+
+const putOpts = (opts) => ({
+  algorithms: ['sha512'],
+  ...opts,
+})
+
+module.exports = putData
+
+async function putData (cache, key, data, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  const res = await write(cache, data, opts)
+  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return res.integrity
+}
+
+module.exports.stream = putStream
+
+function putStream (cache, key, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  let integrity
+  let size
+  let error
+
+  let memoData
+  const pipeline = new Pipeline()
+  // first item in the pipeline is the memoizer, because we need
+  // that to end first and get the collected data.
+  if (memoize) {
+    const memoizer = new PassThrough().on('collect', data => {
+      memoData = data
+    })
+    pipeline.push(memoizer)
+  }
+
+  // contentStream is a write-only sink, not a passthrough:
+  // no data comes out of it.
+  const contentStream = write.stream(cache, opts)
+    .on('integrity', (int) => {
+      integrity = int
+    })
+    .on('size', (s) => {
+      size = s
+    })
+    .on('error', (err) => {
+      error = err
+    })
+
+  pipeline.push(contentStream)
+
+  // last but not least, we write the index and emit hash and size,
+  // and memoize if we're doing that
+  pipeline.push(new Flush({
+    async flush () {
+      if (!error) {
+        const entry = await index.insert(cache, key, integrity, { ...opts, size })
+        if (memoize && memoData) {
+          memo.put(cache, entry, memoData, opts)
+        }
+        pipeline.emit('integrity', integrity)
+        pipeline.emit('size', size)
+      }
+    },
+  }))
+
+  return pipeline
+}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/rm.js b/node_modules/sigstore/node_modules/cacache/lib/rm.js
new file mode 100644
index 0000000000000..a94760c7cf243
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/rm.js
@@ -0,0 +1,31 @@
+'use strict'
+
+const { rm } = require('fs/promises')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const memo = require('./memoization')
+const path = require('path')
+const rmContent = require('./content/rm')
+
+module.exports = entry
+module.exports.entry = entry
+
+function entry (cache, key, opts) {
+  memo.clearMemoized()
+  return index.delete(cache, key, opts)
+}
+
+module.exports.content = content
+
+function content (cache, integrity) {
+  memo.clearMemoized()
+  return rmContent(cache, integrity)
+}
+
+module.exports.all = all
+
+async function all (cache) {
+  memo.clearMemoized()
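+  // the extglob matches both the content-vX and index-vX trees at the cache root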
+  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
+  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
+}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/util/glob.js b/node_modules/sigstore/node_modules/cacache/lib/util/glob.js
new file mode 100644
index 0000000000000..8500c1c16a429
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/util/glob.js
@@ -0,0 +1,7 @@
+'use strict'
+
+const { glob } = require('glob')
+const path = require('path')
+
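+// glob patterns only understand posix separators, so rewrite any win32
+// separators before matching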
+const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
+module.exports = (path, options) => glob(globify(path), options)
diff --git a/node_modules/sigstore/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/sigstore/node_modules/cacache/lib/util/hash-to-segments.js
new file mode 100644
index 0000000000000..445599b503808
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/util/hash-to-segments.js
@@ -0,0 +1,7 @@
+'use strict'
+
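+// split a hex digest into 2/2/rest path segments so content files fan out
+// across subdirectories instead of piling up in one flat directory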
+module.exports = hashToSegments
+
+function hashToSegments (hash) {
+  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
+}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/util/tmp.js b/node_modules/sigstore/node_modules/cacache/lib/util/tmp.js
new file mode 100644
index 0000000000000..0bf5302136ebe
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/util/tmp.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const { withTempDir } = require('@npmcli/fs')
+const fs = require('fs/promises')
+const path = require('path')
+
+module.exports.mkdir = mktmpdir
+
+async function mktmpdir (cache, opts = {}) {
+  const { tmpPrefix } = opts
+  const tmpDir = path.join(cache, 'tmp')
+  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
+  // do not use path.join(); it drops the trailing / if tmpPrefix is unset
+  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
+  return fs.mkdtemp(target, { owner: 'inherit' })
+}
+
+module.exports.withTmp = withTmp
+
+function withTmp (cache, opts, cb) {
+  if (!cb) {
+    cb = opts
+    opts = {}
+  }
+  return withTempDir(path.join(cache, 'tmp'), cb, opts)
+}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/verify.js b/node_modules/sigstore/node_modules/cacache/lib/verify.js
new file mode 100644
index 0000000000000..62e85c946490f
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/verify.js
@@ -0,0 +1,257 @@
+'use strict'
+
+const {
+  mkdir,
+  readFile,
+  rm,
+  stat,
+  truncate,
+  writeFile,
+} = require('fs/promises')
+const pMap = require('p-map')
+const contentPath = require('./content/path')
+const fsm = require('fs-minipass')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const path = require('path')
+const ssri = require('ssri')
+
+const hasOwnProperty = (obj, key) =>
+  Object.prototype.hasOwnProperty.call(obj, key)
+
+const verifyOpts = (opts) => ({
+  concurrency: 20,
+  log: { silly () {} },
+  ...opts,
+})
+
+module.exports = verify
+
+async function verify (cache, opts) {
+  opts = verifyOpts(opts)
+  opts.log.silly('verify', 'verifying cache at', cache)
+
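+  // each step runs in order; any stats object it returns is merged into the
+  // final result, and its wall-clock duration is recorded under stats.runTime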
+  const steps = [
+    markStartTime,
+    fixPerms,
+    garbageCollect,
+    rebuildIndex,
+    cleanTmp,
+    writeVerifile,
+    markEndTime,
+  ]
+
+  const stats = {}
+  for (const step of steps) {
+    const label = step.name
+    const start = new Date()
+    const s = await step(cache, opts)
+    if (s) {
+      Object.keys(s).forEach((k) => {
+        stats[k] = s[k]
+      })
+    }
+    const end = new Date()
+    if (!stats.runTime) {
+      stats.runTime = {}
+    }
+    stats.runTime[label] = end - start
+  }
+  stats.runTime.total = stats.endTime - stats.startTime
+  opts.log.silly(
+    'verify',
+    'verification finished for',
+    cache,
+    'in',
+    `${stats.runTime.total}ms`
+  )
+  return stats
+}
+
+async function markStartTime (cache, opts) {
+  return { startTime: new Date() }
+}
+
+async function markEndTime (cache, opts) {
+  return { endTime: new Date() }
+}
+
+async function fixPerms (cache, opts) {
+  opts.log.silly('verify', 'fixing cache permissions')
+  await mkdir(cache, { recursive: true })
+  return null
+}
+
+// Implements a naive mark-and-sweep tracing garbage collector.
+//
+// The algorithm is basically as follows:
+// 1. Read (and filter) all index entries ("pointers")
+// 2. Mark each integrity value as "live"
+// 3. Read entire filesystem tree in `content-vX/` dir
+// 4. If content is live, verify its checksum and delete it if it fails
+// 5. If content is not marked as live, rm it.
+//
+async function garbageCollect (cache, opts) {
+  opts.log.silly('verify', 'garbage collecting content')
+  const indexStream = index.lsStream(cache)
+  const liveContent = new Set()
+  indexStream.on('data', (entry) => {
+    if (opts.filter && !opts.filter(entry)) {
+      return
+    }
+
+    // integrity is stringified, re-parse it so we can get each hash
+    const integrity = ssri.parse(entry.integrity)
+    for (const algo in integrity) {
+      liveContent.add(integrity[algo].toString())
+    }
+  })
+  await new Promise((resolve, reject) => {
+    indexStream.on('end', resolve).on('error', reject)
+  })
+  const contentDir = contentPath.contentDir(cache)
+  const files = await glob(path.join(contentDir, '**'), {
+    follow: false,
+    nodir: true,
+    nosort: true,
+  })
+  const stats = {
+    verifiedContent: 0,
+    reclaimedCount: 0,
+    reclaimedSize: 0,
+    badContentCount: 0,
+    keptSize: 0,
+  }
+  await pMap(
+    files,
+    async (f) => {
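+      // content paths end in <algorithm>/<2 chars>/<2 chars>/<rest of digest>,
+      // so the hash and algorithm can be recovered from the path itself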
+      const split = f.split(/[/\\]/)
+      const digest = split.slice(split.length - 3).join('')
+      const algo = split[split.length - 4]
+      const integrity = ssri.fromHex(digest, algo)
+      if (liveContent.has(integrity.toString())) {
+        const info = await verifyContent(f, integrity)
+        if (!info.valid) {
+          stats.reclaimedCount++
+          stats.badContentCount++
+          stats.reclaimedSize += info.size
+        } else {
+          stats.verifiedContent++
+          stats.keptSize += info.size
+        }
+      } else {
+        // No entries refer to this content. We can delete.
+        stats.reclaimedCount++
+        const s = await stat(f)
+        await rm(f, { recursive: true, force: true })
+        stats.reclaimedSize += s.size
+      }
+      return stats
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function verifyContent (filepath, sri) {
+  const contentInfo = {}
+  try {
+    const { size } = await stat(filepath)
+    contentInfo.size = size
+    contentInfo.valid = true
+    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return { size: 0, valid: false }
+    }
+    if (err.code !== 'EINTEGRITY') {
+      throw err
+    }
+
+    await rm(filepath, { recursive: true, force: true })
+    contentInfo.valid = false
+  }
+  return contentInfo
+}
+
+async function rebuildIndex (cache, opts) {
+  opts.log.silly('verify', 'rebuilding index')
+  const entries = await index.ls(cache)
+  const stats = {
+    missingContent: 0,
+    rejectedEntries: 0,
+    totalEntries: 0,
+  }
+  const buckets = {}
+  for (const k in entries) {
+    /* istanbul ignore else */
+    if (hasOwnProperty(entries, k)) {
+      const hashed = index.hashKey(k)
+      const entry = entries[k]
+      const excluded = opts.filter && !opts.filter(entry)
+      excluded && stats.rejectedEntries++
+      if (buckets[hashed] && !excluded) {
+        buckets[hashed].push(entry)
+      } else if (buckets[hashed] && excluded) {
+        // skip
+      } else if (excluded) {
+        buckets[hashed] = []
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      } else {
+        buckets[hashed] = [entry]
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      }
+    }
+  }
+  await pMap(
+    Object.keys(buckets),
+    (key) => {
+      return rebuildBucket(cache, buckets[key], stats, opts)
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function rebuildBucket (cache, bucket, stats, opts) {
+  await truncate(bucket._path)
+  // This needs to be serialized because cacache explicitly
+  // lets very racy bucket conflicts clobber each other.
+  for (const entry of bucket) {
+    const content = contentPath(cache, entry.integrity)
+    try {
+      await stat(content)
+      await index.insert(cache, entry.key, entry.integrity, {
+        metadata: entry.metadata,
+        size: entry.size,
+        time: entry.time,
+      })
+      stats.totalEntries++
+    } catch (err) {
+      if (err.code === 'ENOENT') {
+        stats.rejectedEntries++
+        stats.missingContent++
+      } else {
+        throw err
+      }
+    }
+  }
+}
+
+function cleanTmp (cache, opts) {
+  opts.log.silly('verify', 'cleaning tmp directory')
+  return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
+}
+
+async function writeVerifile (cache, opts) {
+  const verifile = path.join(cache, '_lastverified')
+  opts.log.silly('verify', 'writing verifile to ' + verifile)
+  return writeFile(verifile, `${Date.now()}`)
+}
+
+module.exports.lastRun = lastRun
+
+async function lastRun (cache) {
+  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
+  return new Date(+data)
+}
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/LICENSE b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/LICENSE
new file mode 100644
index 0000000000000..97f8e32ed82e4
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/index.js b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
new file mode 100644
index 0000000000000..b6cdae8eb514b
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
@@ -0,0 +1,1028 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0;
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+const events_1 = require("events");
+const stream_1 = __importDefault(require("stream"));
+const string_decoder_1 = require("string_decoder");
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof stream_1.default ||
+        (0, exports.isReadable)(s) ||
+        (0, exports.isWritable)(s));
+exports.isStream = isStream;
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof events_1.EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== stream_1.default.Writable.prototype.pipe;
+exports.isReadable = isReadable;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof events_1.EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
+exports.isWritable = isWritable;
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
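+// async-mode streams defer handler calls to the microtask queue; sync-mode
+// streams invoke them inline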
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+class Minipass extends events_1.EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new string_decoder_1.StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer streams, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior. That is, a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything is only allowed if in object mode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // if it's empty, don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes Minipass more suitable for write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
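+        // keep shifting and emitting buffered chunks until the buffer drains
+        // or a consumer pauses the stream (FLUSHCHUNK returns the flowing state)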
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+     *   already passed will cause the event to be emitted immediately and all
+     *   handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data, it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return exports.isStream;
+    }
+}
+exports.Minipass = Minipass;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/package.json b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/index.js b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
new file mode 100644
index 0000000000000..b65fafbae43a4
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
@@ -0,0 +1,1018 @@
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+import { EventEmitter } from 'events';
+import Stream from 'stream';
+import { StringDecoder } from 'string_decoder';
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+export const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof Stream ||
+        isReadable(s) ||
+        isWritable(s));
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+export const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== Stream.Writable.prototype.pipe;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+export const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+export class Minipass extends EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
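+     *
+     * For example, each of the three modes:
+     *
+     *   new Minipass()                      // Buffer stream
+     *   new Minipass({ encoding: 'utf8' })  // string stream
+     *   new Minipass({ objectMode: true })  // object stream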
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer streams, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior. That is, a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
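+    /**
+     * Write a chunk to the stream. In general, returns true if the stream
+     * is flowing, and false if the chunk was buffered, in which case the
+     * writer can wait for 'drain' before writing more.
+     *
+     * Backpressure sketch (`once` from 'node:events', `mp` a Minipass):
+     *
+     *   if (!mp.write(chunk)) await once(mp, 'drain')
+     */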
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything is only allowed if in object mode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
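+     *
+     * A sketch that drains the current buffer (`consume` is a stand-in
+     * for application code):
+     *
+     *   let c
+     *   while ((c = mp.read()) !== null) consume(c)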
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes MP more suitable to write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
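+     *
+     * Eg, `src.pipe(dest, { proxyErrors: true })` forwards 'error' events
+     * from `src` to `dest` in addition to the data.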
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+     *   already passed will cause the event to be emitted immediately and all
+     *   handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
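+     *
+     * Eg, a listener attached after an error still observes it:
+     *
+     *   mp.emit('error', new Error('oops'))
+     *   mp.on('error', er => console.error(er.message)) // logs 'oops'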
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
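+     *
+     * Sketch: `const chunks = await mp.collect()`. In non-objectMode
+     * streams, `chunks.dataLength` is the total length of all chunks.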
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
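+     *
+     * Eg, `const body = await mp.concat()` resolves to a single string
+     * for encoding streams, or a single Buffer otherwise.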
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
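+     *
+     * Eg:
+     *
+     *   for await (const chunk of mp) consume(chunk)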
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
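+     *
+     * Eg, consuming only what is already buffered:
+     *
+     *   for (const chunk of mp) consume(chunk)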
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data, it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return isStream;
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/package.json b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/package.json b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/package.json
new file mode 100644
index 0000000000000..6faaa247a5bc6
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "minipass",
+  "version": "7.0.3",
+  "description": "minimal implementation of a PassThrough stream",
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/cjs/index.js",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--enable-source-maps",
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@types/node": "^20.1.2",
+    "@types/tap": "^15.0.8",
+    "c8": "^7.13.0",
+    "prettier": "^2.6.2",
+    "tap": "^16.3.0",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.24.8",
+    "typescript": "^5.1.3",
+    "end-of-stream": "^1.4.0",
+    "node-abort-controller": "^3.1.1",
+    "sync-content": "^1.0.2",
+    "through2": "^2.0.3"
+  },
+  "repository": "https://github.com/isaacs/minipass",
+  "keywords": [
+    "passthrough",
+    "stream"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "engines": {
+    "node": ">=16 || 14 >=14.17"
+  }
+}
diff --git a/node_modules/sigstore/node_modules/cacache/package.json b/node_modules/sigstore/node_modules/cacache/package.json
new file mode 100644
index 0000000000000..ab58cb8b7c50f
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "cacache",
+  "version": "17.1.4",
+  "cache-version": {
+    "content": "2",
+    "index": "5"
+  },
+  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "coverage": "tap",
+    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
+    "lint": "eslint \"**/*.js\"",
+    "npmclilint": "npmcli-lint",
+    "lintfix": "npm run lint -- --fix",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "posttest": "npm run lint",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/cacache.git"
+  },
+  "keywords": [
+    "cache",
+    "caching",
+    "content-addressable",
+    "sri",
+    "sri hash",
+    "subresource integrity",
+    "cache",
+    "storage",
+    "store",
+    "file store",
+    "filesystem",
+    "disk cache",
+    "disk storage"
+  ],
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/fs": "^3.1.0",
+    "fs-minipass": "^3.0.0",
+    "glob": "^10.2.2",
+    "lru-cache": "^7.7.1",
+    "minipass": "^7.0.3",
+    "minipass-collect": "^1.0.2",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "p-map": "^4.0.0",
+    "ssri": "^10.0.0",
+    "tar": "^6.1.11",
+    "unique-filename": "^3.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "windowsCI": false,
+    "version": "4.18.0",
+    "publish": "true"
+  },
+  "author": "GitHub Inc.",
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/node_modules/tuf-js/node_modules/cacache/LICENSE.md b/node_modules/tuf-js/node_modules/cacache/LICENSE.md
new file mode 100644
index 0000000000000..8d28acf866d93
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/LICENSE.md
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/path.js b/node_modules/tuf-js/node_modules/cacache/lib/content/path.js
new file mode 100644
index 0000000000000..ad5a76a4f73f2
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/content/path.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const contentVer = require('../../package.json')['cache-version'].content
+const hashToSegments = require('../util/hash-to-segments')
+const path = require('path')
+const ssri = require('ssri')
+
+// Current format of content file path:
+//
+// sha512-BaSE64Hex= ->
+// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
+//
+module.exports = contentPath
+
+function contentPath (cache, integrity) {
+  const sri = ssri.parse(integrity, { single: true })
+  // contentPath is the *strongest* algo given
+  return path.join(
+    contentDir(cache),
+    sri.algorithm,
+    ...hashToSegments(sri.hexDigest())
+  )
+}
+
+module.exports.contentDir = contentDir
+
+function contentDir (cache) {
+  return path.join(cache, `content-v${contentVer}`)
+}
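+
+// Illustrative sketch: for an integrity whose strongest algorithm is
+// sha512 with hex digest 'bada55deadbeef...', contentPath yields
+// <cache>/content-v2/sha512/ba/da/55deadbeef...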
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/read.js b/node_modules/tuf-js/node_modules/cacache/lib/content/read.js
new file mode 100644
index 0000000000000..f41b539df65dc
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/content/read.js
@@ -0,0 +1,166 @@
+'use strict'
+
+const fs = require('fs/promises')
+const fsm = require('fs-minipass')
+const ssri = require('ssri')
+const contentPath = require('./path')
+const Pipeline = require('minipass-pipeline')
+
+module.exports = read
+
+const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
+async function read (cache, integrity, opts = {}) {
+  const { size } = opts
+  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+    // get size
+    const stat = await fs.stat(cpath)
+    return { stat, cpath, sri }
+  })
+  if (typeof size === 'number' && stat.size !== size) {
+    throw sizeError(size, stat.size)
+  }
+
+  if (stat.size > MAX_SINGLE_READ_SIZE) {
+    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
+  }
+
+  const data = await fs.readFile(cpath, { encoding: null })
+  if (!ssri.checkData(data, sri)) {
+    throw integrityError(sri, cpath)
+  }
+
+  return data
+}
+
+const readPipeline = (cpath, size, sri, stream) => {
+  stream.push(
+    new fsm.ReadStream(cpath, {
+      size,
+      readSize: MAX_SINGLE_READ_SIZE,
+    }),
+    ssri.integrityStream({
+      integrity: sri,
+      size,
+    })
+  )
+  return stream
+}
+
+module.exports.stream = readStream
+module.exports.readStream = readStream
+
+function readStream (cache, integrity, opts = {}) {
+  const { size } = opts
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+      // just stat to ensure it exists
+      const stat = await fs.stat(cpath)
+      return { stat, cpath, sri }
+    })
+    if (typeof size === 'number' && size !== stat.size) {
+      return stream.emit('error', sizeError(size, stat.size))
+    }
+
+    return readPipeline(cpath, stat.size, sri, stream)
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.copy = copy
+
+function copy (cache, integrity, dest) {
+  return withContentSri(cache, integrity, (cpath, sri) => {
+    return fs.copyFile(cpath, dest)
+  })
+}
+
+module.exports.hasContent = hasContent
+
+async function hasContent (cache, integrity) {
+  if (!integrity) {
+    return false
+  }
+
+  try {
+    return await withContentSri(cache, integrity, async (cpath, sri) => {
+      const stat = await fs.stat(cpath)
+      return { size: stat.size, sri, stat }
+    })
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return false
+    }
+
+    if (err.code === 'EPERM') {
+      /* istanbul ignore else */
+      if (process.platform !== 'win32') {
+        throw err
+      } else {
+        return false
+      }
+    }
+  }
+}
+
+async function withContentSri (cache, integrity, fn) {
+  const sri = ssri.parse(integrity)
+  // If `integrity` has multiple entries, pick the first digest
+  // with available local data.
+  const algo = sri.pickAlgorithm()
+  const digests = sri[algo]
+
+  if (digests.length <= 1) {
+    const cpath = contentPath(cache, digests[0])
+    return fn(cpath, digests[0])
+  } else {
+    // Can't use race here because a generic error can happen before
+    // a ENOENT error, and can happen before a valid result
+    const results = await Promise.all(digests.map(async (meta) => {
+      try {
+        return await withContentSri(cache, meta, fn)
+      } catch (err) {
+        if (err.code === 'ENOENT') {
+          return Object.assign(
+            new Error('No matching content found for ' + sri.toString()),
+            { code: 'ENOENT' }
+          )
+        }
+        return err
+      }
+    }))
+    // Return the first non error if it is found
+    const result = results.find((r) => !(r instanceof Error))
+    if (result) {
+      return result
+    }
+
+    // Throw the No matching content found error
+    const enoentError = results.find((r) => r.code === 'ENOENT')
+    if (enoentError) {
+      throw enoentError
+    }
+
+    // Throw generic error
+    throw results.find((r) => r instanceof Error)
+  }
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function integrityError (sri, path) {
+  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
+  err.code = 'EINTEGRITY'
+  err.sri = sri
+  err.path = path
+  return err
+}
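+
+// Usage sketch (inside an async function; the integrity value is
+// illustrative):
+//
+//   const read = require('./read')
+//   const data = await read(cachePath, 'sha512-deadbeef...', { size: 1024 })
+//   // rejects with EBADSIZE on a size mismatch, EINTEGRITY on a bad hash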
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js b/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js
new file mode 100644
index 0000000000000..ce58d679e4cb2
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js
@@ -0,0 +1,18 @@
+'use strict'
+
+const fs = require('fs/promises')
+const contentPath = require('./path')
+const { hasContent } = require('./read')
+
+module.exports = rm
+
+async function rm (cache, integrity) {
+  const content = await hasContent(cache, integrity)
+  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
+  if (content && content.sri) {
+    await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
+    return true
+  } else {
+    return false
+  }
+}
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/write.js b/node_modules/tuf-js/node_modules/cacache/lib/content/write.js
new file mode 100644
index 0000000000000..7146146581287
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/content/write.js
@@ -0,0 +1,205 @@
+'use strict'
+
+const events = require('events')
+
+const contentPath = require('./path')
+const fs = require('fs/promises')
+const { moveFile } = require('@npmcli/fs')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+const Flush = require('minipass-flush')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+const fsm = require('fs-minipass')
+
+module.exports = write
+
+// Cache of move operations in process so we don't duplicate
+const moveOperations = new Map()
+
+async function write (cache, data, opts = {}) {
+  const { algorithms, size, integrity } = opts
+
+  if (typeof size === 'number' && data.length !== size) {
+    throw sizeError(size, data.length)
+  }
+
+  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
+  if (integrity && !ssri.checkData(data, integrity, opts)) {
+    throw checksumError(integrity, sri)
+  }
+
+  for (const algo in sri) {
+    const tmp = await makeTmp(cache, opts)
+    const hash = sri[algo].toString()
+    try {
+      await fs.writeFile(tmp.target, data, { flag: 'wx' })
+      await moveToDestination(tmp, cache, hash, opts)
+    } finally {
+      if (!tmp.moved) {
+        await fs.rm(tmp.target, { recursive: true, force: true })
+      }
+    }
+  }
+  return { integrity: sri, size: data.length }
+}
+
+module.exports.stream = writeStream
+
+// writes proxied to the 'inputStream' that is passed to the Promise
+// 'end' is deferred until content is handled.
+class CacacheWriteStream extends Flush {
+  constructor (cache, opts) {
+    super()
+    this.opts = opts
+    this.cache = cache
+    this.inputStream = new Minipass()
+    this.inputStream.on('error', er => this.emit('error', er))
+    this.inputStream.on('drain', () => this.emit('drain'))
+    this.handleContentP = null
+  }
+
+  write (chunk, encoding, cb) {
+    if (!this.handleContentP) {
+      this.handleContentP = handleContent(
+        this.inputStream,
+        this.cache,
+        this.opts
+      )
+    }
+    return this.inputStream.write(chunk, encoding, cb)
+  }
+
+  flush (cb) {
+    this.inputStream.end(() => {
+      if (!this.handleContentP) {
+        const e = new Error('Cache input stream was empty')
+        e.code = 'ENODATA'
+        // empty streams are probably emitting end right away.
+        // defer this one tick by rejecting a promise on it.
+        return Promise.reject(e).catch(cb)
+      }
+      // eslint-disable-next-line promise/catch-or-return
+      this.handleContentP.then(
+        (res) => {
+          res.integrity && this.emit('integrity', res.integrity)
+          // eslint-disable-next-line promise/always-return
+          res.size !== null && this.emit('size', res.size)
+          cb()
+        },
+        (er) => cb(er)
+      )
+    })
+  }
+}
+
+function writeStream (cache, opts = {}) {
+  return new CacacheWriteStream(cache, opts)
+}
+
+async function handleContent (inputStream, cache, opts) {
+  const tmp = await makeTmp(cache, opts)
+  try {
+    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
+    await moveToDestination(
+      tmp,
+      cache,
+      res.integrity,
+      opts
+    )
+    return res
+  } finally {
+    if (!tmp.moved) {
+      await fs.rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+}
+
+async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
+  const outStream = new fsm.WriteStream(tmpTarget, {
+    flags: 'wx',
+  })
+
+  if (opts.integrityEmitter) {
+    // we need to create these all simultaneously since they can fire in any order
+    const [integrity, size] = await Promise.all([
+      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
+      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
+      new Pipeline(inputStream, outStream).promise(),
+    ])
+    return { integrity, size }
+  }
+
+  let integrity
+  let size
+  const hashStream = ssri.integrityStream({
+    integrity: opts.integrity,
+    algorithms: opts.algorithms,
+    size: opts.size,
+  })
+  hashStream.on('integrity', i => {
+    integrity = i
+  })
+  hashStream.on('size', s => {
+    size = s
+  })
+
+  const pipeline = new Pipeline(inputStream, hashStream, outStream)
+  await pipeline.promise()
+  return { integrity, size }
+}
+
+async function makeTmp (cache, opts) {
+  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
+  return {
+    target: tmpTarget,
+    moved: false,
+  }
+}
+
+async function moveToDestination (tmp, cache, sri, opts) {
+  const destination = contentPath(cache, sri)
+  const destDir = path.dirname(destination)
+  if (moveOperations.has(destination)) {
+    return moveOperations.get(destination)
+  }
+  moveOperations.set(
+    destination,
+    fs.mkdir(destDir, { recursive: true })
+      .then(async () => {
+        await moveFile(tmp.target, destination, { overwrite: false })
+        tmp.moved = true
+        return tmp.moved
+      })
+      .catch(err => {
+        if (!err.message.startsWith('The destination file exists')) {
+          throw Object.assign(err, { code: 'EEXIST' })
+        }
+      }).finally(() => {
+        moveOperations.delete(destination)
+      })
+
+  )
+  return moveOperations.get(destination)
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function checksumError (expected, found) {
+  const err = new Error(`Integrity check failed:
+  Wanted: ${expected}
+   Found: ${found}`)
+  err.code = 'EINTEGRITY'
+  err.expected = expected
+  err.found = found
+  return err
+}
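+
+// Usage sketch: the buffer form resolves with the computed integrity, and
+// the stream form emits 'integrity' and 'size' once content is committed.
+//
+//   const write = require('./write')
+//   const { integrity, size } = await write(cachePath, Buffer.from('hi'))
+//   input.pipe(write.stream(cachePath)).on('integrity', i => { /* ... */ })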
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js b/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js
new file mode 100644
index 0000000000000..722a37af5ce15
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js
@@ -0,0 +1,330 @@
+'use strict'
+
+const crypto = require('crypto')
+const {
+  appendFile,
+  mkdir,
+  readFile,
+  readdir,
+  rm,
+  writeFile,
+} = require('fs/promises')
+const { Minipass } = require('minipass')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+
+const contentPath = require('./content/path')
+const hashToSegments = require('./util/hash-to-segments')
+const indexV = require('../package.json')['cache-version'].index
+const { moveFile } = require('@npmcli/fs')
+
+module.exports.NotFoundError = class NotFoundError extends Error {
+  constructor (cache, key) {
+    super(`No cache entry for ${key} found in ${cache}`)
+    this.code = 'ENOENT'
+    this.cache = cache
+    this.key = key
+  }
+}
+
+module.exports.compact = compact
+
+async function compact (cache, key, matchFn, opts = {}) {
+  const bucket = bucketPath(cache, key)
+  const entries = await bucketEntries(bucket)
+  const newEntries = []
+  // we loop backwards because the bottom-most result is the newest
+  // since we add new entries with appendFile
+  for (let i = entries.length - 1; i >= 0; --i) {
+    const entry = entries[i]
+    // a null integrity could mean either a delete was appended
+    // or the user has simply stored an index that does not map
+    // to any content. we determine if the user wants to keep the
+    // null integrity based on the validateEntry function passed in options.
+    // if the integrity is null and no validateEntry is provided, we break
+    // as we consider the null integrity to be a deletion of everything
+    // that came before it.
+    if (entry.integrity === null && !opts.validateEntry) {
+      break
+    }
+
+    // if this entry is valid, and it is either the first entry or
+    // the newEntries array doesn't already include an entry that
+    // matches this one based on the provided matchFn, then we add
+    // it to the beginning of our list
+    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
+      (newEntries.length === 0 ||
+        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
+      newEntries.unshift(entry)
+    }
+  }
+
+  const newIndex = '\n' + newEntries.map((entry) => {
+    const stringified = JSON.stringify(entry)
+    const hash = hashEntry(stringified)
+    return `${hash}\t${stringified}`
+  }).join('\n')
+
+  const setup = async () => {
+    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+    await mkdir(path.dirname(target), { recursive: true })
+    return {
+      target,
+      moved: false,
+    }
+  }
+
+  const teardown = async (tmp) => {
+    if (!tmp.moved) {
+      return rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+
+  const write = async (tmp) => {
+    await writeFile(tmp.target, newIndex, { flag: 'wx' })
+    await mkdir(path.dirname(bucket), { recursive: true })
+    // we use @npmcli/fs's moveFile directly here because we
+    // want to overwrite the existing file
+    await moveFile(tmp.target, bucket)
+    tmp.moved = true
+  }
+
+  // write the file atomically
+  const tmp = await setup()
+  try {
+    await write(tmp)
+  } finally {
+    await teardown(tmp)
+  }
+
+  // we reverse the list we generated such that the newest
+  // entries come first in order to make looping through them easier
+  // the true passed to formatEntry tells it to keep null
+  // integrity values, if they made it this far it's because
+  // validateEntry returned true, and as such we should return it
+  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
+}
+
+module.exports.insert = insert
+
+async function insert (cache, key, integrity, opts = {}) {
+  const { metadata, size, time } = opts
+  const bucket = bucketPath(cache, key)
+  const entry = {
+    key,
+    integrity: integrity && ssri.stringify(integrity),
+    time: time || Date.now(),
+    size,
+    metadata,
+  }
+  try {
+    await mkdir(path.dirname(bucket), { recursive: true })
+    const stringified = JSON.stringify(entry)
+    // NOTE - Cleverness ahoy!
+    //
+    // This works because it's tremendously unlikely for an entry to corrupt
+    // another while still preserving the string length of the JSON in
+    // question. So, we just slap the length in there and verify it on read.
+    //
+    // Thanks to @isaacs for the whiteboarding session that ended up with
+    // this.
+    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return undefined
+    }
+
+    throw err
+  }
+  return formatEntry(cache, entry)
+}
+
+module.exports.find = find
+
+async function find (cache, key) {
+  const bucket = bucketPath(cache, key)
+  try {
+    const entries = await bucketEntries(bucket)
+    return entries.reduce((latest, next) => {
+      if (next && next.key === key) {
+        return formatEntry(cache, next)
+      } else {
+        return latest
+      }
+    }, null)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return null
+    } else {
+      throw err
+    }
+  }
+}
+
+module.exports.delete = del
+
+function del (cache, key, opts = {}) {
+  if (!opts.removeFully) {
+    return insert(cache, key, null, opts)
+  }
+
+  const bucket = bucketPath(cache, key)
+  return rm(bucket, { recursive: true, force: true })
+}
+
+module.exports.lsStream = lsStream
+
+function lsStream (cache) {
+  const indexDir = bucketDir(cache)
+  const stream = new Minipass({ objectMode: true })
+
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const buckets = await readdirOrEmpty(indexDir)
+    await Promise.all(buckets.map(async (bucket) => {
+      const bucketPath = path.join(indexDir, bucket)
+      const subbuckets = await readdirOrEmpty(bucketPath)
+      await Promise.all(subbuckets.map(async (subbucket) => {
+        const subbucketPath = path.join(bucketPath, subbucket)
+
+        // "/cachename//./*"
+        const subbucketEntries = await readdirOrEmpty(subbucketPath)
+        await Promise.all(subbucketEntries.map(async (entry) => {
+          const entryPath = path.join(subbucketPath, entry)
+          try {
+            const entries = await bucketEntries(entryPath)
+            // using a Map here means that if the same key appears more
+            // than once, only the most recent entry is kept
+            const reduced = entries.reduce((acc, entry) => {
+              acc.set(entry.key, entry)
+              return acc
+            }, new Map())
+            // reduced is a map of key => entry
+            for (const entry of reduced.values()) {
+              const formatted = formatEntry(cache, entry)
+              if (formatted) {
+                stream.write(formatted)
+              }
+            }
+          } catch (err) {
+            if (err.code === 'ENOENT') {
+              return undefined
+            }
+            throw err
+          }
+        }))
+      }))
+    }))
+    stream.end()
+    return stream
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.ls = ls
+
+async function ls (cache) {
+  const entries = await lsStream(cache).collect()
+  return entries.reduce((acc, xs) => {
+    acc[xs.key] = xs
+    return acc
+  }, {})
+}
+
+module.exports.bucketEntries = bucketEntries
+
+async function bucketEntries (bucket, filter) {
+  const data = await readFile(bucket, 'utf8')
+  return _bucketEntries(data, filter)
+}
+
+function _bucketEntries (data, filter) {
+  const entries = []
+  data.split('\n').forEach((entry) => {
+    if (!entry) {
+      return
+    }
+
+    const pieces = entry.split('\t')
+    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
+      // Hash is no good! Corruption or malice? Doesn't matter!
+      // EJECT EJECT
+      return
+    }
+    let obj
+    try {
+      obj = JSON.parse(pieces[1])
+    } catch (_) {
+      // invalid JSON means a corrupt or truncated entry; leave obj unset
+    }
+    // coverage disabled here, no need to test with an entry that parses to something falsy
+    // istanbul ignore else
+    if (obj) {
+      entries.push(obj)
+    }
+  })
+  return entries
+}
+
+module.exports.bucketDir = bucketDir
+
+function bucketDir (cache) {
+  return path.join(cache, `index-v${indexV}`)
+}
+
+module.exports.bucketPath = bucketPath
+
+function bucketPath (cache, key) {
+  const hashed = hashKey(key)
+  return path.join.apply(
+    path,
+    [bucketDir(cache)].concat(hashToSegments(hashed))
+  )
+}
+
+module.exports.hashKey = hashKey
+
+function hashKey (key) {
+  return hash(key, 'sha256')
+}
+
+module.exports.hashEntry = hashEntry
+
+function hashEntry (str) {
+  return hash(str, 'sha1')
+}
+
+function hash (str, digest) {
+  return crypto
+    .createHash(digest)
+    .update(str)
+    .digest('hex')
+}
+
+function formatEntry (cache, entry, keepAll) {
+  // Treat null digests as deletions. They'll shadow any previous entries.
+  if (!entry.integrity && !keepAll) {
+    return null
+  }
+
+  return {
+    key: entry.key,
+    integrity: entry.integrity,
+    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
+    size: entry.size,
+    time: entry.time,
+    metadata: entry.metadata,
+  }
+}
+
+function readdirOrEmpty (dir) {
+  return readdir(dir).catch((err) => {
+    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
+      return []
+    }
+
+    throw err
+  })
+}
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/get.js b/node_modules/tuf-js/node_modules/cacache/lib/get.js
new file mode 100644
index 0000000000000..80ec206c7ecaa
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/get.js
@@ -0,0 +1,170 @@
+'use strict'
+
+const Collect = require('minipass-collect')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const read = require('./content/read')
+
+async function getData (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return {
+      metadata: memoized.entry.metadata,
+      data: memoized.data,
+      integrity: memoized.entry.integrity,
+      size: memoized.entry.size,
+    }
+  }
+
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  const data = await read(cache, entry.integrity, { integrity, size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return {
+    data,
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+module.exports = getData
+
+async function getDataByDigest (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get.byDigest(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return memoized
+  }
+
+  const res = await read(cache, key, { integrity, size })
+  if (memoize) {
+    memo.put.byDigest(cache, key, res, opts)
+  }
+  return res
+}
+module.exports.byDigest = getDataByDigest
+
+const getMemoizedStream = (memoized) => {
+  const stream = new Minipass()
+  stream.on('newListener', function (ev, cb) {
+    ev === 'metadata' && cb(memoized.entry.metadata)
+    ev === 'integrity' && cb(memoized.entry.integrity)
+    ev === 'size' && cb(memoized.entry.size)
+  })
+  stream.end(memoized.data)
+  return stream
+}
+
+function getStream (cache, key, opts = {}) {
+  const { memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return getMemoizedStream(memoized)
+  }
+
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const entry = await index.find(cache, key)
+    if (!entry) {
+      throw new index.NotFoundError(cache, key)
+    }
+
+    stream.emit('metadata', entry.metadata)
+    stream.emit('integrity', entry.integrity)
+    stream.emit('size', entry.size)
+    stream.on('newListener', function (ev, cb) {
+      ev === 'metadata' && cb(entry.metadata)
+      ev === 'integrity' && cb(entry.integrity)
+      ev === 'size' && cb(entry.size)
+    })
+
+    const src = read.readStream(
+      cache,
+      entry.integrity,
+      { ...opts, size: typeof size !== 'number' ? entry.size : size }
+    )
+
+    if (memoize) {
+      const memoStream = new Collect.PassThrough()
+      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
+      stream.unshift(memoStream)
+    }
+    stream.unshift(src)
+    return stream
+  }).catch((err) => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.stream = getStream
+
+function getStreamDigest (cache, integrity, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get.byDigest(cache, integrity, opts)
+  if (memoized && memoize !== false) {
+    const stream = new Minipass()
+    stream.end(memoized)
+    return stream
+  } else {
+    const stream = read.readStream(cache, integrity, opts)
+    if (!memoize) {
+      return stream
+    }
+
+    const memoStream = new Collect.PassThrough()
+    memoStream.on('collect', data => memo.put.byDigest(
+      cache,
+      integrity,
+      data,
+      opts
+    ))
+    return new Pipeline(stream, memoStream)
+  }
+}
+
+module.exports.stream.byDigest = getStreamDigest
+
+function info (cache, key, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return Promise.resolve(memoized.entry)
+  } else {
+    return index.find(cache, key)
+  }
+}
+module.exports.info = info
+
+async function copy (cache, key, dest, opts = {}) {
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  await read.copy(cache, entry.integrity, dest, opts)
+  return {
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+
+module.exports.copy = copy
+
+async function copyByDigest (cache, key, dest, opts = {}) {
+  await read.copy(cache, key, dest, opts)
+  return key
+}
+
+module.exports.copy.byDigest = copyByDigest
+
+module.exports.hasContent = read.hasContent
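
A short usage sketch of the read APIs defined above, via cacache's public interface (the cache path and key here are hypothetical):

```js
'use strict'

const cacache = require('cacache')

async function main () {
  const cachePath = '/tmp/my-cache' // hypothetical location

  // Buffered read: resolves { data, metadata, size, integrity } or
  // rejects with index.NotFoundError when the key is unknown.
  const { data, integrity } = await cacache.get(cachePath, 'my-key')
  console.log(data.length, integrity)

  // Streaming read: metadata/integrity/size arrive as events before data.
  const stream = cacache.get.stream(cachePath, 'my-key')
  stream.on('integrity', (i) => console.log('integrity', i))
  stream.on('data', (chunk) => console.log('chunk', chunk.length))

  // Content-addressed read, bypassing the index entirely.
  const byDigest = await cacache.get.byDigest(cachePath, integrity)
  console.log(byDigest.length)
}

main().catch(console.error)
```
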
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/index.js b/node_modules/tuf-js/node_modules/cacache/lib/index.js
new file mode 100644
index 0000000000000..c9b0da5f3a271
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/index.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const get = require('./get.js')
+const put = require('./put.js')
+const rm = require('./rm.js')
+const verify = require('./verify.js')
+const { clearMemoized } = require('./memoization.js')
+const tmp = require('./util/tmp.js')
+const index = require('./entry-index.js')
+
+module.exports.index = {}
+module.exports.index.compact = index.compact
+module.exports.index.insert = index.insert
+
+module.exports.ls = index.ls
+module.exports.ls.stream = index.lsStream
+
+module.exports.get = get
+module.exports.get.byDigest = get.byDigest
+module.exports.get.stream = get.stream
+module.exports.get.stream.byDigest = get.stream.byDigest
+module.exports.get.copy = get.copy
+module.exports.get.copy.byDigest = get.copy.byDigest
+module.exports.get.info = get.info
+module.exports.get.hasContent = get.hasContent
+
+module.exports.put = put
+module.exports.put.stream = put.stream
+
+module.exports.rm = rm.entry
+module.exports.rm.all = rm.all
+module.exports.rm.entry = module.exports.rm
+module.exports.rm.content = rm.content
+
+module.exports.clearMemoized = clearMemoized
+
+module.exports.tmp = {}
+module.exports.tmp.mkdir = tmp.mkdir
+module.exports.tmp.withTmp = tmp.withTmp
+
+module.exports.verify = verify
+module.exports.verify.lastRun = verify.lastRun
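
The wiring above is easiest to see end to end; a hedged put/ls/rm round trip against the assembled public API (paths and keys hypothetical):

```js
'use strict'

const cacache = require('cacache')

async function roundtrip () {
  const cachePath = '/tmp/my-cache' // hypothetical

  // put() stores the content, writes an index entry for the key, and
  // resolves to the integrity of the stored content.
  const integrity = await cacache.put(cachePath, 'my-key', Buffer.from('hello'))
  console.log(integrity)

  // ls() resolves to an object keyed by cache key.
  const entries = await cacache.ls(cachePath)
  console.log(Object.keys(entries)) // [ 'my-key' ]

  // rm.entry() shadows the key (see rm.js below); afterwards the index
  // no longer resolves it.
  await cacache.rm.entry(cachePath, 'my-key')
  console.log(await cacache.get.info(cachePath, 'my-key')) // null
}

roundtrip().catch(console.error)
```
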
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/memoization.js b/node_modules/tuf-js/node_modules/cacache/lib/memoization.js
new file mode 100644
index 0000000000000..0ff604a479c9c
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/memoization.js
@@ -0,0 +1,72 @@
+'use strict'
+
+const LRU = require('lru-cache')
+
+const MEMOIZED = new LRU({
+  max: 500,
+  maxSize: 50 * 1024 * 1024, // 50MB
+  ttl: 3 * 60 * 1000, // 3 minutes
+  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
+})
+
+module.exports.clearMemoized = clearMemoized
+
+function clearMemoized () {
+  const old = {}
+  MEMOIZED.forEach((v, k) => {
+    old[k] = v
+  })
+  MEMOIZED.clear()
+  return old
+}
+
+module.exports.put = put
+
+function put (cache, entry, data, opts) {
+  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
+  putDigest(cache, entry.integrity, data, opts)
+}
+
+module.exports.put.byDigest = putDigest
+
+function putDigest (cache, integrity, data, opts) {
+  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
+}
+
+module.exports.get = get
+
+function get (cache, key, opts) {
+  return pickMem(opts).get(`key:${cache}:${key}`)
+}
+
+module.exports.get.byDigest = getDigest
+
+function getDigest (cache, integrity, opts) {
+  return pickMem(opts).get(`digest:${cache}:${integrity}`)
+}
+
+class ObjProxy {
+  constructor (obj) {
+    this.obj = obj
+  }
+
+  get (key) {
+    return this.obj[key]
+  }
+
+  set (key, val) {
+    this.obj[key] = val
+  }
+}
+
+function pickMem (opts) {
+  if (!opts || !opts.memoize) {
+    return MEMOIZED
+  } else if (opts.memoize.get && opts.memoize.set) {
+    return opts.memoize
+  } else if (typeof opts.memoize === 'object') {
+    return new ObjProxy(opts.memoize)
+  } else {
+    return MEMOIZED
+  }
+}
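
A sketch of how pickMem() routes the three supported shapes of opts.memoize; note this reaches into cacache's lib/ directly, which may not be a supported entry point, and all paths and keys are hypothetical:

```js
'use strict'

const memo = require('cacache/lib/memoization')

// 1. opts.memoize === true (or absent): the shared module-level LRU is used.
memo.put('/tmp/my-cache', { key: 'k', integrity: 'sha512-abc' }, Buffer.from('x'), { memoize: true })
console.log(memo.get('/tmp/my-cache', 'k', { memoize: true })) // { entry, data }

// 2. opts.memoize has get/set methods (e.g. a Map): used as-is.
const custom = new Map()
memo.put('/tmp/my-cache', { key: 'k2', integrity: 'sha512-def' }, Buffer.from('y'), { memoize: custom })
console.log(custom.has('key:/tmp/my-cache:k2')) // true

// 3. opts.memoize is a plain object: wrapped in ObjProxy so property
// access looks like get/set.
const plain = {}
memo.put('/tmp/my-cache', { key: 'k3', integrity: 'sha512-ghi' }, Buffer.from('z'), { memoize: plain })
console.log(Object.keys(plain)) // [ 'key:/tmp/my-cache:k3', 'digest:/tmp/my-cache:sha512-ghi' ]
```
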
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/put.js b/node_modules/tuf-js/node_modules/cacache/lib/put.js
new file mode 100644
index 0000000000000..9fc932d5f6dec
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/put.js
@@ -0,0 +1,80 @@
+'use strict'
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const write = require('./content/write')
+const Flush = require('minipass-flush')
+const { PassThrough } = require('minipass-collect')
+const Pipeline = require('minipass-pipeline')
+
+const putOpts = (opts) => ({
+  algorithms: ['sha512'],
+  ...opts,
+})
+
+module.exports = putData
+
+async function putData (cache, key, data, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  const res = await write(cache, data, opts)
+  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return res.integrity
+}
+
+module.exports.stream = putStream
+
+function putStream (cache, key, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  let integrity
+  let size
+  let error
+
+  let memoData
+  const pipeline = new Pipeline()
+  // first item in the pipeline is the memoizer, because we need
+  // that to end first and get the collected data.
+  if (memoize) {
+    const memoizer = new PassThrough().on('collect', data => {
+      memoData = data
+    })
+    pipeline.push(memoizer)
+  }
+
+  // contentStream is a write-only, not a passthrough
+  // no data comes out of it.
+  const contentStream = write.stream(cache, opts)
+    .on('integrity', (int) => {
+      integrity = int
+    })
+    .on('size', (s) => {
+      size = s
+    })
+    .on('error', (err) => {
+      error = err
+    })
+
+  pipeline.push(contentStream)
+
+  // last but not least, we write the index and emit hash and size,
+  // and memoize if we're doing that
+  pipeline.push(new Flush({
+    async flush () {
+      if (!error) {
+        const entry = await index.insert(cache, key, integrity, { ...opts, size })
+        if (memoize && memoData) {
+          memo.put(cache, entry, memoData, opts)
+        }
+        pipeline.emit('integrity', integrity)
+        pipeline.emit('size', size)
+      }
+    },
+  }))
+
+  return pipeline
+}
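
A brief sketch of driving putStream() above through the public API; the file name and cache path are made up:

```js
'use strict'

const fs = require('fs')
const cacache = require('cacache')

// Pipe arbitrary data into the cache; 'integrity' and 'size' are emitted
// once the final flush stage has written the index entry.
const dest = cacache.put.stream('/tmp/my-cache', 'my-tarball')
  .on('integrity', (i) => console.log('stored as', i))
  .on('size', (s) => console.log('bytes', s))

fs.createReadStream('./some-file.tgz').pipe(dest)
```
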
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/rm.js b/node_modules/tuf-js/node_modules/cacache/lib/rm.js
new file mode 100644
index 0000000000000..a94760c7cf243
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/rm.js
@@ -0,0 +1,31 @@
+'use strict'
+
+const { rm } = require('fs/promises')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const memo = require('./memoization')
+const path = require('path')
+const rmContent = require('./content/rm')
+
+module.exports = entry
+module.exports.entry = entry
+
+function entry (cache, key, opts) {
+  memo.clearMemoized()
+  return index.delete(cache, key, opts)
+}
+
+module.exports.content = content
+
+function content (cache, integrity) {
+  memo.clearMemoized()
+  return rmContent(cache, integrity)
+}
+
+module.exports.all = all
+
+async function all (cache) {
+  memo.clearMemoized()
+  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
+  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
+}
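
Worth noting: entry removal defaults to a tombstone, per del() in entry-index.js above. A hedged sketch (hypothetical paths):

```js
'use strict'

const cacache = require('cacache')

async function main () {
  const cachePath = '/tmp/my-cache' // hypothetical

  // Default: appends an { integrity: null } entry that shadows earlier
  // entries for the key; the bucket file itself is left in place.
  await cacache.rm.entry(cachePath, 'my-key')

  // removeFully: rm -rf the bucket file instead of writing a tombstone.
  await cacache.rm.entry(cachePath, 'my-key', { removeFully: true })
}

main().catch(console.error)
```
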
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js b/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js
new file mode 100644
index 0000000000000..8500c1c16a429
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js
@@ -0,0 +1,7 @@
+'use strict'
+
+const { glob } = require('glob')
+const path = require('path')
+
+const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
+module.exports = (path, options) => glob(globify(path), options)
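
The same normalization, shown standalone: glob treats backslashes as escape characters, so Windows separators must become forward slashes before matching (illustrative values):

```js
'use strict'

const path = require('path')

// Same idea as globify above: patterns always use posix separators.
const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)

console.log(globify('C:\\cache\\content-v2\\**')) // C:/cache/content-v2/**
```
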
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js
new file mode 100644
index 0000000000000..445599b503808
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js
@@ -0,0 +1,7 @@
+'use strict'
+
+module.exports = hashToSegments
+
+function hashToSegments (hash) {
+  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
+}
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js b/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js
new file mode 100644
index 0000000000000..0bf5302136ebe
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const { withTempDir } = require('@npmcli/fs')
+const fs = require('fs/promises')
+const path = require('path')
+
+module.exports.mkdir = mktmpdir
+
+async function mktmpdir (cache, opts = {}) {
+  const { tmpPrefix } = opts
+  const tmpDir = path.join(cache, 'tmp')
+  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
+  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
+  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
+  return fs.mkdtemp(target, { owner: 'inherit' })
+}
+
+module.exports.withTmp = withTmp
+
+function withTmp (cache, opts, cb) {
+  if (!cb) {
+    cb = opts
+    opts = {}
+  }
+  return withTempDir(path.join(cache, 'tmp'), cb, opts)
+}
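
A usage sketch of withTmp() via the public cacache.tmp namespace (hypothetical cache path):

```js
'use strict'

const cacache = require('cacache')
const fs = require('fs/promises')
const path = require('path')

// withTmp() creates a scratch directory under <cache>/tmp, runs the
// callback, and removes the directory once the callback settles.
async function main () {
  await cacache.tmp.withTmp('/tmp/my-cache', async (dir) => {
    await fs.writeFile(path.join(dir, 'scratch.txt'), 'temporary data')
    // ... do work; the directory is cleaned up when this resolves
  })
}

main().catch(console.error)
```
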
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/verify.js b/node_modules/tuf-js/node_modules/cacache/lib/verify.js
new file mode 100644
index 0000000000000..62e85c946490f
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/verify.js
@@ -0,0 +1,257 @@
+'use strict'
+
+const {
+  mkdir,
+  readFile,
+  rm,
+  stat,
+  truncate,
+  writeFile,
+} = require('fs/promises')
+const pMap = require('p-map')
+const contentPath = require('./content/path')
+const fsm = require('fs-minipass')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const path = require('path')
+const ssri = require('ssri')
+
+const hasOwnProperty = (obj, key) =>
+  Object.prototype.hasOwnProperty.call(obj, key)
+
+const verifyOpts = (opts) => ({
+  concurrency: 20,
+  log: { silly () {} },
+  ...opts,
+})
+
+module.exports = verify
+
+async function verify (cache, opts) {
+  opts = verifyOpts(opts)
+  opts.log.silly('verify', 'verifying cache at', cache)
+
+  const steps = [
+    markStartTime,
+    fixPerms,
+    garbageCollect,
+    rebuildIndex,
+    cleanTmp,
+    writeVerifile,
+    markEndTime,
+  ]
+
+  const stats = {}
+  for (const step of steps) {
+    const label = step.name
+    const start = new Date()
+    const s = await step(cache, opts)
+    if (s) {
+      Object.keys(s).forEach((k) => {
+        stats[k] = s[k]
+      })
+    }
+    const end = new Date()
+    if (!stats.runTime) {
+      stats.runTime = {}
+    }
+    stats.runTime[label] = end - start
+  }
+  stats.runTime.total = stats.endTime - stats.startTime
+  opts.log.silly(
+    'verify',
+    'verification finished for',
+    cache,
+    'in',
+    `${stats.runTime.total}ms`
+  )
+  return stats
+}
+
+async function markStartTime (cache, opts) {
+  return { startTime: new Date() }
+}
+
+async function markEndTime (cache, opts) {
+  return { endTime: new Date() }
+}
+
+async function fixPerms (cache, opts) {
+  opts.log.silly('verify', 'fixing cache permissions')
+  await mkdir(cache, { recursive: true })
+  return null
+}
+
+// Implements a naive mark-and-sweep tracing garbage collector.
+//
+// The algorithm is basically as follows:
+// 1. Read (and filter) all index entries ("pointers")
+// 2. Mark each integrity value as "live"
+// 3. Read entire filesystem tree in `content-vX/` dir
+// 4. If content is live, verify its checksum and delete it if it fails
+// 5. If content is not marked as live, rm it.
+//
+async function garbageCollect (cache, opts) {
+  opts.log.silly('verify', 'garbage collecting content')
+  const indexStream = index.lsStream(cache)
+  const liveContent = new Set()
+  indexStream.on('data', (entry) => {
+    if (opts.filter && !opts.filter(entry)) {
+      return
+    }
+
+    // integrity is stringified, re-parse it so we can get each hash
+    const integrity = ssri.parse(entry.integrity)
+    for (const algo in integrity) {
+      liveContent.add(integrity[algo].toString())
+    }
+  })
+  await new Promise((resolve, reject) => {
+    indexStream.on('end', resolve).on('error', reject)
+  })
+  const contentDir = contentPath.contentDir(cache)
+  const files = await glob(path.join(contentDir, '**'), {
+    follow: false,
+    nodir: true,
+    nosort: true,
+  })
+  const stats = {
+    verifiedContent: 0,
+    reclaimedCount: 0,
+    reclaimedSize: 0,
+    badContentCount: 0,
+    keptSize: 0,
+  }
+  await pMap(
+    files,
+    async (f) => {
+      const split = f.split(/[/\\]/)
+      const digest = split.slice(split.length - 3).join('')
+      const algo = split[split.length - 4]
+      const integrity = ssri.fromHex(digest, algo)
+      if (liveContent.has(integrity.toString())) {
+        const info = await verifyContent(f, integrity)
+        if (!info.valid) {
+          stats.reclaimedCount++
+          stats.badContentCount++
+          stats.reclaimedSize += info.size
+        } else {
+          stats.verifiedContent++
+          stats.keptSize += info.size
+        }
+      } else {
+        // No entries refer to this content. We can delete.
+        stats.reclaimedCount++
+        const s = await stat(f)
+        await rm(f, { recursive: true, force: true })
+        stats.reclaimedSize += s.size
+      }
+      return stats
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function verifyContent (filepath, sri) {
+  const contentInfo = {}
+  try {
+    const { size } = await stat(filepath)
+    contentInfo.size = size
+    contentInfo.valid = true
+    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return { size: 0, valid: false }
+    }
+    if (err.code !== 'EINTEGRITY') {
+      throw err
+    }
+
+    await rm(filepath, { recursive: true, force: true })
+    contentInfo.valid = false
+  }
+  return contentInfo
+}
+
+async function rebuildIndex (cache, opts) {
+  opts.log.silly('verify', 'rebuilding index')
+  const entries = await index.ls(cache)
+  const stats = {
+    missingContent: 0,
+    rejectedEntries: 0,
+    totalEntries: 0,
+  }
+  const buckets = {}
+  for (const k in entries) {
+    /* istanbul ignore else */
+    if (hasOwnProperty(entries, k)) {
+      const hashed = index.hashKey(k)
+      const entry = entries[k]
+      const excluded = opts.filter && !opts.filter(entry)
+      excluded && stats.rejectedEntries++
+      if (buckets[hashed] && !excluded) {
+        buckets[hashed].push(entry)
+      } else if (buckets[hashed] && excluded) {
+        // skip
+      } else if (excluded) {
+        buckets[hashed] = []
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      } else {
+        buckets[hashed] = [entry]
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      }
+    }
+  }
+  await pMap(
+    Object.keys(buckets),
+    (key) => {
+      return rebuildBucket(cache, buckets[key], stats, opts)
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function rebuildBucket (cache, bucket, stats, opts) {
+  await truncate(bucket._path)
+  // This needs to be serialized because cacache explicitly
+  // lets very racy bucket conflicts clobber each other.
+  for (const entry of bucket) {
+    const content = contentPath(cache, entry.integrity)
+    try {
+      await stat(content)
+      await index.insert(cache, entry.key, entry.integrity, {
+        metadata: entry.metadata,
+        size: entry.size,
+        time: entry.time,
+      })
+      stats.totalEntries++
+    } catch (err) {
+      if (err.code === 'ENOENT') {
+        stats.rejectedEntries++
+        stats.missingContent++
+      } else {
+        throw err
+      }
+    }
+  }
+}
+
+function cleanTmp (cache, opts) {
+  opts.log.silly('verify', 'cleaning tmp directory')
+  return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
+}
+
+async function writeVerifile (cache, opts) {
+  const verifile = path.join(cache, '_lastverified')
+  opts.log.silly('verify', 'writing verifile to ' + verifile)
+  return writeFile(verifile, `${Date.now()}`)
+}
+
+module.exports.lastRun = lastRun
+
+async function lastRun (cache) {
+  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
+  return new Date(+data)
+}
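
A hedged sketch of running the verification steps above and reading the timestamp written by writeVerifile (the filter shown is just an example predicate):

```js
'use strict'

const cacache = require('cacache')

async function main () {
  const cachePath = '/tmp/my-cache' // hypothetical

  // Runs the steps above in order; resolves with the accumulated stats.
  const stats = await cacache.verify(cachePath, {
    concurrency: 20,
    filter: (entry) => !entry.key.startsWith('temp:'), // example predicate
  })
  console.log(stats.verifiedContent, stats.reclaimedCount, stats.runTime.total)

  // lastRun() reads the _lastverified stamp written by writeVerifile.
  console.log(await cacache.verify.lastRun(cachePath))
}

main().catch(console.error)
```
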
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE
new file mode 100644
index 0000000000000..97f8e32ed82e4
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
new file mode 100644
index 0000000000000..b6cdae8eb514b
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
@@ -0,0 +1,1028 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0;
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+const events_1 = require("events");
+const stream_1 = __importDefault(require("stream"));
+const string_decoder_1 = require("string_decoder");
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof stream_1.default ||
+        (0, exports.isReadable)(s) ||
+        (0, exports.isWritable)(s));
+exports.isStream = isStream;
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof events_1.EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== stream_1.default.Writable.prototype.pipe;
+exports.isReadable = isReadable;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof events_1.EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
+exports.isWritable = isWritable;
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+class Minipass extends events_1.EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new string_decoder_1.StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer strings, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior.  Ie, a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything is only allowed if in object mode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes MP more suitable to write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+     *   already passed will cause the event to be emitted immediately and all
+     *   handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data, it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return exports.isStream;
+    }
+}
+exports.Minipass = Minipass;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
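For orientation, the buffered-read helpers vendored above (`collect()`, `concat()`,
`promise()`) compose as in this minimal consumer sketch against the CJS build
(assuming the package resolves as `minipass`; not part of the patch):

    // Hypothetical usage sketch, not part of the patch.
    const { Minipass } = require('minipass')

    const mp = new Minipass({ encoding: 'utf8' })
    mp.write('hello, ')
    mp.end('world')

    // With an encoding set, concat() joins the collected chunks into one
    // string; on a plain Buffer stream it would resolve to a single Buffer.
    mp.concat().then(str => console.log(str)) // 'hello, world'

As the guard at the top of the method shows, `concat()` rejects in objectMode,
where there is nothing sensible to concatenate.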
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
new file mode 100644
index 0000000000000..b65fafbae43a4
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
@@ -0,0 +1,1018 @@
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+import { EventEmitter } from 'events';
+import Stream from 'stream';
+import { StringDecoder } from 'string_decoder';
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+export const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof Stream ||
+        isReadable(s) ||
+        isWritable(s));
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+export const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== Stream.Writable.prototype.pipe;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+export const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+export class Minipass extends EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer streams, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior.  I.e., a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything else is only allowed in objectMode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes MP more suitable to write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+     *   already passed will cause the event to be emitted immediately and all
+     *   handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data, it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return isStream;
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
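The ESM build above is the same implementation with `export` bindings in place
of `exports` assignments. As its `[Symbol.asyncIterator]` method suggests, a
stream can be drained with `for await`; a hedged sketch (assuming the package
resolves as `minipass`; not part of the patch):

    // Hypothetical usage sketch, not part of the patch.
    import { Minipass } from 'minipass'

    const mp = new Minipass({ objectMode: true })
    // Writes made before iteration are buffered; iteration drains the
    // buffer, then waits for new writes until end() terminates the stream.
    mp.write({ n: 1 })
    mp.end({ n: 2 })

    for await (const item of mp) {
      console.log(item.n) // 1, then 2
    }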
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json
new file mode 100644
index 0000000000000..6faaa247a5bc6
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "minipass",
+  "version": "7.0.3",
+  "description": "minimal implementation of a PassThrough stream",
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/cjs/index.js",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--enable-source-maps",
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@types/node": "^20.1.2",
+    "@types/tap": "^15.0.8",
+    "c8": "^7.13.0",
+    "prettier": "^2.6.2",
+    "tap": "^16.3.0",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.24.8",
+    "typescript": "^5.1.3",
+    "end-of-stream": "^1.4.0",
+    "node-abort-controller": "^3.1.1",
+    "sync-content": "^1.0.2",
+    "through2": "^2.0.3"
+  },
+  "repository": "https://github.com/isaacs/minipass",
+  "keywords": [
+    "passthrough",
+    "stream"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "engines": {
+    "node": ">=16 || 14 >=14.17"
+  }
+}
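The `exports` map in this manifest is what makes the dual build work: the
`import` condition resolves into `dist/mjs` and the `require` condition into
`dist/cjs`, while the two tiny nested package.json files added above pin the
module `type` for each directory. In sketch form:

    // Hypothetical sketch of how the conditional exports resolve.
    const { Minipass } = require('minipass') // -> dist/cjs/index.js ("type": "commonjs")
    // and from an ES module:
    // import { Minipass } from 'minipass'  // -> dist/mjs/index.js ("type": "module")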
diff --git a/node_modules/tuf-js/node_modules/cacache/package.json b/node_modules/tuf-js/node_modules/cacache/package.json
new file mode 100644
index 0000000000000..ab58cb8b7c50f
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "cacache",
+  "version": "17.1.4",
+  "cache-version": {
+    "content": "2",
+    "index": "5"
+  },
+  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "coverage": "tap",
+    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
+    "lint": "eslint \"**/*.js\"",
+    "npmclilint": "npmcli-lint",
+    "lintfix": "npm run lint -- --fix",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "posttest": "npm run lint",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/cacache.git"
+  },
+  "keywords": [
+    "cache",
+    "caching",
+    "content-addressable",
+    "sri",
+    "sri hash",
+    "subresource integrity",
+    "cache",
+    "storage",
+    "store",
+    "file store",
+    "filesystem",
+    "disk cache",
+    "disk storage"
+  ],
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/fs": "^3.1.0",
+    "fs-minipass": "^3.0.0",
+    "glob": "^10.2.2",
+    "lru-cache": "^7.7.1",
+    "minipass": "^7.0.3",
+    "minipass-collect": "^1.0.2",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "p-map": "^4.0.0",
+    "ssri": "^10.0.0",
+    "tar": "^6.1.11",
+    "unique-filename": "^3.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "windowsCI": false,
+    "version": "4.18.0",
+    "publish": "true"
+  },
+  "author": "GitHub Inc.",
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
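For context on what this nested copy provides: cacache exposes a
content-addressable put/get API keyed by a cache directory. A minimal sketch,
with the cache path chosen here purely for illustration:

    // Hypothetical usage sketch, not part of the patch.
    const cacache = require('cacache')
    const cachePath = '/tmp/example-cache' // assumed scratch location

    async function demo () {
      // put() stores the data and resolves to its subresource integrity hash
      const integrity = await cacache.put(cachePath, 'my-key', 'hello')
      // get() resolves to { data, integrity, metadata, size } for the key
      const { data } = await cacache.get(cachePath, 'my-key')
      console.log(data.toString(), integrity)
    }
    demo()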
diff --git a/package-lock.json b/package-lock.json
index 805c6503cdc0c..11c73d2843e2f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -96,7 +96,7 @@
         "@npmcli/run-script": "^6.0.2",
         "abbrev": "^2.0.0",
         "archy": "~1.0.0",
-        "cacache": "^17.1.4",
+        "cacache": "^18.0.0",
         "chalk": "^5.3.0",
         "ci-info": "^3.8.0",
         "cli-columns": "^4.0.0",
@@ -2482,6 +2482,28 @@
         "node": "^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/metavuln-calculator/node_modules/cacache": {
+      "version": "17.1.4",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
+      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "dependencies": {
+        "@npmcli/fs": "^3.1.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^7.7.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^1.0.2",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^4.0.0",
+        "ssri": "^10.0.0",
+        "tar": "^6.1.11",
+        "unique-filename": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/mock-globals": {
       "resolved": "mock-globals",
       "link": true
@@ -3576,15 +3598,15 @@
       }
     },
     "node_modules/cacache": {
-      "version": "17.1.4",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
-      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "version": "18.0.0",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.0.tgz",
+      "integrity": "sha512-I7mVOPl3PUCeRub1U8YoGz2Lqv9WOBpobZ8RyWFXmReuILz+3OAyTa5oH3QPdtKZD7N0Yk00aLfzn0qvp8dZ1w==",
       "inBundle": true,
       "dependencies": {
         "@npmcli/fs": "^3.1.0",
         "fs-minipass": "^3.0.0",
         "glob": "^10.2.2",
-        "lru-cache": "^7.7.1",
+        "lru-cache": "^10.0.1",
         "minipass": "^7.0.3",
         "minipass-collect": "^1.0.2",
         "minipass-flush": "^1.0.5",
@@ -3595,7 +3617,16 @@
         "unique-filename": "^3.0.0"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
+    "node_modules/cacache/node_modules/lru-cache": {
+      "version": "10.0.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz",
+      "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==",
+      "inBundle": true,
+      "engines": {
+        "node": "14 || >=16.14"
       }
     },
     "node_modules/caching-transform": {
@@ -7982,6 +8013,29 @@
         "node": "^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/make-fetch-happen/node_modules/cacache": {
+      "version": "17.1.4",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
+      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "inBundle": true,
+      "dependencies": {
+        "@npmcli/fs": "^3.1.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^7.7.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^1.0.2",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^4.0.0",
+        "ssri": "^10.0.0",
+        "tar": "^6.1.11",
+        "unique-filename": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/map-obj": {
       "version": "4.3.0",
       "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz",
@@ -9399,6 +9453,84 @@
         "concat-map": "0.0.1"
       }
     },
+    "node_modules/node-gyp/node_modules/cacache": {
+      "version": "17.1.4",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
+      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "inBundle": true,
+      "dependencies": {
+        "@npmcli/fs": "^3.1.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^7.7.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^1.0.2",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^4.0.0",
+        "ssri": "^10.0.0",
+        "tar": "^6.1.11",
+        "unique-filename": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
+      "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
+      "inBundle": true,
+      "dependencies": {
+        "balanced-match": "^1.0.0"
+      }
+    },
+    "node_modules/node-gyp/node_modules/cacache/node_modules/glob": {
+      "version": "10.3.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.3.tgz",
+      "integrity": "sha512-92vPiMb/iqpmEgsOoIDvTjc50wf9CCCvMzsi6W0JLPeUKE8TWP1a73PgqSrqy7iAZxaSD1YdzU7QZR5LF51MJw==",
+      "inBundle": true,
+      "dependencies": {
+        "foreground-child": "^3.1.0",
+        "jackspeak": "^2.0.3",
+        "minimatch": "^9.0.1",
+        "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0",
+        "path-scurry": "^1.10.1"
+      },
+      "bin": {
+        "glob": "dist/cjs/src/bin.js"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/node-gyp/node_modules/cacache/node_modules/minimatch": {
+      "version": "9.0.3",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz",
+      "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==",
+      "inBundle": true,
+      "dependencies": {
+        "brace-expansion": "^2.0.1"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/node-gyp/node_modules/cacache/node_modules/minipass": {
+      "version": "7.0.3",
+      "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz",
+      "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      }
+    },
     "node_modules/node-gyp/node_modules/gauge": {
       "version": "4.0.4",
       "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz",
@@ -10322,6 +10454,29 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/pacote/node_modules/cacache": {
+      "version": "17.1.4",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
+      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "inBundle": true,
+      "dependencies": {
+        "@npmcli/fs": "^3.1.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^7.7.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^1.0.2",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^4.0.0",
+        "ssri": "^10.0.0",
+        "tar": "^6.1.11",
+        "unique-filename": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/parent-module": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -11670,6 +11825,38 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/sigstore/node_modules/cacache": {
+      "version": "17.1.4",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
+      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "inBundle": true,
+      "dependencies": {
+        "@npmcli/fs": "^3.1.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^7.7.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^1.0.2",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^4.0.0",
+        "ssri": "^10.0.0",
+        "tar": "^6.1.11",
+        "unique-filename": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/sigstore/node_modules/cacache/node_modules/minipass": {
+      "version": "7.0.3",
+      "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz",
+      "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      }
+    },
     "node_modules/sigstore/node_modules/make-fetch-happen": {
       "version": "11.1.1",
       "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz",
@@ -14831,6 +15018,38 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/tuf-js/node_modules/cacache": {
+      "version": "17.1.4",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
+      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "inBundle": true,
+      "dependencies": {
+        "@npmcli/fs": "^3.1.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^7.7.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^1.0.2",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^4.0.0",
+        "ssri": "^10.0.0",
+        "tar": "^6.1.11",
+        "unique-filename": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/tuf-js/node_modules/cacache/node_modules/minipass": {
+      "version": "7.0.3",
+      "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz",
+      "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      }
+    },
     "node_modules/tuf-js/node_modules/make-fetch-happen": {
       "version": "11.1.1",
       "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz",
@@ -15833,7 +16052,7 @@
         "@npmcli/query": "^3.0.0",
         "@npmcli/run-script": "^6.0.0",
         "bin-links": "^4.0.1",
-        "cacache": "^17.1.4",
+        "cacache": "^18.0.0",
         "common-ancestor-path": "^1.0.1",
         "hosted-git-info": "^6.1.1",
         "json-parse-even-better-errors": "^3.0.0",
diff --git a/package.json b/package.json
index e16660472c2c1..e74281c6f093f 100644
--- a/package.json
+++ b/package.json
@@ -61,7 +61,7 @@
     "@npmcli/run-script": "^6.0.2",
     "abbrev": "^2.0.0",
     "archy": "~1.0.0",
-    "cacache": "^17.1.4",
+    "cacache": "^18.0.0",
     "chalk": "^5.3.0",
     "ci-info": "^3.8.0",
     "cli-columns": "^4.0.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 8389434649e82..0d851ed3ca9e0 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -14,7 +14,7 @@
     "@npmcli/query": "^3.0.0",
     "@npmcli/run-script": "^6.0.0",
     "bin-links": "^4.0.1",
-    "cacache": "^17.1.4",
+    "cacache": "^18.0.0",
     "common-ancestor-path": "^1.0.1",
     "hosted-git-info": "^6.1.1",
     "json-parse-even-better-errors": "^3.0.0",

From 39d8732a66950e242142bf7ccd13bfa317ca422a Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 12:10:37 -0700
Subject: [PATCH 31/68] deps: hosted-git-info@7.0.0

---
 node_modules/.gitignore                       |  11 +-
 node_modules/hosted-git-info/lib/index.js     |   4 +-
 .../node_modules/lru-cache/LICENSE            |   0
 .../node_modules/lru-cache/dist/cjs/index.js  |   0
 .../lru-cache/dist/cjs/index.min.js           |   0
 .../lru-cache/dist/cjs/package.json           |   0
 .../node_modules/lru-cache/dist/mjs/index.js  |   0
 .../lru-cache/dist/mjs/index.min.js           |   0
 .../lru-cache/dist/mjs/package.json           |   0
 .../node_modules/lru-cache/package.json       |   0
 node_modules/hosted-git-info/package.json     |  17 +-
 .../node_modules/hosted-git-info/LICENSE      |   0
 .../hosted-git-info/lib/from-url.js           |   0
 .../node_modules/hosted-git-info/lib/hosts.js |   0
 .../node_modules/hosted-git-info/lib/index.js |   4 +-
 .../hosted-git-info/lib/parse-url.js          |   0
 .../node_modules/hosted-git-info/package.json |  17 +-
 .../node_modules/hosted-git-info/LICENSE      |  13 +
 .../hosted-git-info/lib/from-url.js           | 122 ++++++++++
 .../node_modules/hosted-git-info/lib/hosts.js | 228 ++++++++++++++++++
 .../node_modules/hosted-git-info/lib/index.js | 179 ++++++++++++++
 .../hosted-git-info/lib/parse-url.js          |  78 ++++++
 .../node_modules/hosted-git-info/package.json |  59 +++++
 package-lock.json                             |  80 +++---
 package.json                                  |   2 +-
 workspaces/arborist/package.json              |   2 +-
 26 files changed, 763 insertions(+), 53 deletions(-)
 rename node_modules/{@npmcli/package-json => hosted-git-info}/node_modules/lru-cache/LICENSE (100%)
 rename node_modules/{@npmcli/package-json => hosted-git-info}/node_modules/lru-cache/dist/cjs/index.js (100%)
 rename node_modules/{@npmcli/package-json => hosted-git-info}/node_modules/lru-cache/dist/cjs/index.min.js (100%)
 rename node_modules/{@npmcli/package-json => hosted-git-info}/node_modules/lru-cache/dist/cjs/package.json (100%)
 rename node_modules/{@npmcli/package-json => hosted-git-info}/node_modules/lru-cache/dist/mjs/index.js (100%)
 rename node_modules/{@npmcli/package-json => hosted-git-info}/node_modules/lru-cache/dist/mjs/index.min.js (100%)
 rename node_modules/{@npmcli/package-json => hosted-git-info}/node_modules/lru-cache/dist/mjs/package.json (100%)
 rename node_modules/{@npmcli/package-json => hosted-git-info}/node_modules/lru-cache/package.json (100%)
 rename node_modules/{@npmcli/package-json => normalize-package-data}/node_modules/hosted-git-info/LICENSE (100%)
 rename node_modules/{@npmcli/package-json => normalize-package-data}/node_modules/hosted-git-info/lib/from-url.js (100%)
 rename node_modules/{@npmcli/package-json => normalize-package-data}/node_modules/hosted-git-info/lib/hosts.js (100%)
 rename node_modules/{@npmcli/package-json => normalize-package-data}/node_modules/hosted-git-info/lib/index.js (98%)
 rename node_modules/{@npmcli/package-json => normalize-package-data}/node_modules/hosted-git-info/lib/parse-url.js (100%)
 rename node_modules/{@npmcli/package-json => normalize-package-data}/node_modules/hosted-git-info/package.json (83%)
 create mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE
 create mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/lib/from-url.js
 create mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/lib/hosts.js
 create mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js
 create mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/lib/parse-url.js
 create mode 100644 node_modules/npm-package-arg/node_modules/hosted-git-info/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 6b6963e6939f7..76cfc9b818e9f 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -36,8 +36,6 @@
 !/@npmcli/package-json
 !/@npmcli/package-json/node_modules/
 /@npmcli/package-json/node_modules/*
-!/@npmcli/package-json/node_modules/hosted-git-info
-!/@npmcli/package-json/node_modules/lru-cache
 !/@npmcli/package-json/node_modules/normalize-package-data
 !/@npmcli/promise-spawn
 !/@npmcli/query
@@ -124,6 +122,9 @@
 !/has-unicode
 !/has
 !/hosted-git-info
+!/hosted-git-info/node_modules/
+/hosted-git-info/node_modules/*
+!/hosted-git-info/node_modules/lru-cache
 !/http-cache-semantics
 !/http-proxy-agent
 !/https-proxy-agent
@@ -211,11 +212,17 @@
 !/node-gyp/node_modules/which
 !/nopt
 !/normalize-package-data
+!/normalize-package-data/node_modules/
+/normalize-package-data/node_modules/*
+!/normalize-package-data/node_modules/hosted-git-info
 !/npm-audit-report
 !/npm-bundled
 !/npm-install-checks
 !/npm-normalize-package-bin
 !/npm-package-arg
+!/npm-package-arg/node_modules/
+/npm-package-arg/node_modules/*
+!/npm-package-arg/node_modules/hosted-git-info
 !/npm-packlist
 !/npm-pick-manifest
 !/npm-profile
diff --git a/node_modules/hosted-git-info/lib/index.js b/node_modules/hosted-git-info/lib/index.js
index a7339c217e9a3..0c9d0b08c866b 100644
--- a/node_modules/hosted-git-info/lib/index.js
+++ b/node_modules/hosted-git-info/lib/index.js
@@ -1,11 +1,11 @@
 'use strict'
 
-const LRU = require('lru-cache')
+const { LRUCache } = require('lru-cache')
 const hosts = require('./hosts.js')
 const fromUrl = require('./from-url.js')
 const parseUrl = require('./parse-url.js')
 
-const cache = new LRU({ max: 1000 })
+const cache = new LRUCache({ max: 1000 })
 
 class GitHost {
   constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
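
The only code change in this file tracks lru-cache's switch from a default export (v7) to a named export (v8 and later); a minimal sketch of the two import styles (illustrative -- only one shape exists at runtime, depending on the installed major):

    // with lru-cache@7 (old dependency range ^7.5.1):
    const LRU = require('lru-cache')
    const oldCache = new LRU({ max: 1000 })

    // with lru-cache@10 (new dependency range ^10.0.1):
    const { LRUCache } = require('lru-cache')
    const newCache = new LRUCache({ max: 1000 })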
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/LICENSE b/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/lru-cache/LICENSE
rename to node_modules/hosted-git-info/node_modules/lru-cache/LICENSE
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/index.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/index.js
rename to node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.js
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.min.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/index.min.js
rename to node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.min.js
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/package.json b/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/package.json
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/lru-cache/dist/cjs/package.json
rename to node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/package.json
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/index.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/index.js
rename to node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.js
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.min.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/index.min.js
rename to node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.min.js
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/package.json b/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/package.json
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/lru-cache/dist/mjs/package.json
rename to node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/package.json
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/package.json b/node_modules/hosted-git-info/node_modules/lru-cache/package.json
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/lru-cache/package.json
rename to node_modules/hosted-git-info/node_modules/lru-cache/package.json
diff --git a/node_modules/hosted-git-info/package.json b/node_modules/hosted-git-info/package.json
index 612259948afe7..262a6c20fcf00 100644
--- a/node_modules/hosted-git-info/package.json
+++ b/node_modules/hosted-git-info/package.json
@@ -1,6 +1,6 @@
 {
   "name": "hosted-git-info",
-  "version": "6.1.1",
+  "version": "7.0.0",
   "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
   "main": "./lib/index.js",
   "repository": {
@@ -30,11 +30,11 @@
     "template-oss-apply": "template-oss-apply --force"
   },
   "dependencies": {
-    "lru-cache": "^7.5.1"
+    "lru-cache": "^10.0.1"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.7.1",
+    "@npmcli/template-oss": "4.18.0",
     "tap": "^16.0.1"
   },
   "files": [
@@ -42,7 +42,7 @@
     "lib/"
   ],
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "tap": {
     "color": 1,
@@ -54,6 +54,13 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.7.1"
+    "version": "4.18.0",
+    "publish": "true",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
   }
 }
diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/LICENSE b/node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/hosted-git-info/LICENSE
rename to node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE
diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/from-url.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/from-url.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/from-url.js
rename to node_modules/normalize-package-data/node_modules/hosted-git-info/lib/from-url.js
diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/hosts.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/hosts.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/hosts.js
rename to node_modules/normalize-package-data/node_modules/hosted-git-info/lib/hosts.js
diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/index.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js
similarity index 98%
rename from node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/index.js
rename to node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js
index 0c9d0b08c866b..a7339c217e9a3 100644
--- a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/index.js
+++ b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js
@@ -1,11 +1,11 @@
 'use strict'
 
-const { LRUCache } = require('lru-cache')
+const LRU = require('lru-cache')
 const hosts = require('./hosts.js')
 const fromUrl = require('./from-url.js')
 const parseUrl = require('./parse-url.js')
 
-const cache = new LRUCache({ max: 1000 })
+const cache = new LRU({ max: 1000 })
 
 class GitHost {
   constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/parse-url.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/parse-url.js
rename to node_modules/normalize-package-data/node_modules/hosted-git-info/lib/parse-url.js
diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/package.json b/node_modules/normalize-package-data/node_modules/hosted-git-info/package.json
similarity index 83%
rename from node_modules/@npmcli/package-json/node_modules/hosted-git-info/package.json
rename to node_modules/normalize-package-data/node_modules/hosted-git-info/package.json
index 262a6c20fcf00..612259948afe7 100644
--- a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/package.json
+++ b/node_modules/normalize-package-data/node_modules/hosted-git-info/package.json
@@ -1,6 +1,6 @@
 {
   "name": "hosted-git-info",
-  "version": "7.0.0",
+  "version": "6.1.1",
   "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
   "main": "./lib/index.js",
   "repository": {
@@ -30,11 +30,11 @@
     "template-oss-apply": "template-oss-apply --force"
   },
   "dependencies": {
-    "lru-cache": "^10.0.1"
+    "lru-cache": "^7.5.1"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
+    "@npmcli/template-oss": "4.7.1",
     "tap": "^16.0.1"
   },
   "files": [
@@ -42,7 +42,7 @@
     "lib/"
   ],
   "engines": {
-    "node": "^16.14.0 || >=18.0.0"
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
   },
   "tap": {
     "color": 1,
@@ -54,13 +54,6 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.18.0",
-    "publish": "true",
-    "ciVersions": [
-      "16.14.0",
-      "16.x",
-      "18.0.0",
-      "18.x"
-    ]
+    "version": "4.7.1"
   }
 }
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE b/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE
new file mode 100644
index 0000000000000..45055763dc838
--- /dev/null
+++ b/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright (c) 2015, Rebecca Turner
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/from-url.js b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/from-url.js
new file mode 100644
index 0000000000000..efc1247d59d12
--- /dev/null
+++ b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/from-url.js
@@ -0,0 +1,122 @@
+'use strict'
+
+const parseUrl = require('./parse-url')
+
+// look for github shorthand inputs, such as npm/cli
+const isGitHubShorthand = (arg) => {
+  // it cannot contain whitespace before the first #
+  // it cannot start with a / because that's probably an absolute file path
+  // but it must include a slash since repos are username/repository
+  // it cannot start with a . because that's probably a relative file path
+  // it cannot start with an @ because that's a scoped package if it passes the other tests
+  // it cannot contain a : before a # because that tells us that there's a protocol
+  // a second / may not exist before a #
+  const firstHash = arg.indexOf('#')
+  const firstSlash = arg.indexOf('/')
+  const secondSlash = arg.indexOf('/', firstSlash + 1)
+  const firstColon = arg.indexOf(':')
+  const firstSpace = /\s/.exec(arg)
+  const firstAt = arg.indexOf('@')
+
+  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
+  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
+  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
+  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
+  const hasSlash = firstSlash > 0
+  // if a # is found, what we really want to know is that the character
+  // immediately before # is not a /
+  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
+  const doesNotStartWithDot = !arg.startsWith('.')
+
+  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
+    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
+    secondSlashOnlyAfterHash
+}
+
+module.exports = (giturl, opts, { gitHosts, protocols }) => {
+  if (!giturl) {
+    return
+  }
+
+  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
+  const parsed = parseUrl(correctedUrl, protocols)
+  if (!parsed) {
+    return
+  }
+
+  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
+  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
+    ? parsed.hostname.slice(4)
+    : parsed.hostname]
+  const gitHostName = gitHostShortcut || gitHostDomain
+  if (!gitHostName) {
+    return
+  }
+
+  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
+  let auth = null
+  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
+    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
+  }
+
+  let committish = null
+  let user = null
+  let project = null
+  let defaultRepresentation = null
+
+  try {
+    if (gitHostShortcut) {
+      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
+      const firstAt = pathname.indexOf('@')
+      // we ignore auth for shortcuts, so just trim it out
+      if (firstAt > -1) {
+        pathname = pathname.slice(firstAt + 1)
+      }
+
+      const lastSlash = pathname.lastIndexOf('/')
+      if (lastSlash > -1) {
+        user = decodeURIComponent(pathname.slice(0, lastSlash))
+        // we want nulls only, never empty strings
+        if (!user) {
+          user = null
+        }
+        project = decodeURIComponent(pathname.slice(lastSlash + 1))
+      } else {
+        project = decodeURIComponent(pathname)
+      }
+
+      if (project.endsWith('.git')) {
+        project = project.slice(0, -4)
+      }
+
+      if (parsed.hash) {
+        committish = decodeURIComponent(parsed.hash.slice(1))
+      }
+
+      defaultRepresentation = 'shortcut'
+    } else {
+      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
+        return
+      }
+
+      const segments = gitHostInfo.extract(parsed)
+      if (!segments) {
+        return
+      }
+
+      user = segments.user && decodeURIComponent(segments.user)
+      project = decodeURIComponent(segments.project)
+      committish = decodeURIComponent(segments.committish)
+      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
+    }
+  } catch (err) {
+    /* istanbul ignore else */
+    if (err instanceof URIError) {
+      return
+    } else {
+      throw err
+    }
+  }
+
+  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
+}
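
The shorthand rules above are easiest to see by example; a short sketch using the package's public entry point (assuming this hosted-git-info copy is the one resolved by require):

    const hostedGit = require('hosted-git-info')

    // 'npm/cli' passes every shorthand check (one slash, no leading '.', '/',
    // or '@', no ':' before a '#'), so it is corrected to 'github:npm/cli':
    console.log(hostedGit.fromUrl('npm/cli').shortcut())  // 'github:npm/cli'

    // './npm/cli' fails doesNotStartWithDot and is not parseable as a URL:
    console.log(hostedGit.fromUrl('./npm/cli'))           // undefined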
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/hosts.js b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/hosts.js
new file mode 100644
index 0000000000000..013712b7842c8
--- /dev/null
+++ b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/hosts.js
@@ -0,0 +1,228 @@
+/* eslint-disable max-len */
+
+'use strict'
+
+const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
+const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
+const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-')
+
+const defaults = {
+  sshtemplate: ({ domain, user, project, committish }) =>
+    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, user, project, committish }) =>
+    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
+  browsetemplate: ({ domain, user, project, committish, treepath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  docstemplate: ({ domain, user, project, treepath, committish }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
+  shortcuttemplate: ({ type, user, project, committish }) =>
+    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ user, project, committish }) =>
+    `${user}/${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, user, project }) =>
+    `https://${domain}/${user}/${project}/issues`,
+  hashformat: formatHashFragment,
+}
+
+const hosts = {}
+hosts.github = {
+  // First two are insecure and generally shouldn't be used any more, but
+  // they are still supported.
+  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'github.com',
+  treepath: 'tree',
+  blobpath: 'blob',
+  editpath: 'edit',
+  filetemplate: ({ auth, user, project, committish, path }) =>
+    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
+  gittemplate: ({ auth, domain, user, project, committish }) =>
+    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, type, committish] = url.pathname.split('/', 5)
+    if (type && type !== 'tree') {
+      return
+    }
+
+    if (!type) {
+      committish = url.hash.slice(1)
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish }
+  },
+}
+
+hosts.bitbucket = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'bitbucket.org',
+  treepath: 'src',
+  blobpath: 'src',
+  editpath: '?mode=edit',
+  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (['get'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gitlab = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gitlab.com',
+  treepath: 'tree',
+  blobpath: 'tree',
+  editpath: '-/edit',
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    const path = url.pathname.slice(1)
+    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
+      return
+    }
+
+    const segments = path.split('/')
+    let project = segments.pop()
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    const user = segments.join('/')
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gist = {
+  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gist.github.com',
+  editpath: 'edit',
+  sshtemplate: ({ domain, project, committish }) =>
+    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, project, committish }) =>
+    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
+  browsetemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  docstemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  httpstemplate: ({ domain, project, committish }) =>
+    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ user, project, committish, path }) =>
+    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
+  shortcuttemplate: ({ type, project, committish }) =>
+    `${type}:${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ project, committish }) =>
+    `${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, project }) =>
+    `https://${domain}/${project}`,
+  gittemplate: ({ domain, project, committish }) =>
+    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ project, committish }) =>
+    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (aux === 'raw') {
+      return
+    }
+
+    if (!project) {
+      if (!user) {
+        return
+      }
+
+      project = user
+      user = null
+    }
+
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+  hashformat: function (fragment) {
+    return fragment && 'file-' + formatHashFragment(fragment)
+  },
+}
+
+hosts.sourcehut = {
+  protocols: ['git+ssh:', 'https:'],
+  domain: 'git.sr.ht',
+  treepath: 'tree',
+  blobpath: 'tree',
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
+  httpstemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
+  bugstemplate: ({ user, project }) =>
+    `https://todo.sr.ht/${user}/${project}`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+
+    // tarball url
+    if (['archive'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  hosts[name] = Object.assign({}, defaults, host)
+}
+
+module.exports = hosts
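
The Object.assign loop at the bottom gives every host the shared templates unless it overrides them; for instance, github inherits shortcuttemplate from defaults (illustrative, using the module as defined above):

    const hosts = require('./hosts.js')
    console.log(hosts.github.shortcuttemplate({
      type: 'github', user: 'npm', project: 'cli', committish: 'v10.0.0',
    }))
    // 'github:npm/cli#v10.0.0'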
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js
new file mode 100644
index 0000000000000..a7339c217e9a3
--- /dev/null
+++ b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js
@@ -0,0 +1,179 @@
+'use strict'
+
+const LRU = require('lru-cache')
+const hosts = require('./hosts.js')
+const fromUrl = require('./from-url.js')
+const parseUrl = require('./parse-url.js')
+
+const cache = new LRU({ max: 1000 })
+
+class GitHost {
+  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
+    Object.assign(this, GitHost.#gitHosts[type], {
+      type,
+      user,
+      auth,
+      project,
+      committish,
+      default: defaultRepresentation,
+      opts,
+    })
+  }
+
+  static #gitHosts = { byShortcut: {}, byDomain: {} }
+  static #protocols = {
+    'git+ssh:': { name: 'sshurl' },
+    'ssh:': { name: 'sshurl' },
+    'git+https:': { name: 'https', auth: true },
+    'git:': { auth: true },
+    'http:': { auth: true },
+    'https:': { auth: true },
+    'git+http:': { auth: true },
+  }
+
+  static addHost (name, host) {
+    GitHost.#gitHosts[name] = host
+    GitHost.#gitHosts.byDomain[host.domain] = name
+    GitHost.#gitHosts.byShortcut[`${name}:`] = name
+    GitHost.#protocols[`${name}:`] = { name }
+  }
+
+  static fromUrl (giturl, opts) {
+    if (typeof giturl !== 'string') {
+      return
+    }
+
+    const key = giturl + JSON.stringify(opts || {})
+
+    if (!cache.has(key)) {
+      const hostArgs = fromUrl(giturl, opts, {
+        gitHosts: GitHost.#gitHosts,
+        protocols: GitHost.#protocols,
+      })
+      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
+    }
+
+    return cache.get(key)
+  }
+
+  static parseUrl (url) {
+    return parseUrl(url)
+  }
+
+  #fill (template, opts) {
+    if (typeof template !== 'function') {
+      return null
+    }
+
+    const options = { ...this, ...this.opts, ...opts }
+
+    // the path should always be set so we don't end up with 'undefined' in urls
+    if (!options.path) {
+      options.path = ''
+    }
+
+    // template functions will insert the leading slash themselves
+    if (options.path.startsWith('/')) {
+      options.path = options.path.slice(1)
+    }
+
+    if (options.noCommittish) {
+      options.committish = null
+    }
+
+    const result = template(options)
+    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
+  }
+
+  hash () {
+    return this.committish ? `#${this.committish}` : ''
+  }
+
+  ssh (opts) {
+    return this.#fill(this.sshtemplate, opts)
+  }
+
+  sshurl (opts) {
+    return this.#fill(this.sshurltemplate, opts)
+  }
+
+  browse (path, ...args) {
+    // not a string, treat path as opts
+    if (typeof path !== 'string') {
+      return this.#fill(this.browsetemplate, path)
+    }
+
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browsetreetemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  // If the path is known to be a file, then browseFile should be used. For some hosts
+  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
+  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
+  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
+  // does not redirect to a different commit.
+  browseFile (path, ...args) {
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browseblobtemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  docs (opts) {
+    return this.#fill(this.docstemplate, opts)
+  }
+
+  bugs (opts) {
+    return this.#fill(this.bugstemplate, opts)
+  }
+
+  https (opts) {
+    return this.#fill(this.httpstemplate, opts)
+  }
+
+  git (opts) {
+    return this.#fill(this.gittemplate, opts)
+  }
+
+  shortcut (opts) {
+    return this.#fill(this.shortcuttemplate, opts)
+  }
+
+  path (opts) {
+    return this.#fill(this.pathtemplate, opts)
+  }
+
+  tarball (opts) {
+    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
+  }
+
+  file (path, opts) {
+    return this.#fill(this.filetemplate, { ...opts, path })
+  }
+
+  edit (path, opts) {
+    return this.#fill(this.edittemplate, { ...opts, path })
+  }
+
+  getDefaultRepresentation () {
+    return this.default
+  }
+
+  toString (opts) {
+    if (this.default && typeof this[this.default] === 'function') {
+      return this[this.default](opts)
+    }
+
+    return this.sshurl(opts)
+  }
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  GitHost.addHost(name, host)
+}
+
+module.exports = GitHost
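
A short usage sketch of the class above (assuming this hosted-git-info is the resolved copy):

    const GitHost = require('hosted-git-info')

    const info = GitHost.fromUrl('git+ssh://git@github.com/npm/cli.git#v10.0.0')
    console.log(info.type)      // 'github'
    console.log(info.default)   // 'sshurl' (from the protocols table)
    console.log(info.https())   // 'git+https://github.com/npm/cli.git#v10.0.0'
    console.log(info.tarball()) // 'https://codeload.github.com/npm/cli/tar.gz/v10.0.0'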
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/parse-url.js
new file mode 100644
index 0000000000000..7d5489c008ab4
--- /dev/null
+++ b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/parse-url.js
@@ -0,0 +1,78 @@
+const url = require('url')
+
+const lastIndexOfBefore = (str, char, beforeChar) => {
+  const startPosition = str.indexOf(beforeChar)
+  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
+}
+
+const safeUrl = (u) => {
+  try {
+    return new url.URL(u)
+  } catch {
+    // this fn should never throw
+  }
+}
+
+// accepts input like git:github.com:user/repo and inserts the // after the first :
+const correctProtocol = (arg, protocols) => {
+  const firstColon = arg.indexOf(':')
+  const proto = arg.slice(0, firstColon + 1)
+  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
+    return arg
+  }
+
+  const firstAt = arg.indexOf('@')
+  if (firstAt > -1) {
+    if (firstAt > firstColon) {
+      return `git+ssh://${arg}`
+    } else {
+      return arg
+    }
+  }
+
+  const doubleSlash = arg.indexOf('//')
+  if (doubleSlash === firstColon + 1) {
+    return arg
+  }
+
+  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
+}
+
+// attempt to correct an scp style url so that it will parse with `new URL()`
+const correctUrl = (giturl) => {
+  // ignore any @ that comes after the first hash, since that denotes the start
+  // of a committish, which can contain @ characters
+  const firstAt = lastIndexOfBefore(giturl, '@', '#')
+  // ignore colons that come after the hash since that could include colons such as:
+  // git@github.com:user/package-2#semver:^1.0.0
+  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
+
+  if (lastColonBeforeHash > firstAt) {
+    // the last : comes after the first @ (or there is no @)
+    // like it would in:
+    // proto://hostname.com:user/repo
+    // username@hostname.com:user/repo
+    // :password@hostname.com:user/repo
+    // username:password@hostname.com:user/repo
+    // proto://username@hostname.com:user/repo
+    // proto://:password@hostname.com:user/repo
+    // proto://username:password@hostname.com:user/repo
+    // then we replace the last : with a / to create a valid path
+    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
+  }
+
+  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
+    // we have no : at all
+    // as it would be in:
+    // username@hostname.com/user/repo
+    // then we prepend a protocol
+    giturl = `git+ssh://${giturl}`
+  }
+
+  return giturl
+}
+
+module.exports = (giturl, protocols) => {
+  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
+  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
+}
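
A worked example of the scp-style correction described in the comments (illustrative; run from the package root so the relative require path matches the layout above):

    const parseUrl = require('./lib/parse-url.js')

    // 'git@github.com:npm/cli.git' has no protocol, and its last ':' comes
    // after the '@', so correctUrl swaps that ':' for '/' and prepends
    // 'git+ssh://' before retrying new URL():
    console.log(parseUrl('git@github.com:npm/cli.git').href)
    // 'git+ssh://git@github.com/npm/cli.git'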
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json b/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json
new file mode 100644
index 0000000000000..612259948afe7
--- /dev/null
+++ b/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json
@@ -0,0 +1,59 @@
+{
+  "name": "hosted-git-info",
+  "version": "6.1.1",
+  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
+  "main": "./lib/index.js",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/hosted-git-info.git"
+  },
+  "keywords": [
+    "git",
+    "github",
+    "bitbucket",
+    "gitlab"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/hosted-git-info/issues"
+  },
+  "homepage": "https://github.com/npm/hosted-git-info",
+  "scripts": {
+    "posttest": "npm run lint",
+    "snap": "tap",
+    "test": "tap",
+    "test:coverage": "tap --coverage-report=html",
+    "lint": "eslint \"**/*.js\"",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run lint -- --fix",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "dependencies": {
+    "lru-cache": "^7.5.1"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.7.1",
+    "tap": "^16.0.1"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "tap": {
+    "color": 1,
+    "coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.7.1"
+  }
+}
diff --git a/package-lock.json b/package-lock.json
index 11c73d2843e2f..46ba9a0e343cf 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -106,7 +106,7 @@
         "fs-minipass": "^3.0.3",
         "glob": "^10.3.3",
         "graceful-fs": "^4.2.11",
-        "hosted-git-info": "^6.1.1",
+        "hosted-git-info": "^7.0.0",
         "ini": "^4.1.1",
         "init-package-json": "^5.0.0",
         "is-cidr": "^4.0.2",
@@ -2548,27 +2548,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/package-json/node_modules/hosted-git-info": {
-      "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.0.tgz",
-      "integrity": "sha512-ICclEpTLhHj+zCuSb2/usoNXSVkxUSIopre+b1w8NDY9Dntp9LO4vLdHYI336TH8sAqwrRgnSfdkBG2/YpisHA==",
-      "inBundle": true,
-      "dependencies": {
-        "lru-cache": "^10.0.1"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/package-json/node_modules/lru-cache": {
-      "version": "10.0.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz",
-      "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==",
-      "inBundle": true,
-      "engines": {
-        "node": "14 || >=16.14"
-      }
-    },
     "node_modules/@npmcli/package-json/node_modules/normalize-package-data": {
       "version": "6.0.0",
       "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.0.tgz",
@@ -2706,6 +2685,18 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/hosted-git-info": {
+      "version": "6.1.1",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
+      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
+      "dev": true,
+      "dependencies": {
+        "lru-cache": "^7.5.1"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@octokit/auth-token": {
       "version": "3.0.3",
       "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.3.tgz",
@@ -6510,15 +6501,24 @@
       }
     },
     "node_modules/hosted-git-info": {
-      "version": "6.1.1",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
-      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.0.tgz",
+      "integrity": "sha512-ICclEpTLhHj+zCuSb2/usoNXSVkxUSIopre+b1w8NDY9Dntp9LO4vLdHYI336TH8sAqwrRgnSfdkBG2/YpisHA==",
       "inBundle": true,
       "dependencies": {
-        "lru-cache": "^7.5.1"
+        "lru-cache": "^10.0.1"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
+    "node_modules/hosted-git-info/node_modules/lru-cache": {
+      "version": "10.0.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz",
+      "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==",
+      "inBundle": true,
+      "engines": {
+        "node": "14 || >=16.14"
       }
     },
     "node_modules/html-encoding-sniffer": {
@@ -9740,6 +9740,18 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/normalize-package-data/node_modules/hosted-git-info": {
+      "version": "6.1.1",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
+      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
+      "inBundle": true,
+      "dependencies": {
+        "lru-cache": "^7.5.1"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/normalize-path": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
@@ -9812,6 +9824,18 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/npm-package-arg/node_modules/hosted-git-info": {
+      "version": "6.1.1",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
+      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
+      "inBundle": true,
+      "dependencies": {
+        "lru-cache": "^7.5.1"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/npm-packlist": {
       "version": "7.0.4",
       "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-7.0.4.tgz",
@@ -16054,7 +16078,7 @@
         "bin-links": "^4.0.1",
         "cacache": "^18.0.0",
         "common-ancestor-path": "^1.0.1",
-        "hosted-git-info": "^6.1.1",
+        "hosted-git-info": "^7.0.0",
         "json-parse-even-better-errors": "^3.0.0",
         "json-stringify-nice": "^1.1.4",
         "minimatch": "^9.0.0",
diff --git a/package.json b/package.json
index e74281c6f093f..c405b20e7b246 100644
--- a/package.json
+++ b/package.json
@@ -71,7 +71,7 @@
     "fs-minipass": "^3.0.3",
     "glob": "^10.3.3",
     "graceful-fs": "^4.2.11",
-    "hosted-git-info": "^6.1.1",
+    "hosted-git-info": "^7.0.0",
     "ini": "^4.1.1",
     "init-package-json": "^5.0.0",
     "is-cidr": "^4.0.2",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 0d851ed3ca9e0..39f85e857008d 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -16,7 +16,7 @@
     "bin-links": "^4.0.1",
     "cacache": "^18.0.0",
     "common-ancestor-path": "^1.0.1",
-    "hosted-git-info": "^6.1.1",
+    "hosted-git-info": "^7.0.0",
     "json-parse-even-better-errors": "^3.0.0",
     "json-stringify-nice": "^1.1.4",
     "minimatch": "^9.0.0",

From 6d1987d97ab8d9a55f51a71d87882d03ca5a0627 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 12:11:38 -0700
Subject: [PATCH 32/68] deps: make-fetch-happen@13.0.0

---
 node_modules/.gitignore                       |   13 +-
 node_modules/@npmcli/agent/lib/dns.js         |    2 +-
 .../agent/node_modules/lru-cache/LICENSE      |   15 +
 .../node_modules/lru-cache/dist/cjs/index.js  | 1404 +++++++++++++++++
 .../lru-cache/dist/cjs/index.min.js           |    2 +
 .../lru-cache/dist/cjs/package.json           |    3 +
 .../node_modules/lru-cache/dist/mjs/index.js  | 1400 ++++++++++++++++
 .../lru-cache/dist/mjs/index.min.js           |    2 +
 .../lru-cache/dist/mjs/package.json           |    3 +
 .../agent/node_modules/lru-cache/package.json |  108 ++
 node_modules/@npmcli/agent/package.json       |   18 +-
 node_modules/make-fetch-happen/package.json   |   10 +-
 .../node_modules/@npmcli/agent/lib/dns.js     |   51 +
 .../node_modules/@npmcli/agent/lib/errors.js  |   71 +
 .../node_modules/@npmcli/agent/lib/http.js    |   33 +
 .../node_modules/@npmcli/agent/lib/https.js   |   33 +
 .../node_modules/@npmcli/agent/lib/index.js   |  135 ++
 .../@npmcli/agent/lib/proxy/http.js           |  146 ++
 .../@npmcli/agent/lib/proxy/index.js          |   25 +
 .../@npmcli/agent/lib/proxy/null.js           |   97 ++
 .../@npmcli/agent/lib/proxy/socks.js          |  153 ++
 .../node_modules/@npmcli/agent/lib/util.js    |   33 +
 .../node_modules/@npmcli/agent/package.json   |   56 +
 .../node_modules/cacache/LICENSE.md           |    0
 .../node_modules/cacache/lib/content/path.js  |    0
 .../node_modules/cacache/lib/content/read.js  |    0
 .../node_modules/cacache/lib/content/rm.js    |    0
 .../node_modules/cacache/lib/content/write.js |    0
 .../node_modules/cacache/lib/entry-index.js   |    0
 .../node_modules/cacache/lib/get.js           |    0
 .../node_modules/cacache/lib/index.js         |    0
 .../node_modules/cacache/lib/memoization.js   |    0
 .../node_modules/cacache/lib/put.js           |    0
 .../node_modules/cacache/lib/rm.js            |    0
 .../node_modules/cacache/lib/util/glob.js     |    0
 .../cacache/lib/util/hash-to-segments.js      |    0
 .../node_modules/cacache/lib/util/tmp.js      |    0
 .../node_modules/cacache/lib/verify.js        |    0
 .../node_modules/cacache/package.json         |    0
 .../node_modules/make-fetch-happen/LICENSE    |   16 +
 .../make-fetch-happen/lib/cache/entry.js      |  469 ++++++
 .../make-fetch-happen/lib/cache/errors.js     |   11 +
 .../make-fetch-happen/lib/cache/index.js      |   49 +
 .../make-fetch-happen/lib/cache/key.js        |   17 +
 .../make-fetch-happen/lib/cache/policy.js     |  161 ++
 .../make-fetch-happen/lib/fetch.js            |  118 ++
 .../make-fetch-happen/lib/index.js            |   41 +
 .../make-fetch-happen/lib/options.js          |   54 +
 .../make-fetch-happen/lib/pipeline.js         |   41 +
 .../make-fetch-happen/lib/remote.js           |  127 ++
 .../make-fetch-happen/package.json            |   80 +
 package-lock.json                             |  114 +-
 package.json                                  |    2 +-
 53 files changed, 5062 insertions(+), 51 deletions(-)
 create mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE
 create mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.js
 create mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.min.js
 create mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/package.json
 create mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.js
 create mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.min.js
 create mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/package.json
 create mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/package.json
 create mode 100644 node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/dns.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/errors.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/http.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/https.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/index.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/http.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/index.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/null.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/socks.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/util.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/@npmcli/agent/package.json
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/LICENSE.md (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/lib/content/path.js (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/lib/content/read.js (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/lib/content/rm.js (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/lib/content/write.js (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/lib/entry-index.js (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/lib/get.js (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/lib/index.js (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/lib/memoization.js (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/lib/put.js (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/lib/rm.js (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/lib/util/glob.js (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/lib/util/hash-to-segments.js (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/lib/util/tmp.js (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/lib/verify.js (100%)
 rename node_modules/{make-fetch-happen => npm-registry-fetch}/node_modules/cacache/package.json (100%)
 create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE
 create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 76cfc9b818e9f..2884505632a13 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -19,6 +19,9 @@
 !/@npmcli/
 /@npmcli/*
 !/@npmcli/agent
+!/@npmcli/agent/node_modules/
+/@npmcli/agent/node_modules/*
+!/@npmcli/agent/node_modules/lru-cache
 !/@npmcli/disparity-colors
 !/@npmcli/fs
 !/@npmcli/git
@@ -153,9 +156,6 @@
 !/just-diff
 !/lru-cache
 !/make-fetch-happen
-!/make-fetch-happen/node_modules/
-/make-fetch-happen/node_modules/*
-!/make-fetch-happen/node_modules/cacache
 !/minimatch
 !/minipass-collect
 !/minipass-collect/node_modules/
@@ -227,6 +227,13 @@
 !/npm-pick-manifest
 !/npm-profile
 !/npm-registry-fetch
+!/npm-registry-fetch/node_modules/
+/npm-registry-fetch/node_modules/*
+!/npm-registry-fetch/node_modules/@npmcli/
+/npm-registry-fetch/node_modules/@npmcli/*
+!/npm-registry-fetch/node_modules/@npmcli/agent
+!/npm-registry-fetch/node_modules/cacache
+!/npm-registry-fetch/node_modules/make-fetch-happen
 !/npm-user-validate
 !/npmlog
 !/once
diff --git a/node_modules/@npmcli/agent/lib/dns.js b/node_modules/@npmcli/agent/lib/dns.js
index 10dcb8d471d10..7f1a7c9a80e19 100644
--- a/node_modules/@npmcli/agent/lib/dns.js
+++ b/node_modules/@npmcli/agent/lib/dns.js
@@ -1,6 +1,6 @@
 'use strict'
 
-const LRUCache = require('lru-cache')
+const { LRUCache } = require('lru-cache')
 const dns = require('dns')
 
 const defaultOptions = exports.defaultOptions = {
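
The one-line change above is again the lru-cache named-export switch; for context, a minimal sketch (not the module's exact code) of the lookup-memoization pattern dns.js implements with that cache:

    'use strict'

    const { LRUCache } = require('lru-cache')
    const dns = require('dns')

    const cache = new LRUCache({ max: 50, ttl: 5 * 60 * 1000 })

    // hypothetical helper: memoize dns.lookup results per hostname+options
    const cachedLookup = (hostname, options, callback) => {
      const key = `${hostname}:${JSON.stringify(options)}`
      const hit = cache.get(key)
      if (hit) {
        // answer asynchronously, as the real lookup would
        return process.nextTick(callback, null, ...hit)
      }
      dns.lookup(hostname, options, (err, address, family) => {
        if (!err) {
          cache.set(key, [address, family])
        }
        callback(err, address, family)
      })
    }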
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE b/node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE
new file mode 100644
index 0000000000000..f785757cd63f8
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.js b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.js
new file mode 100644
index 0000000000000..02d76ec800a92
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.js
@@ -0,0 +1,1404 @@
+"use strict";
+/**
+ * @module LRUCache
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.LRUCache = void 0;
+const perf = typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function'
+    ? performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ? process : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function'
+        ? PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+        ? Uint8Array
+        : max <= Math.pow(2, 16)
+            ? Uint16Array
+            : max <= Math.pow(2, 32)
+                ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER
+                    ? ZeroArray
+                    : null;
+/* c8 ignore stop */
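+// Concretely, the ternary above selects:
+//   getUintArray(2 ** 8)  -> Uint8Array
+//   getUintArray(2 ** 16) -> Uint16Array
+//   getUintArray(2 ** 32) -> Uint32Array
+//   up to Number.MAX_SAFE_INTEGER -> ZeroArray (the zero-filled Array below)
+//   invalid max (non-positive or non-integer) -> null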
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * The main export, the thing you're using this module to get.
+ *
+ * All properties from the options object (with the exception of
+ * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
+ * normal public members. (`max` and `maxSize` are read-only getters.)
+ * Changing any of these will alter the defaults for subsequent method calls,
+ * but is otherwise safe.
+ */
+class LRUCache {
+    // properties coming in from the options. Of these, only max and maxSize
+    // really *need* to be protected. The rest can be modified, as they just
+    // set defaults for various methods.
+    #max;
+    #maxSize;
+    #dispose;
+    #disposeAfter;
+    #fetchMethod;
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0
+                ? ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
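+    /**
+     * Usage sketch (illustrative only, exercising options defined above):
+     * at least one of max, maxSize, or ttl must be provided, or the
+     * constructor throws.
+     *
+     *   const cache = new LRUCache({
+     *     max: 500,                       // hard cap on entry count
+     *     maxSize: 5000,                  // total computed size budget
+     *     sizeCalculation: v => v.length, // needs maxSize or maxEntrySize
+     *     ttl: 1000 * 60,                 // entries go stale after a minute
+     *   })
+     */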
+    /**
+     * Return the remaining TTL time for a given entry key
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
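+    /**
+     * Sketch (illustrative): without TTL tracking every cached key reports
+     * Infinity; once a ttl is configured, the remaining window counts down.
+     *
+     *   const c = new LRUCache({ max: 10, ttl: 100 })
+     *   c.set('k', 'v')
+     *   c.getRemainingTTL('k')        // ~100 (ms remaining)
+     *   c.getRemainingTTL('missing')  // 0
+     */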
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.delete(this.#keyList[index]);
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to once per ttlResolution interval
+        // (1ms by default) so we're not hitting that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (ttl === 0 || start === 0) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            return (ttls[index] !== 0 &&
+                starts[index] !== 0 &&
+                (cachedNow || getNow()) - starts[index] > ttls[index]);
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
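+    /**
+     * Iteration sketch (illustrative): entries()/keys()/values() yield in
+     * most-recently-used-first order; the r-prefixed variants reverse it.
+     *
+     *   const c = new LRUCache({ max: 3 })
+     *   c.set('a', 1)
+     *   c.set('b', 2)
+     *   const pairs = [...c.entries()]  // [['b', 2], ['a', 1]]
+     *   const oldest = [...c.rkeys()]   // ['a', 'b']
+     *   for (const [k, v] of c) {}      // same order as entries()
+     */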
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to Array.find().  fn is called as fn(value, key, cache).
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
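+    /**
+     * Sketch (illustrative): find() returns the first match in
+     * most-recently-used order, and goes through get(), so the hit's
+     * recency is refreshed.
+     *
+     *   const c = new LRUCache({ max: 10 })
+     *   c.set('a', { n: 1 })
+     *   c.set('b', { n: 2 })
+     *   c.find(v => v.n > 1)   // { n: 2 }
+     */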
+    /**
+     * Call the supplied function on each item in the cache, in order from
+     * most recently used to least recently used.  fn is called as
+     * fn(value, key, cache).  Does not update age or recency of use.
+     * Does not iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.delete(this.#keyList[i]);
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to cache.load()
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     * Note that the shape of the resulting cache may be different if the
+     * same options are not used in both caches.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
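+    /**
+     * Sketch (illustrative): dump() and load() round-trip a cache, rebasing
+     * entry.start between the Date.now() and perf.now() clocks as described
+     * above.
+     *
+     *   const a = new LRUCache({ max: 10, ttl: 5000 })
+     *   a.set('k', 'v')
+     *   const snapshot = a.dump()   // [[key, { value, ttl, start }], ...]
+     *   const b = new LRUCache({ max: 10, ttl: 5000 })
+     *   b.load(snapshot)            // b.get('k') === 'v', TTL preserved
+     */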
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.delete(k);
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0
+                ? this.#tail
+                : this.#free.length !== 0
+                    ? this.#free.pop()
+                    : this.#size === this.#max
+                        ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
+                        ? oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
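+    /**
+     * Sketch (illustrative): set() returns the cache, so calls chain, and
+     * setting undefined deletes, per the alias noted above.
+     *
+     *   const c = new LRUCache({ max: 10 })
+     *   c.set('k', 'v').set('k2', 'v2')   // chainable
+     *   c.set('k', undefined)             // same as c.delete('k')
+     *   c.has('k')                        // false
+     */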
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
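+    /**
+     * Sketch (illustrative): pop() drains from the least-recently-used end.
+     *
+     *   const c = new LRUCache({ max: 3 })
+     *   c.set('a', 1)
+     *   c.set('b', 2)
+     *   c.pop()   // 1 ('a' is least recently used)
+     *   c.pop()   // 2
+     *   c.pop()   // undefined (cache is empty)
+     */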
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of use.
+     * Will return false if the item is stale, even though it is technically
+     * in the cache.
+     *
+     * Will not update item age unless
+     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
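+    /**
+     * Sketch (illustrative): has() never promotes an entry, and reports
+     * false for stale entries even though they still occupy a slot.
+     *
+     *   const c = new LRUCache({ max: 10, ttl: 50 })
+     *   c.set('k', 'v')
+     *   c.has('k')                        // true
+     *   setTimeout(() => c.has('k'), 60)  // false once stale
+     */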
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined &&
+            (allowStale || !this.#isStale(index))) {
+            const v = this.#valList[index];
+            // either stale and allowed, or forcing a refresh of non-stale value
+            return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        }
+    }
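+    /**
+     * Sketch (illustrative): peek() reads without promoting, so the peeked
+     * entry remains first in line for eviction.
+     *
+     *   const c = new LRUCache({ max: 2 })
+     *   c.set('a', 1)
+     *   c.set('b', 2)
+     *   c.peek('a')    // 1, but 'a' stays least recently used
+     *   c.set('c', 3)  // still evicts 'a'
+     */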
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.delete(k);
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.delete(k);
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
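+    /**
+     * Sketch (illustrative; the loader and URL are hypothetical): fetch()
+     * runs the fetchMethod supplied at construction, deduplicates concurrent
+     * loads of the same key against the in-flight promise, and can serve a
+     * stale value while refreshing when allowStale is set.
+     *
+     *   const c = new LRUCache({
+     *     max: 100,
+     *     ttl: 60000,
+     *     allowStale: true,
+     *     fetchMethod: async (key, staleValue, { signal }) => {
+     *       const res = await fetch(`https://example.com/${key}`, { signal })
+     *       return res.text()
+     *     },
+     *   })
+     *   const v = await c.fetch('resource')  // miss -> runs fetchMethod
+     */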
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.delete(k);
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet,
+                // and it's not stale, so this isn't a staleWhileRefetching read.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, then that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
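+    /**
+     * Sketch (illustrative): get() promotes the entry; a stale hit returns
+     * undefined and deletes the entry unless allowStale/noDeleteOnStaleGet
+     * say otherwise.
+     *
+     *   const c = new LRUCache({ max: 10, ttl: 50 })
+     *   c.set('k', 'v')
+     *   c.get('k')  // 'v', and 'k' becomes most recently used
+     *   // after >50ms, either:
+     *   //   c.get('k')                       // undefined, entry deleted
+     *   //   c.get('k', { allowStale: true }) // 'v' served stale once
+     */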
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.clear();
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, 'delete');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, 'delete']);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        this.#next[this.#prev[index]] = this.#next[index];
+                        this.#prev[this.#next[index]] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, 'delete');
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, 'delete']);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+exports.LRUCache = LRUCache;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.min.js
new file mode 100644
index 0000000000000..8d34a03041d25
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.min.js
@@ -0,0 +1,2 @@
+"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: 
"+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else 
r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return 
l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C;
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/package.json b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
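The one-line package.json above is the standard dual-package marker: Node picks up the nearest package.json "type" field when deciding how to parse .js files, so everything under dist/cjs is treated as CommonJS, while dist/mjs typically carries a matching {"type": "module"} marker. A minimal sketch of the effect for consumers, assuming the usual lru-cache exports map routes require/import to the two builds:

    // CommonJS consumers resolve to dist/cjs/index.js
    const { LRUCache } = require('lru-cache')

    // ESM consumers resolve to dist/mjs/index.js:
    //   import { LRUCache } from 'lru-cache'
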
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.js b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.js
new file mode 100644
index 0000000000000..23b9754ad6c76
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.js
@@ -0,0 +1,1400 @@
+/**
+ * @module LRUCache
+ */
+const perf = typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function'
+    ? performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ? process : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function'
+        ? PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+        ? Uint8Array
+        : max <= Math.pow(2, 16)
+            ? Uint16Array
+            : max <= Math.pow(2, 32)
+                ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER
+                    ? ZeroArray
+                    : null;
+/* c8 ignore stop */
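+// Worked examples of the selection above (illustrative values): a cache
+// with max=200 keeps its index links in a Uint8Array (200 <= 2^8),
+// max=50000 uses a Uint16Array, and max=1000000 uses a Uint32Array; only
+// a max beyond 2^32 falls back to the zero-filled plain array below.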
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * All properties from the options object (with the exception of
+ * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
+ * normal public members. (`max` and `maxSize` are read-only getters.)
+ * Changing any of these will alter the defaults for subsequent method calls,
+ * but is otherwise safe.
+ */
+export class LRUCache {
+    // properties coming in from the options; of these, only max and maxSize
+    // really *need* to be protected. The rest can be modified, as they just
+    // set defaults for various methods.
+    #max;
+    #maxSize;
+    #dispose;
+    #disposeAfter;
+    #fetchMethod;
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize defaults to maxSize when maxSize is set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0
+                ? ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
+    /**
+     * Return the remaining TTL time for a given entry key
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
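+    // (This default only applies while no TTL tracking exists:
+    // #initializeTTLTracking() replaces getRemainingTTL with a real
+    // implementation once a ttl is configured, so here every cached key
+    // reports Infinity and unknown keys report 0.)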
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.delete(this.#keyList[index]);
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to ttlResolution ms so we're not
+        // hitting that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (ttl === 0 || start === 0) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            return (ttls[index] !== 0 &&
+                starts[index] !== 0 &&
+                (cachedNow || getNow()) - starts[index] > ttls[index]);
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to Array.find().  fn is called as fn(value, key, cache).
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
+    /**
+     * Call the supplied function on each item in the cache, in order from
+     * most recently used to least recently used.  fn is called as
+     * fn(value, key, cache).  Does not update age or recency of use.
+     * Does not iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.delete(this.#keyList[i]);
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to cache.load()
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     * Note that the shape of the resulting cache may be different if the
+     * same options are not used in both caches.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
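+    // Usage sketch (illustrative, assumes JSON-serializable values):
+    // dump() and load() round-trip a cache through plain data, e.g. to
+    // persist it across restarts:
+    //   const saved = JSON.stringify(cache.dump())
+    //   const revived = new LRUCache(sameOptions) // same options as `cache`
+    //   revived.load(JSON.parse(saved))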
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.delete(k);
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0
+                ? this.#tail
+                : this.#free.length !== 0
+                    ? this.#free.pop()
+                    : this.#size === this.#max
+                        ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
+                        ? oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
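+    // e.g. cache.set('k', undefined) behaves exactly like cache.delete('k'),
+    // and since set() returns the cache, calls can be chained:
+    //   cache.set('a', 1).set('b', 2)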
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of use.
+     * Will return false if the item is stale, even though it is technically
+     * in the cache.
+     *
+     * Will not update item age unless
+     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined &&
+            (allowStale || !this.#isStale(index))) {
+            const v = this.#valList[index];
+            // either stale and allowed, or forcing a refresh of non-stale value
+            return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        }
+    }
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.delete(k);
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.delete(k);
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
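+    // Usage sketch (illustrative names): with a fetchMethod configured,
+    // fetch() gives stale-while-revalidate behavior; a stale hit returns
+    // the old value immediately while the refresh continues in the
+    // background:
+    //   const cache = new LRUCache({
+    //     max: 100, ttl: 60_000, allowStale: true,
+    //     fetchMethod: async (key, staleValue, { signal }) =>
+    //       loadFromUpstream(key, signal), // hypothetical loader
+    //   })
+    //   const v = await cache.fetch('some-key')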
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.delete(k);
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet
+                // it's not stale, which means this isn't a stale-while-refetching case.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, then that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.clear();
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, 'delete');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, 'delete']);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        this.#next[this.#prev[index]] = this.#next[index];
+                        this.#prev[this.#next[index]] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, 'delete');
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, 'delete']);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
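
The #connect/#moveToTail pair above maintains recency order without allocating list nodes: next and prev are parallel integer arrays indexed by storage slot, so promoting an entry to most-recently-used is a constant-time re-link of three indices. A minimal standalone sketch of that pointer dance, using illustrative names rather than the library's API:

// Three-slot list 0 -> 1 -> 2; head is least-, tail is most-recently-used.
const next = [1, 2, 0]
const prev = [0, 0, 1]
let head = 0
let tail = 2

const connect = (p, n) => {
  prev[n] = p
  next[p] = n
}

const moveToTail = (index) => {
  if (index === tail) return          // already most-recently-used
  if (index === head) {
    head = next[index]                // head has no prev to re-link
  } else {
    connect(prev[index], next[index]) // unlink from the middle
  }
  connect(tail, index)                // append after the old tail
  tail = index
}

moveToTail(0)
console.log({ head, tail }) // { head: 1, tail: 0 }
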
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.min.js
new file mode 100644
index 0000000000000..5a16b3940d6df
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.min.js
@@ -0,0 +1,2 @@
+var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache};
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/package.json b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/package.json b/node_modules/@npmcli/agent/node_modules/lru-cache/package.json
new file mode 100644
index 0000000000000..bae4a04839d1f
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/lru-cache/package.json
@@ -0,0 +1,108 @@
+{
+  "name": "lru-cache",
+  "description": "A cache object that deletes the least-recently-used items.",
+  "version": "10.0.1",
+  "author": "Isaac Z. Schlueter ",
+  "keywords": [
+    "mru",
+    "lru",
+    "cache"
+  ],
+  "sideEffects": false,
+  "scripts": {
+    "build": "npm run prepare",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
+    "postprepare": "bash fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write .",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
+    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
+    "prebenchmark": "npm run prepare",
+    "benchmark": "make -C benchmark",
+    "preprofile": "npm run prepare",
+    "profile": "make -C benchmark profile"
+  },
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/mjs/index.js",
+  "exports": {
+    "./min": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.min.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.min.js"
+      }
+    },
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    }
+  },
+  "repository": "git://github.com/isaacs/node-lru-cache.git",
+  "devDependencies": {
+    "@size-limit/preset-small-lib": "^7.0.8",
+    "@types/node": "^20.2.5",
+    "@types/tap": "^15.0.6",
+    "benchmark": "^2.1.4",
+    "c8": "^7.11.2",
+    "clock-mock": "^1.0.6",
+    "esbuild": "^0.17.11",
+    "eslint-config-prettier": "^8.5.0",
+    "marked": "^4.2.12",
+    "mkdirp": "^2.1.5",
+    "prettier": "^2.6.2",
+    "size-limit": "^7.0.8",
+    "tap": "^16.3.4",
+    "ts-node": "^10.9.1",
+    "tslib": "^2.4.0",
+    "typedoc": "^0.24.6",
+    "typescript": "^5.0.4"
+  },
+  "license": "ISC",
+  "files": [
+    "dist"
+  ],
+  "engines": {
+    "node": "14 || >=16.14"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--expose-gc",
+      "-r",
+      "ts-node/register"
+    ],
+    "ts": false
+  },
+  "size-limit": [
+    {
+      "path": "./dist/mjs/index.js"
+    }
+  ]
+}
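
The "exports" map above serves both module systems from one package: the root subpath resolves to the full build and "./min" to the pre-minified one, each with matching type declarations. A hedged consumption sketch:

// CommonJS resolves through the "require" condition...
const { LRUCache } = require('lru-cache')    // -> dist/cjs/index.js
// ...while ESM would resolve through the "import" condition:
// import { LRUCache } from 'lru-cache/min'  // -> dist/mjs/index.min.js

const cache = new LRUCache({ max: 50 })
cache.set('a', 1)
console.log(cache.get('a')) // 1
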
diff --git a/node_modules/@npmcli/agent/package.json b/node_modules/@npmcli/agent/package.json
index a3fb4262b9c86..0d0ec1bdfb418 100644
--- a/node_modules/@npmcli/agent/package.json
+++ b/node_modules/@npmcli/agent/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/agent",
-  "version": "1.1.0",
+  "version": "2.0.0",
   "description": "the http/https agent used by the npm cli",
   "main": "lib/index.js",
   "scripts": {
@@ -24,16 +24,22 @@
     "lib/"
   ],
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.15.1",
-    "publish": "true"
+    "version": "4.18.0",
+    "publish": "true",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.15.1",
+    "@npmcli/template-oss": "4.18.0",
     "minipass-fetch": "^3.0.3",
     "nock": "^13.2.7",
     "simple-socks": "^2.2.2",
@@ -50,7 +56,7 @@
     ]
   },
   "dependencies": {
-    "lru-cache": "^7.18.3",
+    "lru-cache": "^10.0.1",
     "socks": "^2.7.1"
   }
 }
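
Note that the lru-cache range here jumps three majors (^7.18.3 to ^10.0.1); starting with v8 the class is a named export rather than the module's default export, which is the breaking change most callers notice and the reason a fresh lru-cache 10 copy is nested under @npmcli/agent earlier in this patch. A sketch of the import difference:

// v7 and earlier exported the class directly:
// const LRUCache = require('lru-cache')
// v8 and later export it by name, so consumers destructure:
const { LRUCache } = require('lru-cache')
console.log(typeof LRUCache) // 'function'
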
diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json
index 419db8fbb1289..a874ace6d1d47 100644
--- a/node_modules/make-fetch-happen/package.json
+++ b/node_modules/make-fetch-happen/package.json
@@ -1,6 +1,6 @@
 {
   "name": "make-fetch-happen",
-  "version": "12.0.0",
+  "version": "13.0.0",
   "description": "Opinionated, caching, retrying fetch client",
   "main": "lib/index.js",
   "files": [
@@ -33,8 +33,8 @@
   "author": "GitHub Inc.",
   "license": "ISC",
   "dependencies": {
-    "@npmcli/agent": "^1.1.0",
-    "cacache": "^17.0.0",
+    "@npmcli/agent": "^2.0.0",
+    "cacache": "^18.0.0",
     "http-cache-semantics": "^4.1.1",
     "is-lambda": "^1.0.1",
     "minipass": "^7.0.2",
@@ -54,7 +54,7 @@
     "tap": "^16.0.0"
   },
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "tap": {
     "color": 1,
@@ -69,7 +69,7 @@
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/dns.js b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/dns.js
new file mode 100644
index 0000000000000..10dcb8d471d10
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/dns.js
@@ -0,0 +1,51 @@
+'use strict'
+
+const LRUCache = require('lru-cache')
+const dns = require('dns')
+
+const defaultOptions = exports.defaultOptions = {
+  family: undefined,
+  hints: dns.ADDRCONFIG,
+  all: false,
+  verbatim: undefined,
+}
+
+const lookupCache = exports.lookupCache = new LRUCache({ max: 50 })
+
+// this is a factory so that each request can have its own opts (i.e. ttl)
+// while still sharing the cache across all requests
+exports.getLookup = (dnsOptions) => {
+  return (hostname, options, callback) => {
+    if (typeof options === 'function') {
+      callback = options
+      options = null
+    } else if (typeof options === 'number') {
+      options = { family: options }
+    }
+
+    options = { ...defaultOptions, ...options }
+
+    const key = JSON.stringify({
+      hostname,
+      family: options.family,
+      hints: options.hints,
+      all: options.all,
+      verbatim: options.verbatim,
+    })
+
+    if (lookupCache.has(key)) {
+      const [address, family] = lookupCache.get(key)
+      process.nextTick(callback, null, address, family)
+      return
+    }
+
+    dnsOptions.lookup(hostname, options, (err, address, family) => {
+      if (err) {
+        return callback(err)
+      }
+
+      lookupCache.set(key, [address, family], { ttl: dnsOptions.ttl })
+      return callback(null, address, family)
+    })
+  }
+}
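
getLookup returns a function with Node's dns.lookup signature, so it drops into any API that accepts a custom lookup; each caller picks its own ttl while all of them share lookupCache. A usage sketch under that assumption (the host name is illustrative):

const net = require('net')
const dns = require('dns')
const { getLookup } = require('./dns.js')

// cache resolved addresses for five minutes, matching the agent's default ttl
const lookup = getLookup({ lookup: dns.lookup, ttl: 5 * 60 * 1000 })

const socket = net.connect({ host: 'registry.npmjs.org', port: 443, lookup })
socket.once('connect', () => socket.end())
socket.once('error', console.error)
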
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/errors.js b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/errors.js
new file mode 100644
index 0000000000000..9c664aeb39757
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/errors.js
@@ -0,0 +1,71 @@
+'use strict'
+
+class InvalidProxyProtocolError extends Error {
+  constructor (url) {
+    super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``)
+    this.code = 'EINVALIDPROXY'
+    this.proxy = url
+  }
+}
+
+class InvalidProxyResponseError extends Error {
+  constructor (url, status) {
+    super(`Invalid status code \`${status}\` connecting to proxy \`${url.host}\``)
+    this.code = 'EINVALIDRESPONSE'
+    this.proxy = url
+    this.status = status
+  }
+}
+
+class ConnectionTimeoutError extends Error {
+  constructor (host) {
+    super(`Timeout connecting to host \`${host}\``)
+    this.code = 'ECONNECTIONTIMEOUT'
+    this.host = host
+  }
+}
+
+class IdleTimeoutError extends Error {
+  constructor (host) {
+    super(`Idle timeout reached for host \`${host}\``)
+    this.code = 'EIDLETIMEOUT'
+    this.host = host
+  }
+}
+
+class ResponseTimeoutError extends Error {
+  constructor (proxy, request) {
+    let msg = 'Response timeout '
+    if (proxy.url) {
+      msg += `from proxy \`${proxy.url.host}\` `
+    }
+    msg += `connecting to host \`${request.host}\``
+    super(msg)
+    this.code = 'ERESPONSETIMEOUT'
+    this.proxy = proxy.url
+    this.request = request
+  }
+}
+
+class TransferTimeoutError extends Error {
+  constructor (proxy, request) {
+    let msg = 'Transfer timeout '
+    if (proxy.url) {
+      msg += `from proxy \`${proxy.url.host}\` `
+    }
+    msg += `for \`${request.host}\``
+    super(msg)
+    this.code = 'ETRANSFERTIMEOUT'
+    this.proxy = proxy.url
+    this.request = request
+  }
+}
+
+module.exports = {
+  InvalidProxyProtocolError,
+  InvalidProxyResponseError,
+  ConnectionTimeoutError,
+  IdleTimeoutError,
+  ResponseTimeoutError,
+  TransferTimeoutError,
+}
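
Every class above carries a stable code property, so callers can classify failures without instanceof checks, which matters when several copies of this package are nested under node_modules. A hedged handling sketch:

const handleAgentError = (err) => {
  switch (err.code) {
    case 'ECONNECTIONTIMEOUT':
    case 'EIDLETIMEOUT':
      return `retryable timeout talking to ${err.host}`
    case 'ERESPONSETIMEOUT':
    case 'ETRANSFERTIMEOUT':
      return `slow response from ${err.request.host}`
    case 'EINVALIDPROXY':
    case 'EINVALIDRESPONSE':
      return `proxy problem: ${err.message}`
    default:
      throw err // not one of this module's errors, let it propagate
  }
}
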
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/http.js b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/http.js
new file mode 100644
index 0000000000000..23512393caf3f
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/http.js
@@ -0,0 +1,33 @@
+'use strict'
+
+const http = require('http')
+
+const { getLookup } = require('./dns.js')
+const { normalizeOptions } = require('./util.js')
+const createProxy = require('./proxy/index.js')
+
+class HttpAgent extends http.Agent {
+  constructor (_options = {}) {
+    const options = normalizeOptions(_options)
+    super(options)
+    this.proxy = createProxy({
+      agent: this,
+      lookup: getLookup(options.dns),
+      proxy: options.proxy,
+      secure: false,
+    })
+  }
+
+  createConnection (_options, callback) {
+    const options = normalizeOptions(_options)
+    return this.proxy.createConnection(options, callback)
+  }
+
+  addRequest (request, _options) {
+    const options = normalizeOptions(_options)
+    super.addRequest(request, options)
+    return this.proxy.addRequest(request, options)
+  }
+}
+
+module.exports = HttpAgent
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/https.js b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/https.js
new file mode 100644
index 0000000000000..b544614d7f47f
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/https.js
@@ -0,0 +1,33 @@
+'use strict'
+
+const https = require('https')
+
+const { getLookup } = require('./dns.js')
+const { normalizeOptions } = require('./util.js')
+const createProxy = require('./proxy/index.js')
+
+class HttpsAgent extends https.Agent {
+  constructor (_options) {
+    const options = normalizeOptions(_options)
+    super(options)
+    this.proxy = createProxy({
+      agent: this,
+      lookup: getLookup(options.dns),
+      proxy: options.proxy,
+      secure: true,
+    })
+  }
+
+  createConnection (_options, callback) {
+    const options = normalizeOptions(_options)
+    return this.proxy.createConnection(options, callback)
+  }
+
+  addRequest (request, _options) {
+    const options = normalizeOptions(_options)
+    super.addRequest(request, options)
+    return this.proxy.addRequest(request, options)
+  }
+}
+
+module.exports = HttpsAgent
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/index.js b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/index.js
new file mode 100644
index 0000000000000..a6f556964d86d
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/index.js
@@ -0,0 +1,135 @@
+'use strict'
+
+const { normalizeOptions } = require('./util.js')
+const HttpAgent = require('./http.js')
+const HttpsAgent = require('./https.js')
+
+const AgentCache = new Map()
+
+const proxyEnv = {}
+for (const [key, value] of Object.entries(process.env)) {
+  const lowerKey = key.toLowerCase()
+  if (['https_proxy', 'http_proxy', 'proxy', 'no_proxy'].includes(lowerKey)) {
+    proxyEnv[lowerKey] = value
+  }
+}
+
+const getAgent = (url, options) => {
+  url = new URL(url)
+  options = normalizeOptions(options)
+
+  // false has meaning so this can't be a simple truthiness check
+  if (options.agent != null) {
+    return options.agent
+  }
+
+  const isHttps = url.protocol === 'https:'
+
+  let proxy = options.proxy
+  if (!proxy) {
+    proxy = isHttps
+      ? proxyEnv.https_proxy
+      : (proxyEnv.https_proxy || proxyEnv.http_proxy || proxyEnv.proxy)
+  }
+
+  if (proxy) {
+    proxy = new URL(proxy)
+    let noProxy = options.noProxy || proxyEnv.no_proxy
+    if (typeof noProxy === 'string') {
+      noProxy = noProxy.split(',').map((p) => p.trim())
+    }
+
+    if (noProxy) {
+      const hostSegments = url.hostname.split('.').reverse()
+      const matches = noProxy.some((no) => {
+        const noSegments = no.split('.').filter(Boolean).reverse()
+        if (!noSegments.length) {
+          return false
+        }
+
+        for (let i = 0; i < noSegments.length; ++i) {
+          if (hostSegments[i] !== noSegments[i]) {
+            return false
+          }
+        }
+
+        return true
+      })
+
+      if (matches) {
+        proxy = ''
+      }
+    }
+  }
+
+  const timeouts = [
+    options.timeouts.connection || 0,
+    options.timeouts.idle || 0,
+    options.timeouts.response || 0,
+    options.timeouts.transfer || 0,
+  ].join('.')
+
+  const maxSockets = options.maxSockets || 15
+
+  let proxyDescriptor = 'proxy:'
+  if (!proxy) {
+    proxyDescriptor += 'null'
+  } else {
+    proxyDescriptor += `${proxy.protocol}//`
+    let auth = ''
+
+    if (proxy.username) {
+      auth += proxy.username
+    }
+
+    if (proxy.password) {
+      auth += `:${proxy.password}`
+    }
+
+    if (auth) {
+      proxyDescriptor += `${auth}@`
+    }
+
+    proxyDescriptor += proxy.host
+  }
+
+  const key = [
+    `https:${isHttps}`,
+    proxyDescriptor,
+    `local-address:${options.localAddress || 'null'}`,
+    `strict-ssl:${isHttps ? options.rejectUnauthorized : 'false'}`,
+    `ca:${isHttps && options.ca || 'null'}`,
+    `cert:${isHttps && options.cert || 'null'}`,
+    `key:${isHttps && options.key || 'null'}`,
+    `timeouts:${timeouts}`,
+    `maxSockets:${maxSockets}`,
+  ].join(':')
+
+  if (AgentCache.has(key)) {
+    return AgentCache.get(key)
+  }
+
+  const agentOptions = {
+    ca: options.ca,
+    cert: options.cert,
+    key: options.key,
+    rejectUnauthorized: options.rejectUnauthorized,
+    maxSockets,
+    timeouts: options.timeouts,
+    localAddress: options.localAddress,
+    proxy,
+  }
+
+  const agent = isHttps
+    ? new HttpsAgent(agentOptions)
+    : new HttpAgent(agentOptions)
+
+  AgentCache.set(key, agent)
+  return agent
+}
+
+module.exports = {
+  getAgent,
+  HttpAgent,
+  HttpsAgent,
+}
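
getAgent keys AgentCache on protocol, proxy, TLS material, timeouts, and socket limits, so requests with identical settings share one keep-alive agent. A usage sketch (the registry URLs are illustrative, and it assumes no *_proxy variables are set in the environment):

const { getAgent } = require('./index.js')

const a = getAgent('https://registry.npmjs.org/npm', {})
const b = getAgent('https://registry.npmjs.org/node', {})
console.log(a === b) // true: same cache key, same pooled agent

// differing options produce a different cache key, hence a new agent
const c = getAgent('https://registry.npmjs.org/npm', { maxSockets: 1 })
console.log(a === c) // false
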
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/http.js b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/http.js
new file mode 100644
index 0000000000000..8d092e963c084
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/http.js
@@ -0,0 +1,146 @@
+'use strict'
+
+const http = require('http')
+const https = require('https')
+const net = require('net')
+const tls = require('tls')
+
+const {
+  ConnectionTimeoutError,
+  IdleTimeoutError,
+  InvalidProxyResponseError,
+  ResponseTimeoutError,
+  TransferTimeoutError,
+} = require('../errors.js')
+
+// this proxy class uses the http CONNECT method
+class HttpProxy {
+  constructor ({ agent, lookup, url, secure }) {
+    this.agent = agent
+    this.lookup = lookup
+    this.url = url
+    this.secure = secure
+  }
+
+  createConnection (options, callback) {
+    const requestOptions = {
+      // pass createConnection so this request doesn't go through an agent
+      createConnection: (opts, cb) => {
+        // delete the path first, otherwise (net|tls).connect will try to open a unix socket
+        delete opts.path
+        // we also delete the timeout since we control it ourselves
+        delete opts.timeout
+        opts.family = this.agent.options.family
+        opts.lookup = this.lookup
+
+        if (this.url.protocol === 'https:') {
+          return tls.connect(opts, cb)
+        }
+
+        return net.connect(opts, cb)
+      },
+      method: 'CONNECT',
+      host: this.url.hostname,
+      port: this.url.port,
+      servername: this.url.hostname,
+      path: `${options.host}:${options.port}`,
+      setHost: false,
+      timeout: options.timeout,
+      headers: {
+        connection: this.agent.keepAlive ? 'keep-alive' : 'close',
+        host: `${options.host}:${options.port}`,
+      },
+      rejectUnauthorized: options.rejectUnauthorized,
+    }
+
+    if (this.url.username || this.url.password) {
+      const username = decodeURIComponent(this.url.username)
+      const password = decodeURIComponent(this.url.password)
+      requestOptions.headers['proxy-authorization'] =
+        'Basic ' + Buffer.from(`${username}:${password}`).toString('base64')
+    }
+
+    let connectionTimeout
+
+    const onConnect = (res, socket) => {
+      clearTimeout(connectionTimeout)
+      req.removeListener('error', onError)
+
+      if (res.statusCode !== 200) {
+        return callback(new InvalidProxyResponseError(this.url, res.statusCode))
+      }
+
+      if (this.secure) {
+        socket = tls.connect({ ...options, socket })
+      }
+
+      socket.setKeepAlive(this.agent.keepAlive, this.agent.keepAliveMsecs)
+      socket.setNoDelay(this.agent.keepAlive)
+
+      if (options.timeouts.idle) {
+        socket.setTimeout(options.timeouts.idle)
+        socket.once('timeout', () => {
+          socket.destroy(new IdleTimeoutError(this.url.host))
+        })
+      }
+
+      return callback(null, socket)
+    }
+
+    const onError = (err) => {
+      req.removeListener('connect', onConnect)
+      return callback(err)
+    }
+
+    const req = this.secure
+      ? https.request(requestOptions)
+      : http.request(requestOptions)
+
+    req.once('connect', onConnect)
+    req.once('error', onError)
+    req.end()
+
+    if (options.timeouts.connection) {
+      connectionTimeout = setTimeout(() => {
+        return callback(new ConnectionTimeoutError(this.url.host))
+      }, options.timeouts.connection)
+    }
+  }
+
+  addRequest (request, options) {
+    if (this.agent.options.timeouts.response) {
+      let responseTimeout
+
+      const onFinish = () => {
+        responseTimeout = setTimeout(() => {
+          request.destroy(new ResponseTimeoutError(this, request))
+        }, this.agent.options.timeouts.response)
+      }
+
+      const onResponse = () => {
+        clearTimeout(responseTimeout)
+      }
+
+      request.once('finish', onFinish)
+      request.once('response', onResponse)
+    }
+
+    if (this.agent.options.timeouts.transfer) {
+      let transferTimeout
+
+      const onResponse = (res) => {
+        transferTimeout = setTimeout(() => {
+          res.destroy(new TransferTimeoutError(this, request))
+        }, this.agent.options.timeouts.transfer)
+
+        res.once('close', () => {
+          clearTimeout(transferTimeout)
+        })
+      }
+
+      request.once('response', onResponse)
+    }
+  }
+}
+
+module.exports = HttpProxy
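
As the comment at the top of the file says, this class tunnels through the proxy with the HTTP CONNECT method: the request asks the proxy to open a TCP connection to options.host:options.port, and on a 200 response the raw socket is handed back (wrapped in TLS first when the destination is secure). The bare exchange, sketched against a hypothetical proxy:

const http = require('http')

const req = http.request({
  host: 'proxy.example.com', // hypothetical proxy; port 8080 assumed
  port: 8080,
  method: 'CONNECT',
  path: 'registry.npmjs.org:443', // the tunnel destination
})

req.once('connect', (res, socket) => {
  console.log(res.statusCode) // anything but 200 means the tunnel failed
  socket.end() // a real client would tls.connect({ socket }) here instead
})
req.once('error', console.error)
req.end()
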
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/index.js b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/index.js
new file mode 100644
index 0000000000000..87f628c5bbf94
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/index.js
@@ -0,0 +1,25 @@
+'use strict'
+
+const { InvalidProxyProtocolError } = require('../errors.js')
+const HttpProxy = require('./http.js')
+const NullProxy = require('./null.js')
+const SocksProxy = require('./socks.js')
+
+const createProxy = ({ agent, lookup, proxy, secure }) => {
+  if (!proxy) {
+    return new NullProxy({ agent, lookup, secure })
+  }
+
+  const parsed = new URL(proxy)
+  if (parsed.protocol === 'http:' || parsed.protocol === 'https:') {
+    return new HttpProxy({ agent, lookup, url: parsed, secure })
+  }
+
+  if (parsed.protocol.startsWith('socks')) {
+    return new SocksProxy({ agent, lookup, url: parsed, secure })
+  }
+
+  throw new InvalidProxyProtocolError(parsed)
+}
+
+module.exports = createProxy
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/null.js b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/null.js
new file mode 100644
index 0000000000000..d2b2f6f777e92
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/null.js
@@ -0,0 +1,97 @@
+'use strict'
+
+const net = require('net')
+const tls = require('tls')
+
+const {
+  ConnectionTimeoutError,
+  IdleTimeoutError,
+  ResponseTimeoutError,
+  TransferTimeoutError,
+} = require('../errors.js')
+
+class NullProxy {
+  constructor ({ agent, lookup, secure }) {
+    this.agent = agent
+    this.lookup = lookup
+    this.secure = secure
+  }
+
+  createConnection (options, callback) {
+    const socket = this.secure
+      ? tls.connect({ ...options, family: this.agent.options.family, lookup: this.lookup })
+      : net.connect({ ...options, family: this.agent.options.family, lookup: this.lookup })
+
+    socket.setKeepAlive(this.agent.keepAlive, this.agent.keepAliveMsecs)
+    socket.setNoDelay(this.agent.keepAlive)
+
+    let connectionTimeout
+
+    if (options.timeouts.connection) {
+      connectionTimeout = setTimeout(() => {
+        callback(new ConnectionTimeoutError(options.host))
+      }, options.timeouts.connection)
+    }
+
+    if (options.timeouts.idle) {
+      socket.setTimeout(options.timeouts.idle)
+      socket.once('timeout', () => {
+        socket.destroy(new IdleTimeoutError(options.host))
+      })
+    }
+
+    const onConnect = () => {
+      clearTimeout(connectionTimeout)
+      socket.removeListener('error', onError)
+      callback(null, socket)
+    }
+
+    const onError = (err) => {
+      socket.removeListener('connect', onConnect)
+      callback(err)
+    }
+
+    socket.once('error', onError)
+    socket.once(this.secure ? 'secureConnect' : 'connect', onConnect)
+  }
+
+  addRequest (request, options) {
+    if (this.agent.options.timeouts.response) {
+      let responseTimeout
+
+      const onFinish = () => {
+        responseTimeout = setTimeout(() => {
+          request.destroy(new ResponseTimeoutError(this, request))
+        }, this.agent.options.timeouts.response)
+      }
+
+      const onResponse = () => {
+        clearTimeout(responseTimeout)
+      }
+
+      request.once('finish', onFinish)
+      request.once('response', onResponse)
+    }
+
+    if (this.agent.options.timeouts.transfer) {
+      let transferTimeout
+
+      const onResponse = (res) => {
+        transferTimeout = setTimeout(() => {
+          // swallow the error event on the request, this allows the one on the response
+          // to make it to the end user
+          request.once('error', () => {})
+          res.destroy(new TransferTimeoutError(this, request))
+        }, this.agent.options.timeouts.transfer)
+
+        res.once('close', () => {
+          clearTimeout(transferTimeout)
+        })
+      }
+
+      request.once('response', onResponse)
+    }
+  }
+}
+
+module.exports = NullProxy
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/socks.js b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/socks.js
new file mode 100644
index 0000000000000..8cad7148e9227
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/socks.js
@@ -0,0 +1,153 @@
+'use strict'
+
+const { SocksClient } = require('socks')
+const tls = require('tls')
+
+const {
+  ConnectionTimeoutError,
+  IdleTimeoutError,
+  InvalidProxyProtocolError,
+  ResponseTimeoutError,
+  TransferTimeoutError,
+} = require('../errors.js')
+
+class SocksProxy {
+  constructor ({ agent, lookup, secure, url }) {
+    this.agent = agent
+    this.lookup = lookup
+    this.secure = secure
+    this.url = url
+    if (!this.url.port) {
+      this.url.port = 1080
+    }
+
+    if (this.url.protocol === 'socks4:') {
+      this.shouldLookup = true
+      this.type = 4
+    } else if (this.url.protocol === 'socks4a:') {
+      this.shouldLookup = false
+      this.type = 4
+    } else if (this.url.protocol === 'socks5:') {
+      this.shouldLookup = true
+      this.type = 5
+    } else if (this.url.protocol === 'socks5h:' || this.url.protocol === 'socks:') {
+      this.shouldLookup = false
+      this.type = 5
+    } else {
+      throw new InvalidProxyProtocolError(this.url)
+    }
+  }
+
+  createConnection (options, callback) {
+    const socksOptions = {
+      proxy: {
+        host: this.url.hostname,
+        port: parseInt(this.url.port, 10),
+        type: this.type,
+        userId: this.url.username,
+        password: this.url.password,
+      },
+      destination: {
+        host: options.host,
+        port: parseInt(options.port, 10),
+      },
+      command: 'connect',
+      socket_options: {
+        family: this.agent.options.family,
+        lookup: this.lookup,
+      },
+    }
+
+    const connect = () => {
+      let connectionTimeout
+      const socksClient = new SocksClient(socksOptions)
+
+      const onError = (err) => {
+        socksClient.removeListener('established', onEstablished)
+        return callback(err)
+      }
+
+      const onEstablished = (connection) => {
+        clearTimeout(connectionTimeout)
+        socksClient.removeListener('error', onError)
+
+        if (this.secure) {
+          connection.socket = tls.connect({ ...options, socket: connection.socket })
+        }
+
+        connection.socket.setKeepAlive(this.agent.keepAlive, this.agent.keepAliveMsecs)
+        connection.socket.setNoDelay(this.agent.keepAlive)
+
+        if (options.timeouts.idle) {
+          connection.socket.setTimeout(options.timeouts.idle)
+          connection.socket.once('timeout', () => {
+            connection.socket.destroy(new IdleTimeoutError(this.url.host))
+          })
+        }
+
+        return callback(null, connection.socket)
+      }
+
+      socksClient.once('error', onError)
+      socksClient.once('established', onEstablished)
+
+      if (options.timeouts.connection) {
+        connectionTimeout = setTimeout(() => {
+          return callback(new ConnectionTimeoutError(this.url.host))
+        }, options.timeouts.connection)
+      }
+
+      socksClient.connect()
+    }
+
+    if (!this.shouldLookup) {
+      return connect()
+    }
+
+    this.lookup(options.host, (err, result) => {
+      if (err) {
+        return callback(err)
+      }
+
+      socksOptions.destination.host = result
+      connect()
+    })
+  }
+
+  addRequest (request, options) {
+    if (this.agent.options.timeouts.response) {
+      let responseTimeout
+
+      const onFinish = () => {
+        responseTimeout = setTimeout(() => {
+          request.destroy(new ResponseTimeoutError(this, request))
+        }, this.agent.options.timeouts.response)
+      }
+
+      const onResponse = () => {
+        clearTimeout(responseTimeout)
+      }
+
+      request.once('finish', onFinish)
+      request.once('response', onResponse)
+    }
+
+    if (this.agent.options.timeouts.transfer) {
+      let transferTimeout
+
+      const onResponse = (res) => {
+        transferTimeout = setTimeout(() => {
+          res.destroy(new TransferTimeoutError(this, request))
+        }, this.agent.options.timeouts.transfer)
+
+        res.once('close', () => {
+          clearTimeout(transferTimeout)
+        })
+      }
+
+      request.once('response', onResponse)
+    }
+  }
+}
+
+module.exports = SocksProxy
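
The constructor's protocol branching encodes who resolves DNS: socks4 and socks5 resolve locally (shouldLookup), while socks4a, socks5h, and bare socks defer resolution to the proxy. Summarized as data, as an illustrative restatement rather than part of the module:

const variants = {
  'socks4:': { type: 4, resolves: 'client' },
  'socks4a:': { type: 4, resolves: 'proxy' },
  'socks5:': { type: 5, resolves: 'client' },
  'socks5h:': { type: 5, resolves: 'proxy' },
  'socks:': { type: 5, resolves: 'proxy' }, // bare socks: treated as socks5h
}

console.log(variants[new URL('socks5h://localhost:1080').protocol])
// { type: 5, resolves: 'proxy' }
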
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/util.js b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/util.js
new file mode 100644
index 0000000000000..512207084d23e
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/util.js
@@ -0,0 +1,33 @@
+'use strict'
+
+const dns = require('dns')
+
+const normalizeOptions = (_options) => {
+  const options = { ..._options }
+
+  if (typeof options.keepAlive === 'undefined') {
+    options.keepAlive = true
+  }
+
+  if (!options.timeouts) {
+    options.timeouts = {}
+  }
+
+  if (options.timeout) {
+    options.timeouts.idle = options.timeout
+    delete options.timeout
+  }
+
+  options.family = !isNaN(+options.family) ? +options.family : 0
+  options.dns = {
+    ttl: 5 * 60 * 1000,
+    lookup: dns.lookup,
+    ...options.dns,
+  }
+
+  return options
+}
+
+module.exports = {
+  normalizeOptions,
+}
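
A quick illustration of what normalizeOptions does to its input: keep-alive defaults on, a bare timeout is reinterpreted as the idle timeout, family is coerced to a number, and dns picks up the shared cache defaults. Sketched with assumed inputs:

const { normalizeOptions } = require('./util.js')

const out = normalizeOptions({ timeout: 1000, family: '6' })
console.log(out.keepAlive)     // true (defaulted)
console.log(out.timeouts.idle) // 1000 (moved from `timeout`)
console.log('timeout' in out)  // false (the shorthand is consumed)
console.log(out.family)        // 6 (coerced from the string '6')
console.log(out.dns.ttl)       // 300000 (five-minute default)
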
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/package.json b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/package.json
new file mode 100644
index 0000000000000..a3fb4262b9c86
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/package.json
@@ -0,0 +1,56 @@
+{
+  "name": "@npmcli/agent",
+  "version": "1.1.0",
+  "description": "the http/https agent used by the npm cli",
+  "main": "lib/index.js",
+  "scripts": {
+    "gencerts": "bash scripts/create-cert.sh",
+    "test": "tap",
+    "lint": "eslint \"**/*.js\"",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "lintfix": "npm run lint -- --fix",
+    "snap": "tap",
+    "posttest": "npm run lint"
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/agent/issues"
+  },
+  "homepage": "https://github.com/npm/agent#readme",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.15.1",
+    "publish": "true"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.15.1",
+    "minipass-fetch": "^3.0.3",
+    "nock": "^13.2.7",
+    "simple-socks": "^2.2.2",
+    "tap": "^16.3.0"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/agent.git"
+  },
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "dependencies": {
+    "lru-cache": "^7.18.3",
+    "socks": "^2.7.1"
+  }
+}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md b/node_modules/npm-registry-fetch/node_modules/cacache/LICENSE.md
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md
rename to node_modules/npm-registry-fetch/node_modules/cacache/LICENSE.md
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js b/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/path.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js
rename to node_modules/npm-registry-fetch/node_modules/cacache/lib/content/path.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js b/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/read.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js
rename to node_modules/npm-registry-fetch/node_modules/cacache/lib/content/read.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js b/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/rm.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js
rename to node_modules/npm-registry-fetch/node_modules/cacache/lib/content/rm.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js b/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/write.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js
rename to node_modules/npm-registry-fetch/node_modules/cacache/lib/content/write.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js b/node_modules/npm-registry-fetch/node_modules/cacache/lib/entry-index.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js
rename to node_modules/npm-registry-fetch/node_modules/cacache/lib/entry-index.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js b/node_modules/npm-registry-fetch/node_modules/cacache/lib/get.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/get.js
rename to node_modules/npm-registry-fetch/node_modules/cacache/lib/get.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js b/node_modules/npm-registry-fetch/node_modules/cacache/lib/index.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/index.js
rename to node_modules/npm-registry-fetch/node_modules/cacache/lib/index.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js b/node_modules/npm-registry-fetch/node_modules/cacache/lib/memoization.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js
rename to node_modules/npm-registry-fetch/node_modules/cacache/lib/memoization.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js b/node_modules/npm-registry-fetch/node_modules/cacache/lib/put.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/put.js
rename to node_modules/npm-registry-fetch/node_modules/cacache/lib/put.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js b/node_modules/npm-registry-fetch/node_modules/cacache/lib/rm.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js
rename to node_modules/npm-registry-fetch/node_modules/cacache/lib/rm.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js b/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/glob.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js
rename to node_modules/npm-registry-fetch/node_modules/cacache/lib/util/glob.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/hash-to-segments.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js
rename to node_modules/npm-registry-fetch/node_modules/cacache/lib/util/hash-to-segments.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js b/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/tmp.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js
rename to node_modules/npm-registry-fetch/node_modules/cacache/lib/util/tmp.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js b/node_modules/npm-registry-fetch/node_modules/cacache/lib/verify.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js
rename to node_modules/npm-registry-fetch/node_modules/cacache/lib/verify.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/package.json b/node_modules/npm-registry-fetch/node_modules/cacache/package.json
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/package.json
rename to node_modules/npm-registry-fetch/node_modules/cacache/package.json
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE
new file mode 100644
index 0000000000000..1808eb2844231
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright 2017-2022 (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js
new file mode 100644
index 0000000000000..45141095074ec
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js
@@ -0,0 +1,469 @@
+const { Request, Response } = require('minipass-fetch')
+const { Minipass } = require('minipass')
+const MinipassFlush = require('minipass-flush')
+const cacache = require('cacache')
+const url = require('url')
+
+const CachingMinipassPipeline = require('../pipeline.js')
+const CachePolicy = require('./policy.js')
+const cacheKey = require('./key.js')
+const remote = require('../remote.js')
+
+const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
+
+// allow list for request headers that will be written to the cache index
+// note: we will also store any request headers
+// that are named in a response's vary header
+const KEEP_REQUEST_HEADERS = [
+  'accept-charset',
+  'accept-encoding',
+  'accept-language',
+  'accept',
+  'cache-control',
+]
+
+// allow list for response headers that will be written to the cache index
+// note: we must not store the real response's age header, or when we load
+// a cache policy based on the metadata it will think the cached response
+// is always stale
+const KEEP_RESPONSE_HEADERS = [
+  'cache-control',
+  'content-encoding',
+  'content-language',
+  'content-type',
+  'date',
+  'etag',
+  'expires',
+  'last-modified',
+  'link',
+  'location',
+  'pragma',
+  'vary',
+]
+
+// return an object containing all metadata to be written to the index
+const getMetadata = (request, response, options) => {
+  const metadata = {
+    time: Date.now(),
+    url: request.url,
+    reqHeaders: {},
+    resHeaders: {},
+
+    // options on which we must match the request and vary the response
+    options: {
+      compress: options.compress != null ? options.compress : request.compress,
+    },
+  }
+
+  // only save the status if it's not a 200 or 304
+  if (response.status !== 200 && response.status !== 304) {
+    metadata.status = response.status
+  }
+
+  for (const name of KEEP_REQUEST_HEADERS) {
+    if (request.headers.has(name)) {
+      metadata.reqHeaders[name] = request.headers.get(name)
+    }
+  }
+
+  // if the request's host header differs from the host in the url
+  // we need to keep it; otherwise it's just noise and we ignore it
+  const host = request.headers.get('host')
+  const parsedUrl = new url.URL(request.url)
+  if (host && parsedUrl.host !== host) {
+    metadata.reqHeaders.host = host
+  }
+
+  // if the response has a vary header, make sure
+  // we store the relevant request headers too
+  if (response.headers.has('vary')) {
+    const vary = response.headers.get('vary')
+    // a vary of "*" means every header causes a different response.
+    // in that scenario, we do not include any additional headers
+    // as the freshness check will always fail anyway and we don't
+    // want to bloat the cache indexes
+    if (vary !== '*') {
+      // copy any other request headers that will vary the response
+      const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
+      for (const name of varyHeaders) {
+        if (request.headers.has(name)) {
+          metadata.reqHeaders[name] = request.headers.get(name)
+        }
+      }
+    }
+  }
+
+  for (const name of KEEP_RESPONSE_HEADERS) {
+    if (response.headers.has(name)) {
+      metadata.resHeaders[name] = response.headers.get(name)
+    }
+  }
+
+  for (const name of options.cacheAdditionalHeaders) {
+    if (response.headers.has(name)) {
+      metadata.resHeaders[name] = response.headers.get(name)
+    }
+  }
+
+  return metadata
+}
+
+// symbols used to hide objects that may be lazily evaluated in a getter
+const _request = Symbol('request')
+const _response = Symbol('response')
+const _policy = Symbol('policy')
+
+class CacheEntry {
+  constructor ({ entry, request, response, options }) {
+    if (entry) {
+      this.key = entry.key
+      this.entry = entry
+      // previous versions of this module didn't write an explicit timestamp in
+      // the metadata, so fall back to the entry's timestamp. we can't use the
+      // entry timestamp to determine staleness because cacache will update it
+      // when it verifies its data
+      this.entry.metadata.time = this.entry.metadata.time || this.entry.time
+    } else {
+      this.key = cacheKey(request)
+    }
+
+    this.options = options
+
+    // these properties are behind getters that lazily evaluate
+    this[_request] = request
+    this[_response] = response
+    this[_policy] = null
+  }
+
+  // returns a CacheEntry instance that satisfies the given request
+  // or undefined if no existing entry satisfies
+  static async find (request, options) {
+    try {
+      // compacts the index and returns an array of unique entries
+      var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
+        const entryA = new CacheEntry({ entry: A, options })
+        const entryB = new CacheEntry({ entry: B, options })
+        return entryA.policy.satisfies(entryB.request)
+      }, {
+        validateEntry: (entry) => {
+          // clean out entries with a buggy content-encoding value
+          if (entry.metadata &&
+              entry.metadata.resHeaders &&
+              entry.metadata.resHeaders['content-encoding'] === null) {
+            return false
+          }
+
+          // if an integrity is null, it needs to have a status specified
+          if (entry.integrity === null) {
+            return !!(entry.metadata && entry.metadata.status)
+          }
+
+          return true
+        },
+      })
+    } catch (err) {
+      // if the compact request fails, ignore the error and return
+      return
+    }
+
+    // a cache mode of 'reload' means to behave as though we have no cache
+    // on the way to the network. return undefined to allow cacheFetch to
+    // create a brand new request no matter what.
+    if (options.cache === 'reload') {
+      return
+    }
+
+    // find the specific entry that satisfies the request
+    let match
+    for (const entry of matches) {
+      const _entry = new CacheEntry({
+        entry,
+        options,
+      })
+
+      if (_entry.policy.satisfies(request)) {
+        match = _entry
+        break
+      }
+    }
+
+    return match
+  }
+
+  // if the user made a PUT/POST/PATCH then we invalidate our
+  // cache for the same url by deleting the index entirely
+  static async invalidate (request, options) {
+    const key = cacheKey(request)
+    try {
+      await cacache.rm.entry(options.cachePath, key, { removeFully: true })
+    } catch (err) {
+      // ignore errors
+    }
+  }
+
+  get request () {
+    if (!this[_request]) {
+      this[_request] = new Request(this.entry.metadata.url, {
+        method: 'GET',
+        headers: this.entry.metadata.reqHeaders,
+        ...this.entry.metadata.options,
+      })
+    }
+
+    return this[_request]
+  }
+
+  get response () {
+    if (!this[_response]) {
+      this[_response] = new Response(null, {
+        url: this.entry.metadata.url,
+        counter: this.options.counter,
+        status: this.entry.metadata.status || 200,
+        headers: {
+          ...this.entry.metadata.resHeaders,
+          'content-length': this.entry.size,
+        },
+      })
+    }
+
+    return this[_response]
+  }
+
+  get policy () {
+    if (!this[_policy]) {
+      this[_policy] = new CachePolicy({
+        entry: this.entry,
+        request: this.request,
+        response: this.response,
+        options: this.options,
+      })
+    }
+
+    return this[_policy]
+  }
+
+  // wraps the response in a pipeline that stores the data
+  // in the cache while the user consumes it
+  async store (status) {
+    // if the request was not a GET, we got a status other than
+    // 200, 301, or 308, or the CachePolicy forbids storage, append
+    // the cache status header and return the response untouched
+    if (
+      this.request.method !== 'GET' ||
+      ![200, 301, 308].includes(this.response.status) ||
+      !this.policy.storable()
+    ) {
+      this.response.headers.set('x-local-cache-status', 'skip')
+      return this.response
+    }
+
+    const size = this.response.headers.get('content-length')
+    const cacheOpts = {
+      algorithms: this.options.algorithms,
+      metadata: getMetadata(this.request, this.response, this.options),
+      size,
+      integrity: this.options.integrity,
+      integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
+    }
+
+    let body = null
+    // we only set a body if the status is a 200; redirects are
+    // stored as metadata only
+    if (this.response.status === 200) {
+      let cacheWriteResolve, cacheWriteReject
+      const cacheWritePromise = new Promise((resolve, reject) => {
+        cacheWriteResolve = resolve
+        cacheWriteReject = reject
+      })
+
+      body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
+        flush () {
+          return cacheWritePromise
+        },
+      }))
+      // this is always true since if we aren't reusing the one from the remote fetch, we
+      // are using the one from cacache
+      body.hasIntegrityEmitter = true
+
+      const onResume = () => {
+        const tee = new Minipass()
+        const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
+        // re-emit the integrity and size events on our new response body so they can be reused
+        cacheStream.on('integrity', i => body.emit('integrity', i))
+        cacheStream.on('size', s => body.emit('size', s))
+        // pipe the tee into the cache stream so the body is written to the cache as it flows
+        tee.pipe(cacheStream)
+        // TODO if the cache write fails, log a warning but return the response anyway
+        // eslint-disable-next-line promise/catch-or-return
+        cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
+        body.unshift(tee)
+        body.unshift(this.response.body)
+      }
+
+      body.once('resume', onResume)
+      body.once('end', () => body.removeListener('resume', onResume))
+    } else {
+      await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
+    }
+
+    // note: we do not set the x-local-cache-hash header because we do not know
+    // the hash value until after the write to the cache completes, which doesn't
+    // happen until after the response has been sent and it's too late to write
+    // the header anyway
+    this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
+    this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
+    this.response.headers.set('x-local-cache-mode', 'stream')
+    this.response.headers.set('x-local-cache-status', status)
+    this.response.headers.set('x-local-cache-time', new Date().toISOString())
+    const newResponse = new Response(body, {
+      url: this.response.url,
+      status: this.response.status,
+      headers: this.response.headers,
+      counter: this.options.counter,
+    })
+    return newResponse
+  }
+
+  // use the cached data to create a response and return it
+  async respond (method, options, status) {
+    let response
+    if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
+      // if the request is a HEAD, or the response is a redirect,
+      // then the metadata in the entry already includes everything
+      // we need to build a response
+      response = this.response
+    } else {
+      // we're responding with a full cached response, so create a body
+      // that reads from cacache and attach it to a new Response
+      const body = new Minipass()
+      const headers = { ...this.policy.responseHeaders() }
+
+      const onResume = () => {
+        const cacheStream = cacache.get.stream.byDigest(
+          this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
+        )
+        cacheStream.on('error', async (err) => {
+          cacheStream.pause()
+          if (err.code === 'EINTEGRITY') {
+            await cacache.rm.content(
+              this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
+            )
+          }
+          if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
+            await CacheEntry.invalidate(this.request, this.options)
+          }
+          body.emit('error', err)
+          cacheStream.resume()
+        })
+        // emit the integrity and size events based on our metadata so we're consistent
+        body.emit('integrity', this.entry.integrity)
+        body.emit('size', Number(headers['content-length']))
+        cacheStream.pipe(body)
+      }
+
+      body.once('resume', onResume)
+      body.once('end', () => body.removeListener('resume', onResume))
+      response = new Response(body, {
+        url: this.entry.metadata.url,
+        counter: options.counter,
+        status: 200,
+        headers,
+      })
+    }
+
+    response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
+    response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
+    response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
+    response.headers.set('x-local-cache-mode', 'stream')
+    response.headers.set('x-local-cache-status', status)
+    response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
+    return response
+  }
+
+  // use the provided request along with this cache entry to
+  // revalidate the stored response. returns a response, either
+  // from the cache or from the update
+  async revalidate (request, options) {
+    const revalidateRequest = new Request(request, {
+      headers: this.policy.revalidationHeaders(request),
+    })
+
+    try {
+      // NOTE: be sure to remove the headers property from the
+      // user supplied options, since we have already defined
+      // them on the new request object. if they're still in the
+      // options then those will overwrite the ones from the policy
+      var response = await remote(revalidateRequest, {
+        ...options,
+        headers: undefined,
+      })
+    } catch (err) {
+      // if the network fetch fails, return the stale
+      // cached response unless it has a cache-control
+      // of 'must-revalidate'
+      if (!this.policy.mustRevalidate) {
+        return this.respond(request.method, options, 'stale')
+      }
+
+      throw err
+    }
+
+    if (this.policy.revalidated(revalidateRequest, response)) {
+      // we got a 304, write a new index to the cache and respond from cache
+      const metadata = getMetadata(request, response, options)
+      // 304 responses do not include headers that are specific to the response data
+      // since they do not include a body, so we copy values for headers that were
+      // in the old cache entry to the new one, if the new metadata does not already
+      // include that header
+      for (const name of KEEP_RESPONSE_HEADERS) {
+        if (
+          !hasOwnProperty(metadata.resHeaders, name) &&
+          hasOwnProperty(this.entry.metadata.resHeaders, name)
+        ) {
+          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
+        }
+      }
+
+      for (const name of options.cacheAdditionalHeaders) {
+        const inMeta = hasOwnProperty(metadata.resHeaders, name)
+        const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
+        const inPolicy = hasOwnProperty(this.policy.response.headers, name)
+
+        // if the header is in the existing entry, but it is not in the metadata
+        // then we need to write it to the metadata as this will refresh the on-disk cache
+        if (!inMeta && inEntry) {
+          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
+        }
+        // if the header is in the metadata, but not in the policy, then we need to set
+        // it in the policy so that it's included in the immediate response. future
+        // responses will load a new cache entry, so we don't need to change that
+        if (!inPolicy && inMeta) {
+          this.policy.response.headers[name] = metadata.resHeaders[name]
+        }
+      }
+
+      try {
+        await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
+          size: this.entry.size,
+          metadata,
+        })
+      } catch (err) {
+        // if updating the cache index fails, we ignore it and
+        // respond anyway
+      }
+      return this.respond(request.method, options, 'revalidated')
+    }
+
+    // if we got a modified response, create a new entry based on it
+    const newEntry = new CacheEntry({
+      request,
+      response,
+      options,
+    })
+
+    // respond with the new entry while writing it to the cache
+    return newEntry.store('updated')
+  }
+}
+
+module.exports = CacheEntry
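
Both store() and respond() above stamp x-local-cache-* headers onto the response, which is the main way callers can observe what the cache decided. A minimal sketch of reading them, assuming make-fetch-happen is installed; the URL and cache path are hypothetical:

const fetch = require('make-fetch-happen')

fetch('https://registry.npmjs.org/npm', { cachePath: '/tmp/my-cache' }).then(res => {
  // one of: miss, hit, stale, revalidated, updated, or skip (see store/respond above)
  console.log(res.headers.get('x-local-cache-status'))
  // the cache index key, percent-encoded before being set as a header
  console.log(decodeURIComponent(res.headers.get('x-local-cache-key')))
})
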
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js
new file mode 100644
index 0000000000000..67a66573bebe6
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js
@@ -0,0 +1,11 @@
+class NotCachedError extends Error {
+  constructor (url) {
+    /* eslint-disable-next-line max-len */
+    super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
+    this.code = 'ENOTCACHED'
+  }
+}
+
+module.exports = {
+  NotCachedError,
+}
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js
new file mode 100644
index 0000000000000..0de49d23fb933
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js
@@ -0,0 +1,49 @@
+const { NotCachedError } = require('./errors.js')
+const CacheEntry = require('./entry.js')
+const remote = require('../remote.js')
+
+// do whatever is necessary to get a Response and return it
+const cacheFetch = async (request, options) => {
+  // try to find a cached entry that satisfies this request
+  const entry = await CacheEntry.find(request, options)
+  if (!entry) {
+    // no cached result, if the cache mode is 'only-if-cached' that's a failure
+    if (options.cache === 'only-if-cached') {
+      throw new NotCachedError(request.url)
+    }
+
+    // otherwise, we make a request, store it and return it
+    const response = await remote(request, options)
+    const newEntry = new CacheEntry({ request, response, options })
+    return newEntry.store('miss')
+  }
+
+  // we have a cached response that satisfies this request, however if the cache
+  // mode is 'no-cache' then we send the revalidation request no matter what
+  if (options.cache === 'no-cache') {
+    return entry.revalidate(request, options)
+  }
+
+  // if the cached entry is not stale, or if the cache mode is 'force-cache' or
+  // 'only-if-cached' we can respond with the cached entry. set the status
+  // based on the result of needsRevalidation and respond
+  const _needsRevalidation = entry.policy.needsRevalidation(request)
+  if (options.cache === 'force-cache' ||
+      options.cache === 'only-if-cached' ||
+      !_needsRevalidation) {
+    return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
+  }
+
+  // if we got here, the cache entry is stale so revalidate it
+  return entry.revalidate(request, options)
+}
+
+cacheFetch.invalidate = async (request, options) => {
+  if (!options.cachePath) {
+    return
+  }
+
+  return CacheEntry.invalidate(request, options)
+}
+
+module.exports = cacheFetch
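
cacheFetch above implements the fetch cache modes ('no-store' is short-circuited earlier, in the storability check): 'default' serves fresh entries and revalidates stale ones, 'no-cache' always revalidates, 'force-cache' and 'only-if-cached' serve even stale entries, and 'only-if-cached' fails outright on a miss. A sketch of that failure case, with a hypothetical URL and cache path:

const fetch = require('make-fetch-happen')

fetch('https://example.com/never-fetched', {
  cachePath: '/tmp/my-cache',
  cache: 'only-if-cached',
}).catch(err => {
  // a cache miss in this mode rejects with the NotCachedError defined above
  console.log(err.code) // 'ENOTCACHED'
})
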
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js
new file mode 100644
index 0000000000000..f7684d562b7fa
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js
@@ -0,0 +1,17 @@
+const { URL, format } = require('url')
+
+// options passed to url.format() when generating a key
+const formatOptions = {
+  auth: false,
+  fragment: false,
+  search: true,
+  unicode: false,
+}
+
+// returns a string to be used as the cache key for the Request
+const cacheKey = (request) => {
+  const parsed = new URL(request.url)
+  return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
+}
+
+module.exports = cacheKey
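
The formatting options above strip credentials and fragments but keep the query string, so two requests differing only in auth share a cache entry while different query strings do not. A quick sketch of the derived key for a hypothetical URL:

const { URL, format } = require('url')

const parsed = new URL('https://user:pass@registry.npmjs.org/npm?write=true#frag')
const key = `make-fetch-happen:request-cache:${format(parsed, {
  auth: false,
  fragment: false,
  search: true,
  unicode: false,
})}`
console.log(key)
// make-fetch-happen:request-cache:https://registry.npmjs.org/npm?write=true
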
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js
new file mode 100644
index 0000000000000..ada3c8600dae9
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js
@@ -0,0 +1,161 @@
+const CacheSemantics = require('http-cache-semantics')
+const Negotiator = require('negotiator')
+const ssri = require('ssri')
+
+// options passed to http-cache-semantics constructor
+const policyOptions = {
+  shared: false,
+  ignoreCargoCult: true,
+}
+
+// a fake empty response, used when only testing the
+// request for storability
+const emptyResponse = { status: 200, headers: {} }
+
+// returns a plain object representation of the Request
+const requestObject = (request) => {
+  const _obj = {
+    method: request.method,
+    url: request.url,
+    headers: {},
+    compress: request.compress,
+  }
+
+  request.headers.forEach((value, key) => {
+    _obj.headers[key] = value
+  })
+
+  return _obj
+}
+
+// returns a plain object representation of the Response
+const responseObject = (response) => {
+  const _obj = {
+    status: response.status,
+    headers: {},
+  }
+
+  response.headers.forEach((value, key) => {
+    _obj.headers[key] = value
+  })
+
+  return _obj
+}
+
+class CachePolicy {
+  constructor ({ entry, request, response, options }) {
+    this.entry = entry
+    this.request = requestObject(request)
+    this.response = responseObject(response)
+    this.options = options
+    this.policy = new CacheSemantics(this.request, this.response, policyOptions)
+
+    if (this.entry) {
+      // if we have an entry, copy the timestamp to the _responseTime
+      // this is necessary because the CacheSemantics constructor forces
+      // the value to Date.now() which means a policy created from a
+      // cache entry is likely to always identify itself as stale
+      this.policy._responseTime = this.entry.metadata.time
+    }
+  }
+
+  // static method to quickly determine if a request alone is storable
+  static storable (request, options) {
+    // no cachePath means no caching
+    if (!options.cachePath) {
+      return false
+    }
+
+    // user explicitly asked not to cache
+    if (options.cache === 'no-store') {
+      return false
+    }
+
+    // we only cache GET and HEAD requests
+    if (!['GET', 'HEAD'].includes(request.method)) {
+      return false
+    }
+
+    // otherwise, let http-cache-semantics make the decision
+    // based on the request's headers
+    const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
+    return policy.storable()
+  }
+
+  // returns true if the policy satisfies the request
+  satisfies (request) {
+    const _req = requestObject(request)
+    if (this.request.headers.host !== _req.headers.host) {
+      return false
+    }
+
+    if (this.request.compress !== _req.compress) {
+      return false
+    }
+
+    const negotiatorA = new Negotiator(this.request)
+    const negotiatorB = new Negotiator(_req)
+
+    if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
+      return false
+    }
+
+    if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
+      return false
+    }
+
+    if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
+      return false
+    }
+
+    if (this.options.integrity) {
+      return ssri.parse(this.options.integrity).match(this.entry.integrity)
+    }
+
+    return true
+  }
+
+  // returns true if the request and response allow caching
+  storable () {
+    return this.policy.storable()
+  }
+
+  // NOTE: this is a hack to avoid parsing the cache-control
+  // header ourselves; it returns true if the response's
+  // cache-control contains must-revalidate
+  get mustRevalidate () {
+    return !!this.policy._rescc['must-revalidate']
+  }
+
+  // returns true if the cached response requires revalidation
+  // for the given request
+  needsRevalidation (request) {
+    const _req = requestObject(request)
+    // force method to GET because we only cache GETs
+    // but can serve a HEAD from a cached GET
+    _req.method = 'GET'
+    return !this.policy.satisfiesWithoutRevalidation(_req)
+  }
+
+  responseHeaders () {
+    return this.policy.responseHeaders()
+  }
+
+  // returns a new object containing the appropriate headers
+  // to send a revalidation request
+  revalidationHeaders (request) {
+    const _req = requestObject(request)
+    return this.policy.revalidationHeaders(_req)
+  }
+
+  // returns true if the request/response was revalidated
+  // successfully. returns false if a new response was received
+  revalidated (request, response) {
+    const _req = requestObject(request)
+    const _res = responseObject(response)
+    const policy = this.policy.revalidatedPolicy(_req, _res)
+    return !policy.modified
+  }
+}
+
+module.exports = CachePolicy
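
The static storable() check above is the cheap gate run before any cache lookup: no cachePath, a cache mode of 'no-store', or a non-GET/HEAD method all bypass caching before http-cache-semantics is even consulted. A sketch, with an illustrative require path:

const { Request } = require('minipass-fetch')
const CachePolicy = require('./lib/cache/policy.js') // illustrative path

const req = new Request('https://registry.npmjs.org/npm')
console.log(CachePolicy.storable(req, { cachePath: '/tmp/my-cache' })) // true
console.log(CachePolicy.storable(req, {}))                             // false: no cachePath
console.log(CachePolicy.storable(req, {
  cachePath: '/tmp/my-cache',
  cache: 'no-store',
}))                                                                    // false: explicit opt-out
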
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js
new file mode 100644
index 0000000000000..233ba67e16550
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js
@@ -0,0 +1,118 @@
+'use strict'
+
+const { FetchError, Request, isRedirect } = require('minipass-fetch')
+const url = require('url')
+
+const CachePolicy = require('./cache/policy.js')
+const cache = require('./cache/index.js')
+const remote = require('./remote.js')
+
+// given a Request, a Response and user options
+// return true if the response is a redirect that
+// can be followed. we throw errors that will result
+// in the fetch being rejected if the redirect is
+// possible but invalid for some reason
+const canFollowRedirect = (request, response, options) => {
+  if (!isRedirect(response.status)) {
+    return false
+  }
+
+  if (options.redirect === 'manual') {
+    return false
+  }
+
+  if (options.redirect === 'error') {
+    throw new FetchError(`redirect mode is set to error: ${request.url}`,
+      'no-redirect', { code: 'ENOREDIRECT' })
+  }
+
+  if (!response.headers.has('location')) {
+    throw new FetchError(`redirect location header missing for: ${request.url}`,
+      'no-location', { code: 'EINVALIDREDIRECT' })
+  }
+
+  if (request.counter >= request.follow) {
+    throw new FetchError(`maximum redirect reached at: ${request.url}`,
+      'max-redirect', { code: 'EMAXREDIRECT' })
+  }
+
+  return true
+}
+
+// given a Request, a Response, and the user's options return an object
+// with a new Request and a new options object that will be used for
+// following the redirect
+const getRedirect = (request, response, options) => {
+  const _opts = { ...options }
+  const location = response.headers.get('location')
+  const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
+  // Comment below is used under the following license:
+  /**
+   * @license
+   * Copyright (c) 2010-2012 Mikeal Rogers
+   * Licensed under the Apache License, Version 2.0 (the "License");
+   * you may not use this file except in compliance with the License.
+   * You may obtain a copy of the License at
+   * http://www.apache.org/licenses/LICENSE-2.0
+   * Unless required by applicable law or agreed to in writing,
+   * software distributed under the License is distributed on an "AS
+   * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+   * express or implied. See the License for the specific language
+   * governing permissions and limitations under the License.
+   */
+
+  // Remove authorization if changing hostnames (but not if just
+  // changing ports or protocols).  This matches the behavior of request:
+  // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
+  if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
+    request.headers.delete('authorization')
+    request.headers.delete('cookie')
+  }
+
+  // for POST request with 301/302 response, or any request with 303 response,
+  // use GET when following redirect
+  if (
+    response.status === 303 ||
+    (request.method === 'POST' && [301, 302].includes(response.status))
+  ) {
+    _opts.method = 'GET'
+    _opts.body = null
+    request.headers.delete('content-length')
+  }
+
+  _opts.headers = {}
+  request.headers.forEach((value, key) => {
+    _opts.headers[key] = value
+  })
+
+  _opts.counter = ++request.counter
+  const redirectReq = new Request(url.format(redirectUrl), _opts)
+  return {
+    request: redirectReq,
+    options: _opts,
+  }
+}
+
+const fetch = async (request, options) => {
+  const response = CachePolicy.storable(request, options)
+    ? await cache(request, options)
+    : await remote(request, options)
+
+  // if the request wasn't a GET or HEAD, and the response
+  // status is between 200 and 399 inclusive, invalidate the
+  // request url
+  if (!['GET', 'HEAD'].includes(request.method) &&
+      response.status >= 200 &&
+      response.status <= 399) {
+    await cache.invalidate(request, options)
+  }
+
+  if (!canFollowRedirect(request, response, options)) {
+    return response
+  }
+
+  const redirect = getRedirect(request, response, options)
+  return fetch(redirect.request, redirect.options)
+}
+
+module.exports = fetch
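
The redirect options handled by canFollowRedirect map onto WHATWG fetch semantics: 'follow' (the default) chases the Location header up to the follow limit, 'manual' hands the 3xx response back untouched, and 'error' rejects. A sketch with a hypothetical URL:

const fetch = require('make-fetch-happen')

// 'manual': inspect the redirect yourself
fetch('https://example.com/moved', { redirect: 'manual' })
  .then(res => console.log(res.status, res.headers.get('location')))

// 'error': any redirect is treated as a failure
fetch('https://example.com/moved', { redirect: 'error' })
  .catch(err => console.log(err.code)) // 'ENOREDIRECT'
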
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js
new file mode 100644
index 0000000000000..2f12e8e1b6113
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js
@@ -0,0 +1,41 @@
+const { FetchError, Headers, Request, Response } = require('minipass-fetch')
+
+const configureOptions = require('./options.js')
+const fetch = require('./fetch.js')
+
+const makeFetchHappen = (url, opts) => {
+  const options = configureOptions(opts)
+
+  const request = new Request(url, options)
+  return fetch(request, options)
+}
+
+makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
+  if (typeof defaultUrl === 'object') {
+    defaultOptions = defaultUrl
+    defaultUrl = null
+  }
+
+  const defaultedFetch = (url, options = {}) => {
+    const finalUrl = url || defaultUrl
+    const finalOptions = {
+      ...defaultOptions,
+      ...options,
+      headers: {
+        ...defaultOptions.headers,
+        ...options.headers,
+      },
+    }
+    return wrappedFetch(finalUrl, finalOptions)
+  }
+
+  defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
+    makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
+  return defaultedFetch
+}
+
+module.exports = makeFetchHappen
+module.exports.FetchError = FetchError
+module.exports.Headers = Headers
+module.exports.Request = Request
+module.exports.Response = Response
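
makeFetchHappen.defaults() builds a client with options baked in; nested calls compose, per-call options win, and headers are shallow-merged. A usage sketch (URL and cache path hypothetical):

const fetch = require('make-fetch-happen')

const cachedFetch = fetch.defaults({ cachePath: '/tmp/my-cache' })
const npmFetch = cachedFetch.defaults('https://registry.npmjs.org/npm', {
  headers: { accept: 'application/json' },
})

// no url argument, so the default url is used; cachePath is inherited
npmFetch().then(res => console.log(res.status))
// a per-call url overrides the default
npmFetch('https://registry.npmjs.org/node').then(res => console.log(res.status))
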
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js
new file mode 100644
index 0000000000000..f77511279f831
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js
@@ -0,0 +1,54 @@
+const dns = require('dns')
+
+const conditionalHeaders = [
+  'if-modified-since',
+  'if-none-match',
+  'if-unmodified-since',
+  'if-match',
+  'if-range',
+]
+
+const configureOptions = (opts) => {
+  const { strictSSL, ...options } = { ...opts }
+  options.method = options.method ? options.method.toUpperCase() : 'GET'
+  options.rejectUnauthorized = strictSSL !== false
+
+  if (!options.retry) {
+    options.retry = { retries: 0 }
+  } else if (typeof options.retry === 'string') {
+    const retries = parseInt(options.retry, 10)
+    if (isFinite(retries)) {
+      options.retry = { retries }
+    } else {
+      options.retry = { retries: 0 }
+    }
+  } else if (typeof options.retry === 'number') {
+    options.retry = { retries: options.retry }
+  } else {
+    options.retry = { retries: 0, ...options.retry }
+  }
+
+  options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
+
+  options.cache = options.cache || 'default'
+  if (options.cache === 'default') {
+    const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
+      return conditionalHeaders.includes(name.toLowerCase())
+    })
+    if (hasConditionalHeader) {
+      options.cache = 'no-store'
+    }
+  }
+
+  options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
+
+  // cacheManager is deprecated, but if it's set and
+  // cachePath is not we should copy it to the new field
+  if (options.cacheManager && !options.cachePath) {
+    options.cachePath = options.cacheManager
+  }
+
+  return options
+}
+
+module.exports = configureOptions
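
configureOptions() normalizes the retry option so downstream code can always read options.retry.retries. All of the following are equivalent ways to ask for two retries; an unparseable string falls back to zero (require path is illustrative):

const configureOptions = require('./lib/options.js') // illustrative path

console.log(configureOptions({ retry: 2 }).retry)              // { retries: 2 }
console.log(configureOptions({ retry: '2' }).retry)            // { retries: 2 }
console.log(configureOptions({ retry: { retries: 2 } }).retry) // { retries: 2 }
console.log(configureOptions({ retry: 'nope' }).retry)         // { retries: 0 }
console.log(configureOptions({}).retry)                        // { retries: 0 }
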
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js
new file mode 100644
index 0000000000000..b1d221b2d0ce3
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const MinipassPipeline = require('minipass-pipeline')
+
+class CachingMinipassPipeline extends MinipassPipeline {
+  #events = []
+  #data = new Map()
+
+  constructor (opts, ...streams) {
+    // CRITICAL: do NOT pass the streams to the call to super(), this will start
+    // the flow of data and potentially cause the events we need to catch to emit
+    // before we've finished our own setup. instead we call super() with no args,
+    // finish our setup, and then push the streams into ourselves to start the
+    // data flow
+    super()
+    this.#events = opts.events
+
+    /* istanbul ignore next - coverage disabled because this is pointless to test here */
+    if (streams.length) {
+      this.push(...streams)
+    }
+  }
+
+  on (event, handler) {
+    if (this.#events.includes(event) && this.#data.has(event)) {
+      return handler(...this.#data.get(event))
+    }
+
+    return super.on(event, handler)
+  }
+
+  emit (event, ...data) {
+    if (this.#events.includes(event)) {
+      this.#data.set(event, data)
+    }
+
+    return super.emit(event, ...data)
+  }
+}
+
+module.exports = CachingMinipassPipeline
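
The on/emit overrides above make the named events "sticky": their latest arguments are recorded, and a listener attached after the fact is invoked immediately instead of silently missing the event. A small sketch, assuming minipass is installed and using an illustrative require path:

const { Minipass } = require('minipass')
const CachingMinipassPipeline = require('./lib/pipeline.js') // illustrative path

const src = new Minipass()
const pipeline = new CachingMinipassPipeline({ events: ['integrity'] }, src)

// the event fires before anyone is listening...
pipeline.emit('integrity', 'sha512-deadbeef') // hypothetical digest
// ...but a late subscriber still receives the recorded arguments
pipeline.on('integrity', i => console.log('integrity:', i))
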
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js
new file mode 100644
index 0000000000000..2aef9f8f969b0
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js
@@ -0,0 +1,127 @@
+const { Minipass } = require('minipass')
+const fetch = require('minipass-fetch')
+const promiseRetry = require('promise-retry')
+const ssri = require('ssri')
+
+const CachingMinipassPipeline = require('./pipeline.js')
+const { getAgent } = require('@npmcli/agent')
+const pkg = require('../package.json')
+
+const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
+
+const RETRY_ERRORS = [
+  'ECONNRESET', // remote socket closed on us
+  'ECONNREFUSED', // remote host refused to open connection
+  'EADDRINUSE', // failed to bind to a local port (proxy?)
+  'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
+  // from @npmcli/agent
+  'ECONNECTIONTIMEOUT',
+  'EIDLETIMEOUT',
+  'ERESPONSETIMEOUT',
+  'ETRANSFERTIMEOUT',
+  // Known codes we do NOT retry on:
+  // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
+  // EINVALIDPROXY // invalid protocol from @npmcli/agent
+  // EINVALIDRESPONSE // invalid status code from @npmcli/agent
+]
+
+const RETRY_TYPES = [
+  'request-timeout',
+]
+
+// make a request directly to the remote source,
+// retrying certain classes of errors as well as
+// following redirects (through the cache if necessary)
+// and verifying response integrity
+const remoteFetch = (request, options) => {
+  const agent = getAgent(request.url, options)
+  if (!request.headers.has('connection')) {
+    request.headers.set('connection', agent ? 'keep-alive' : 'close')
+  }
+
+  if (!request.headers.has('user-agent')) {
+    request.headers.set('user-agent', USER_AGENT)
+  }
+
+  // keep our own options since we're overriding the agent
+  // and the redirect mode
+  const _opts = {
+    ...options,
+    agent,
+    redirect: 'manual',
+  }
+
+  return promiseRetry(async (retryHandler, attemptNum) => {
+    const req = new fetch.Request(request, _opts)
+    try {
+      let res = await fetch(req, _opts)
+      if (_opts.integrity && res.status === 200) {
+        // we got a 200 response and the user has specified an expected
+        // integrity value, so wrap the response in an ssri stream to verify it
+        const integrityStream = ssri.integrityStream({
+          algorithms: _opts.algorithms,
+          integrity: _opts.integrity,
+          size: _opts.size,
+        })
+        const pipeline = new CachingMinipassPipeline({
+          events: ['integrity', 'size'],
+        }, res.body, integrityStream)
+        // we also propagate the integrity and size events out to the pipeline so we can use
+        // this new response body as an integrityEmitter for cacache
+        integrityStream.on('integrity', i => pipeline.emit('integrity', i))
+        integrityStream.on('size', s => pipeline.emit('size', s))
+        res = new fetch.Response(pipeline, res)
+        // set an explicit flag so we know if our response body will emit integrity and size
+        res.body.hasIntegrityEmitter = true
+      }
+
+      res.headers.set('x-fetch-attempts', attemptNum)
+
+      // do not retry POST requests, or requests with a streaming body
+      // do retry requests with a 408, 420, 429 or 500+ status in the response
+      const isStream = Minipass.isStream(req.body)
+      const isRetriable = req.method !== 'POST' &&
+          !isStream &&
+          ([408, 420, 429].includes(res.status) || res.status >= 500)
+
+      if (isRetriable) {
+        if (typeof options.onRetry === 'function') {
+          options.onRetry(res)
+        }
+
+        return retryHandler(res)
+      }
+
+      return res
+    } catch (err) {
+      const code = (err.code === 'EPROMISERETRY')
+        ? err.retried.code
+        : err.code
+
+      // err.retried will be the thing that was thrown from above
+      // if it's a response, we just got a bad status code and we
+      // can re-throw to allow the retry
+      const isRetryError = err.retried instanceof fetch.Response ||
+        (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
+
+      if (req.method === 'POST' || isRetryError) {
+        throw err
+      }
+
+      if (typeof options.onRetry === 'function') {
+        options.onRetry(err)
+      }
+
+      return retryHandler(err)
+    }
+  }, options.retry).catch((err) => {
+    // don't reject for http errors, just return them
+    if (err.status >= 400 && err.type !== 'system') {
+      return err
+    }
+
+    throw err
+  })
+}
+
+module.exports = remoteFetch
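
When an integrity value is supplied and the response is a 200, remoteFetch wraps the body in an ssri verification stream, so corruption surfaces as an error while the body is consumed rather than after. A sketch with a hypothetical tarball URL and digest:

const fetch = require('make-fetch-happen')

fetch('https://registry.npmjs.org/npm/-/npm-10.0.0.tgz', {
  integrity: 'sha512-deadbeef...', // hypothetical expected digest
}).then(res => res.buffer())           // consuming the body runs the verification
  .catch(err => console.log(err.code)) // 'EINTEGRITY' on a mismatch
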
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json
new file mode 100644
index 0000000000000..419db8fbb1289
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json
@@ -0,0 +1,80 @@
+{
+  "name": "make-fetch-happen",
+  "version": "12.0.0",
+  "description": "Opinionated, caching, retrying fetch client",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "posttest": "npm run lint",
+    "eslint": "eslint",
+    "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
+    "postlint": "template-oss-check",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/make-fetch-happen.git"
+  },
+  "keywords": [
+    "http",
+    "request",
+    "fetch",
+    "mean girls",
+    "caching",
+    "cache",
+    "subresource integrity"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/agent": "^1.1.0",
+    "cacache": "^17.0.0",
+    "http-cache-semantics": "^4.1.1",
+    "is-lambda": "^1.0.1",
+    "minipass": "^7.0.2",
+    "minipass-fetch": "^3.0.0",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "negotiator": "^0.6.3",
+    "promise-retry": "^2.0.1",
+    "ssri": "^10.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "nock": "^13.2.4",
+    "safe-buffer": "^5.2.1",
+    "standard-version": "^9.3.2",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^16.13.0 || >=18.0.0"
+  },
+  "tap": {
+    "color": 1,
+    "files": "test/*.js",
+    "check-coverage": true,
+    "timeout": 60,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "ciVersions": [
+      "16.13.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ],
+    "version": "4.18.0",
+    "publish": "true"
+  }
+}
diff --git a/package-lock.json b/package-lock.json
index 46ba9a0e343cf..ceca6adc42d1c 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -122,7 +122,7 @@
         "libnpmsearch": "^6.0.2",
         "libnpmteam": "^5.0.3",
         "libnpmversion": "^4.0.2",
-        "make-fetch-happen": "^12.0.0",
+        "make-fetch-happen": "^13.0.0",
         "minimatch": "^9.0.3",
         "minipass": "^7.0.3",
         "minipass-pipeline": "^1.2.4",
@@ -2341,16 +2341,25 @@
       }
     },
     "node_modules/@npmcli/agent": {
-      "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-1.1.0.tgz",
-      "integrity": "sha512-I9g/2XFOkflxm5IDrGSjCcR2d12Jmic0di9w/WpJBbzYuSXmfgoL+WwEV7zY/ajxzQr7o4vSkEJh6piyFLYtuQ==",
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.0.0.tgz",
+      "integrity": "sha512-RpRbD6PnaQIUl+p8MoH7sl2CHyMofCO0abOV+0VulqKW84+0nRWnj0bYFQELTN5HpNvzWAV8pRN6Fjx9ZLOS0g==",
       "inBundle": true,
       "dependencies": {
-        "lru-cache": "^7.18.3",
+        "lru-cache": "^10.0.1",
         "socks": "^2.7.1"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/agent/node_modules/lru-cache": {
+      "version": "10.0.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz",
+      "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==",
+      "inBundle": true,
+      "engines": {
+        "node": "14 || >=16.14"
       }
     },
     "node_modules/@npmcli/arborist": {
@@ -7992,13 +8001,13 @@
       "dev": true
     },
     "node_modules/make-fetch-happen": {
-      "version": "12.0.0",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-12.0.0.tgz",
-      "integrity": "sha512-xpuA2kA8Z66uGQjaSXd7rffqJOv60iYpP8X0TsZl3uwXlqxUVmHETImjM71JOPA694TlcX37GhlaCsl6z6fNVg==",
+      "version": "13.0.0",
+      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.0.tgz",
+      "integrity": "sha512-7ThobcL8brtGo9CavByQrQi+23aIfgYU++wg4B87AIS8Rb2ZBt/MEaDqzA00Xwv/jUjAjYkLHjVolYuTLKda2A==",
       "inBundle": true,
       "dependencies": {
-        "@npmcli/agent": "^1.1.0",
-        "cacache": "^17.0.0",
+        "@npmcli/agent": "^2.0.0",
+        "cacache": "^18.0.0",
         "http-cache-semantics": "^4.1.1",
         "is-lambda": "^1.0.1",
         "minipass": "^7.0.2",
@@ -8010,30 +8019,7 @@
         "ssri": "^10.0.0"
       },
       "engines": {
-        "node": "^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/make-fetch-happen/node_modules/cacache": {
-      "version": "17.1.4",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
-      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
-      "inBundle": true,
-      "dependencies": {
-        "@npmcli/fs": "^3.1.0",
-        "fs-minipass": "^3.0.0",
-        "glob": "^10.2.2",
-        "lru-cache": "^7.7.1",
-        "minipass": "^7.0.3",
-        "minipass-collect": "^1.0.2",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "p-map": "^4.0.0",
-        "ssri": "^10.0.0",
-        "tar": "^6.1.11",
-        "unique-filename": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/map-obj": {
@@ -9894,6 +9880,64 @@
         "node": "^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/npm-registry-fetch/node_modules/@npmcli/agent": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-1.1.0.tgz",
+      "integrity": "sha512-I9g/2XFOkflxm5IDrGSjCcR2d12Jmic0di9w/WpJBbzYuSXmfgoL+WwEV7zY/ajxzQr7o4vSkEJh6piyFLYtuQ==",
+      "inBundle": true,
+      "dependencies": {
+        "lru-cache": "^7.18.3",
+        "socks": "^2.7.1"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/npm-registry-fetch/node_modules/cacache": {
+      "version": "17.1.4",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
+      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "inBundle": true,
+      "dependencies": {
+        "@npmcli/fs": "^3.1.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^7.7.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^1.0.2",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^4.0.0",
+        "ssri": "^10.0.0",
+        "tar": "^6.1.11",
+        "unique-filename": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/npm-registry-fetch/node_modules/make-fetch-happen": {
+      "version": "12.0.0",
+      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-12.0.0.tgz",
+      "integrity": "sha512-xpuA2kA8Z66uGQjaSXd7rffqJOv60iYpP8X0TsZl3uwXlqxUVmHETImjM71JOPA694TlcX37GhlaCsl6z6fNVg==",
+      "inBundle": true,
+      "dependencies": {
+        "@npmcli/agent": "^1.1.0",
+        "cacache": "^17.0.0",
+        "http-cache-semantics": "^4.1.1",
+        "is-lambda": "^1.0.1",
+        "minipass": "^7.0.2",
+        "minipass-fetch": "^3.0.0",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "negotiator": "^0.6.3",
+        "promise-retry": "^2.0.1",
+        "ssri": "^10.0.0"
+      },
+      "engines": {
+        "node": "^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/npm-run-path": {
       "version": "4.0.1",
       "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz",
diff --git a/package.json b/package.json
index c405b20e7b246..b56506563983d 100644
--- a/package.json
+++ b/package.json
@@ -87,7 +87,7 @@
     "libnpmsearch": "^6.0.2",
     "libnpmteam": "^5.0.3",
     "libnpmversion": "^4.0.2",
-    "make-fetch-happen": "^12.0.0",
+    "make-fetch-happen": "^13.0.0",
     "minimatch": "^9.0.3",
     "minipass": "^7.0.3",
     "minipass-pipeline": "^1.2.4",

From 8e7381a17a7a3ecee532e892d040a942982a1d7b Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 12:12:10 -0700
Subject: [PATCH 33/68] deps: normalize-package-data@6.0.0

---
 package-lock.json                     | 16 +++++++++++++++-
 workspaces/libnpmpublish/package.json |  2 +-
 2 files changed, 16 insertions(+), 2 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index ceca6adc42d1c..0d171a58688a3 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -16330,7 +16330,7 @@
       "license": "ISC",
       "dependencies": {
         "ci-info": "^3.6.1",
-        "normalize-package-data": "^5.0.0",
+        "normalize-package-data": "^6.0.0",
         "npm-package-arg": "^10.1.0",
         "npm-registry-fetch": "^15.0.0",
         "proc-log": "^3.0.0",
@@ -16350,6 +16350,20 @@
         "node": "^16.13.0 || >=18.0.0"
       }
     },
+    "workspaces/libnpmpublish/node_modules/normalize-package-data": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.0.tgz",
+      "integrity": "sha512-UL7ELRVxYBHBgYEtZCXjxuD5vPxnmvMGq0jp/dGPKKrN7tfsBh2IY7TlJ15WWwdjRWD3RJbnsygUurTK3xkPkg==",
+      "dependencies": {
+        "hosted-git-info": "^7.0.0",
+        "is-core-module": "^2.8.1",
+        "semver": "^7.3.5",
+        "validate-npm-package-license": "^3.0.4"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "workspaces/libnpmsearch": {
       "version": "6.0.2",
       "license": "ISC",
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index 6c0edc5be9246..f2c117a0bef5c 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -39,7 +39,7 @@
   "homepage": "https://npmjs.com/package/libnpmpublish",
   "dependencies": {
     "ci-info": "^3.6.1",
-    "normalize-package-data": "^5.0.0",
+    "normalize-package-data": "^6.0.0",
     "npm-package-arg": "^10.1.0",
     "npm-registry-fetch": "^15.0.0",
     "proc-log": "^3.0.0",

From eb3aa2c18fe3cdd5e9b351d5a1a3627032fc5963 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 12:14:12 -0700
Subject: [PATCH 34/68] deps: npm-package-arg@11.0.0

---
 mock-registry/package.json                    |   2 +-
 node_modules/.gitignore                       |  15 +-
 .../node_modules/hosted-git-info/LICENSE      |   0
 .../hosted-git-info/lib/from-url.js           |   0
 .../node_modules/hosted-git-info/lib/hosts.js |   0
 .../node_modules/hosted-git-info/lib/index.js |   0
 .../hosted-git-info/lib/parse-url.js          |   0
 .../node_modules/hosted-git-info/package.json |   0
 .../node_modules/npm-package-arg/LICENSE      |  15 +
 .../node_modules/npm-package-arg/lib/npa.js   | 431 ++++++++++++++++++
 .../node_modules/npm-package-arg/package.json |  59 +++
 node_modules/npm-package-arg/lib/npa.js       |  47 +-
 node_modules/npm-package-arg/package.json     |  17 +-
 .../node_modules/hosted-git-info/LICENSE      |  13 +
 .../hosted-git-info/lib/from-url.js           | 122 +++++
 .../node_modules/hosted-git-info/lib/hosts.js | 228 +++++++++
 .../node_modules/hosted-git-info/lib/index.js | 179 ++++++++
 .../hosted-git-info/lib/parse-url.js          |  78 ++++
 .../node_modules/hosted-git-info/package.json |  59 +++
 .../node_modules/npm-package-arg/LICENSE      |  15 +
 .../node_modules/npm-package-arg/lib/npa.js   | 431 ++++++++++++++++++
 .../node_modules/npm-package-arg/package.json |  59 +++
 .../node_modules/hosted-git-info/LICENSE      |  13 +
 .../hosted-git-info/lib/from-url.js           | 122 +++++
 .../node_modules/hosted-git-info/lib/hosts.js | 228 +++++++++
 .../node_modules/hosted-git-info/lib/index.js | 179 ++++++++
 .../hosted-git-info/lib/parse-url.js          |  78 ++++
 .../node_modules/hosted-git-info/package.json |  59 +++
 .../node_modules/npm-package-arg/LICENSE      |  15 +
 .../node_modules/npm-package-arg/lib/npa.js   | 431 ++++++++++++++++++
 .../node_modules/npm-package-arg/package.json |  59 +++
 .../node_modules/hosted-git-info/LICENSE      |  13 +
 .../hosted-git-info/lib/from-url.js           | 122 +++++
 .../node_modules/hosted-git-info/lib/hosts.js | 228 +++++++++
 .../node_modules/hosted-git-info/lib/index.js | 179 ++++++++
 .../hosted-git-info/lib/parse-url.js          |  78 ++++
 .../node_modules/hosted-git-info/package.json |  59 +++
 .../node_modules/npm-package-arg/LICENSE      |  15 +
 .../node_modules/npm-package-arg/lib/npa.js   | 431 ++++++++++++++++++
 .../node_modules/npm-package-arg/package.json |  59 +++
 package-lock.json                             | 161 ++++++-
 package.json                                  |   2 +-
 workspaces/arborist/package.json              |   2 +-
 workspaces/libnpmaccess/package.json          |   2 +-
 workspaces/libnpmdiff/package.json            |   2 +-
 workspaces/libnpmexec/package.json            |   2 +-
 workspaces/libnpmpack/package.json            |   2 +-
 workspaces/libnpmpublish/package.json         |   2 +-
 48 files changed, 4240 insertions(+), 73 deletions(-)
 rename node_modules/{npm-package-arg => init-package-json}/node_modules/hosted-git-info/LICENSE (100%)
 rename node_modules/{npm-package-arg => init-package-json}/node_modules/hosted-git-info/lib/from-url.js (100%)
 rename node_modules/{npm-package-arg => init-package-json}/node_modules/hosted-git-info/lib/hosts.js (100%)
 rename node_modules/{npm-package-arg => init-package-json}/node_modules/hosted-git-info/lib/index.js (100%)
 rename node_modules/{npm-package-arg => init-package-json}/node_modules/hosted-git-info/lib/parse-url.js (100%)
 rename node_modules/{npm-package-arg => init-package-json}/node_modules/hosted-git-info/package.json (100%)
 create mode 100644 node_modules/init-package-json/node_modules/npm-package-arg/LICENSE
 create mode 100644 node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js
 create mode 100644 node_modules/init-package-json/node_modules/npm-package-arg/package.json
 create mode 100644 node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE
 create mode 100644 node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js
 create mode 100644 node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js
 create mode 100644 node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js
 create mode 100644 node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js
 create mode 100644 node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json
 create mode 100644 node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
 create mode 100644 node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
 create mode 100644 node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
 create mode 100644 node_modules/npm-registry-fetch/node_modules/hosted-git-info/LICENSE
 create mode 100644 node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/from-url.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/hosts.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/index.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/parse-url.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/hosted-git-info/package.json
 create mode 100644 node_modules/npm-registry-fetch/node_modules/npm-package-arg/LICENSE
 create mode 100644 node_modules/npm-registry-fetch/node_modules/npm-package-arg/lib/npa.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/npm-package-arg/package.json
 create mode 100644 node_modules/pacote/node_modules/hosted-git-info/LICENSE
 create mode 100644 node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js
 create mode 100644 node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js
 create mode 100644 node_modules/pacote/node_modules/hosted-git-info/lib/index.js
 create mode 100644 node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js
 create mode 100644 node_modules/pacote/node_modules/hosted-git-info/package.json
 create mode 100644 node_modules/pacote/node_modules/npm-package-arg/LICENSE
 create mode 100644 node_modules/pacote/node_modules/npm-package-arg/lib/npa.js
 create mode 100644 node_modules/pacote/node_modules/npm-package-arg/package.json

diff --git a/mock-registry/package.json b/mock-registry/package.json
index c7624d6d25579..fa885b2cdb548 100644
--- a/mock-registry/package.json
+++ b/mock-registry/package.json
@@ -55,7 +55,7 @@
     "@npmcli/template-oss": "4.18.0",
     "json-stringify-safe": "^5.0.1",
     "nock": "^13.3.0",
-    "npm-package-arg": "^10.1.0",
+    "npm-package-arg": "^11.0.0",
     "pacote": "^16.0.0",
     "tap": "^16.3.4"
   }
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 2884505632a13..71679c6fedcfd 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -141,6 +141,10 @@
 !/inherits
 !/ini
 !/init-package-json
+!/init-package-json/node_modules/
+/init-package-json/node_modules/*
+!/init-package-json/node_modules/hosted-git-info
+!/init-package-json/node_modules/npm-package-arg
 !/ip-regex
 !/ip
 !/is-cidr
@@ -220,11 +224,12 @@
 !/npm-install-checks
 !/npm-normalize-package-bin
 !/npm-package-arg
-!/npm-package-arg/node_modules/
-/npm-package-arg/node_modules/*
-!/npm-package-arg/node_modules/hosted-git-info
 !/npm-packlist
 !/npm-pick-manifest
+!/npm-pick-manifest/node_modules/
+/npm-pick-manifest/node_modules/*
+!/npm-pick-manifest/node_modules/hosted-git-info
+!/npm-pick-manifest/node_modules/npm-package-arg
 !/npm-profile
 !/npm-registry-fetch
 !/npm-registry-fetch/node_modules/
@@ -233,7 +238,9 @@
 /npm-registry-fetch/node_modules/@npmcli/*
 !/npm-registry-fetch/node_modules/@npmcli/agent
 !/npm-registry-fetch/node_modules/cacache
+!/npm-registry-fetch/node_modules/hosted-git-info
 !/npm-registry-fetch/node_modules/make-fetch-happen
+!/npm-registry-fetch/node_modules/npm-package-arg
 !/npm-user-validate
 !/npmlog
 !/once
@@ -245,6 +252,8 @@
 /pacote/node_modules/@npmcli/*
 !/pacote/node_modules/@npmcli/git
 !/pacote/node_modules/cacache
+!/pacote/node_modules/hosted-git-info
+!/pacote/node_modules/npm-package-arg
 !/parse-conflict-json
 !/path-is-absolute
 !/path-key
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE b/node_modules/init-package-json/node_modules/hosted-git-info/LICENSE
similarity index 100%
rename from node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE
rename to node_modules/init-package-json/node_modules/hosted-git-info/LICENSE
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/from-url.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js
similarity index 100%
rename from node_modules/npm-package-arg/node_modules/hosted-git-info/lib/from-url.js
rename to node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/hosts.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js
similarity index 100%
rename from node_modules/npm-package-arg/node_modules/hosted-git-info/lib/hosts.js
rename to node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js
similarity index 100%
rename from node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js
rename to node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js
similarity index 100%
rename from node_modules/npm-package-arg/node_modules/hosted-git-info/lib/parse-url.js
rename to node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json b/node_modules/init-package-json/node_modules/hosted-git-info/package.json
similarity index 100%
rename from node_modules/npm-package-arg/node_modules/hosted-git-info/package.json
rename to node_modules/init-package-json/node_modules/hosted-git-info/package.json
diff --git a/node_modules/init-package-json/node_modules/npm-package-arg/LICENSE b/node_modules/init-package-json/node_modules/npm-package-arg/LICENSE
new file mode 100644
index 0000000000000..19cec97b18468
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/npm-package-arg/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js b/node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js
new file mode 100644
index 0000000000000..36bd18cd9f9a6
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js
@@ -0,0 +1,431 @@
+'use strict'
+module.exports = npa
+module.exports.resolve = resolve
+module.exports.toPurl = toPurl
+module.exports.Result = Result
+
+const url = require('url')
+const HostedGit = require('hosted-git-info')
+const semver = require('semver')
+const path = global.FAKE_WINDOWS ? require('path').win32 : require('path')
+const validatePackageName = require('validate-npm-package-name')
+const { homedir } = require('os')
+const log = require('proc-log')
+
+const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
+const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
+const isURL = /^(?:git[+])?[a-z]+:/i
+const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
+const isFilename = /[.](?:tgz|tar.gz|tar)$/i
+
+function npa (arg, where) {
+  let name
+  let spec
+  if (typeof arg === 'object') {
+    if (arg instanceof Result && (!where || where === arg.where)) {
+      return arg
+    } else if (arg.name && arg.rawSpec) {
+      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
+    } else {
+      return npa(arg.raw, where || arg.where)
+    }
+  }
+  const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@')
+  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
+  if (isURL.test(arg)) {
+    spec = arg
+  } else if (isGit.test(arg)) {
+    spec = `git+ssh://${arg}`
+  } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) {
+    spec = arg
+  } else if (nameEndsAt > 0) {
+    name = namePart
+    spec = arg.slice(nameEndsAt + 1) || '*'
+  } else {
+    const valid = validatePackageName(arg)
+    if (valid.validForOldPackages) {
+      name = arg
+      spec = '*'
+    } else {
+      spec = arg
+    }
+  }
+  return resolve(name, spec, where, arg)
+}
+
+const isFilespec = isWindows ? /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
+
+function resolve (name, spec, where, arg) {
+  const res = new Result({
+    raw: arg,
+    name: name,
+    rawSpec: spec,
+    fromArgument: arg != null,
+  })
+
+  if (name) {
+    res.setName(name)
+  }
+
+  if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) {
+    return fromFile(res, where)
+  } else if (spec && /^npm:/i.test(spec)) {
+    return fromAlias(res, where)
+  }
+
+  const hosted = HostedGit.fromUrl(spec, {
+    noGitPlus: true,
+    noCommittish: true,
+  })
+  if (hosted) {
+    return fromHostedGit(res, hosted)
+  } else if (spec && isURL.test(spec)) {
+    return fromURL(res)
+  } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) {
+    return fromFile(res, where)
+  } else {
+    return fromRegistry(res)
+  }
+}
+
+const defaultRegistry = 'https://registry.npmjs.org'
+
+function toPurl (arg, reg = defaultRegistry) {
+  const res = npa(arg)
+
+  if (res.type !== 'version') {
+    throw invalidPurlType(res.type, res.raw)
+  }
+
+  // URI-encode leading @ of scoped packages
+  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
+  if (reg !== defaultRegistry) {
+    purl += '?repository_url=' + reg
+  }
+
+  return purl
+}
+
+function invalidPackageName (name, valid, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
+  err.code = 'EINVALIDPACKAGENAME'
+  return err
+}
+
+function invalidTagName (name, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
+  err.code = 'EINVALIDTAGNAME'
+  return err
+}
+
+function invalidPurlType (type, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
+  err.code = 'EINVALIDPURLTYPE'
+  return err
+}
+
+function Result (opts) {
+  this.type = opts.type
+  this.registry = opts.registry
+  this.where = opts.where
+  if (opts.raw == null) {
+    this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec
+  } else {
+    this.raw = opts.raw
+  }
+
+  this.name = undefined
+  this.escapedName = undefined
+  this.scope = undefined
+  this.rawSpec = opts.rawSpec || ''
+  this.saveSpec = opts.saveSpec
+  this.fetchSpec = opts.fetchSpec
+  if (opts.name) {
+    this.setName(opts.name)
+  }
+  this.gitRange = opts.gitRange
+  this.gitCommittish = opts.gitCommittish
+  this.gitSubdir = opts.gitSubdir
+  this.hosted = opts.hosted
+}
+
+Result.prototype.setName = function (name) {
+  const valid = validatePackageName(name)
+  if (!valid.validForOldPackages) {
+    throw invalidPackageName(name, valid, this.raw)
+  }
+
+  this.name = name
+  this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
+  // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
+  this.escapedName = name.replace('/', '%2f')
+  return this
+}
+
+Result.prototype.toString = function () {
+  const full = []
+  if (this.name != null && this.name !== '') {
+    full.push(this.name)
+  }
+  const spec = this.saveSpec || this.fetchSpec || this.rawSpec
+  if (spec != null && spec !== '') {
+    full.push(spec)
+  }
+  return full.length ? full.join('@') : this.raw
+}
+
+Result.prototype.toJSON = function () {
+  const result = Object.assign({}, this)
+  delete result.hosted
+  return result
+}
+
+function setGitCommittish (res, committish) {
+  if (!committish) {
+    res.gitCommittish = null
+    return res
+  }
+
+  // for each :: separated item:
+  for (const part of committish.split('::')) {
+    // if the item has no : then it is a commit-ish
+    if (!part.includes(':')) {
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a committish')
+      }
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a second committish')
+      }
+      res.gitCommittish = part
+      continue
+    }
+    // split on name:value
+    const [name, value] = part.split(':')
+    // if name is semver do semver lookup of ref or tag
+    if (name === 'semver') {
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a semver range')
+      }
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a second semver range')
+      }
+      res.gitRange = decodeURIComponent(value)
+      continue
+    }
+    if (name === 'path') {
+      if (res.gitSubdir) {
+        throw new Error('cannot override existing path with a second path')
+      }
+      res.gitSubdir = `/${value}`
+      continue
+    }
+    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
+  }
+
+  return res
+}
+
+function fromFile (res, where) {
+  if (!where) {
+    where = process.cwd()
+  }
+  res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory'
+  res.where = where
+
+  // always put a trailing '/' on `where` when resolving urls, or else
+  // file:foo from /path/to/bar goes to /path/to/foo, when we want
+  // it to be /path/to/bar/foo
+
+  let specUrl
+  let resolvedUrl
+  const prefix = (!/^file:/.test(res.rawSpec) ? 'file:' : '')
+  const rawWithPrefix = prefix + res.rawSpec
+  let rawNoPrefix = rawWithPrefix.replace(/^file:/, '')
+  try {
+    resolvedUrl = new url.URL(rawWithPrefix, `file://${path.resolve(where)}/`)
+    specUrl = new url.URL(rawWithPrefix)
+  } catch (originalError) {
+    const er = new Error('Invalid file: URL, must comply with RFC 8909')
+    throw Object.assign(er, {
+      raw: res.rawSpec,
+      spec: res,
+      where,
+      originalError,
+    })
+  }
+
+  // environment switch for testing
+  if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') {
+    // XXX backwards compatibility lack of compliance with 8909
+    // Remove when we want a breaking change to come into RFC compliance.
+    if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
+      const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
+      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
+      specUrl = new url.URL(rawSpec)
+      rawNoPrefix = rawSpec.replace(/^file:/, '')
+    }
+    // turn file:/../foo into file:../foo
+    // for 1, 2 or 3 leading slashes since we attempted
+    // in the previous step to make it a file protocol url with a leading slash
+    if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
+      const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
+      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
+      specUrl = new url.URL(rawSpec)
+      rawNoPrefix = rawSpec.replace(/^file:/, '')
+    }
+    // XXX end 8909 violation backwards compatibility section
+  }
+
+  // file:foo - relative url to ./foo
+  // file:/foo - absolute path /foo
+  // file:///foo - absolute path to /foo, no authority host
+  // file://localhost/foo - absolute path to /foo, on localhost
+  // file://foo - absolute path to / on foo host (error!)
+  if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
+    const msg = `Invalid file: URL, must be absolute if // present`
+    throw Object.assign(new Error(msg), {
+      raw: res.rawSpec,
+      parsed: resolvedUrl,
+    })
+  }
+
+  // turn /C:/blah into just C:/blah on windows
+  let specPath = decodeURIComponent(specUrl.pathname)
+  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
+  if (isWindows) {
+    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
+    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
+  }
+
+  // replace ~ with homedir, but keep the ~ in the saveSpec
+  // otherwise, make it relative to where param
+  if (/^\/~(\/|$)/.test(specPath)) {
+    res.saveSpec = `file:${specPath.substr(1)}`
+    resolvedPath = path.resolve(homedir(), specPath.substr(3))
+  } else if (!path.isAbsolute(rawNoPrefix)) {
+    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
+  } else {
+    res.saveSpec = `file:${path.resolve(resolvedPath)}`
+  }
+
+  res.fetchSpec = path.resolve(where, resolvedPath)
+  return res
+}
+
+function fromHostedGit (res, hosted) {
+  res.type = 'git'
+  res.hosted = hosted
+  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
+  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
+  return setGitCommittish(res, hosted.committish)
+}
+
+function unsupportedURLType (protocol, spec) {
+  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
+  err.code = 'EUNSUPPORTEDPROTOCOL'
+  return err
+}
+
+function matchGitScp (spec) {
+  // git ssh specifiers are overloaded to also use scp-style git
+  // specifiers, so we have to parse those out and treat them specially.
+  // They are NOT true URIs, so we can't hand them to `url.parse`.
+  //
+  // This regex looks for things that look like:
+  // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
+  //
+  // ...and various combinations. The username in the beginning is *required*.
+  const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
+  return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && {
+    fetchSpec: matched[1],
+    gitCommittish: matched[2] == null ? null : matched[2],
+  }
+}
+
+function fromURL (res) {
+  // eslint-disable-next-line node/no-deprecated-api
+  const urlparse = url.parse(res.rawSpec)
+  res.saveSpec = res.rawSpec
+  // check the protocol, and then see if it's git or not
+  switch (urlparse.protocol) {
+    case 'git:':
+    case 'git+http:':
+    case 'git+https:':
+    case 'git+rsync:':
+    case 'git+ftp:':
+    case 'git+file:':
+    case 'git+ssh:': {
+      res.type = 'git'
+      const match = urlparse.protocol === 'git+ssh:' ? matchGitScp(res.rawSpec)
+        : null
+      if (match) {
+        setGitCommittish(res, match.gitCommittish)
+        res.fetchSpec = match.fetchSpec
+      } else {
+        setGitCommittish(res, urlparse.hash != null ? urlparse.hash.slice(1) : '')
+        urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '')
+        if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) {
+          // keep the drive letter : on windows file paths
+          urlparse.host += ':'
+          urlparse.hostname += ':'
+        }
+        delete urlparse.hash
+        res.fetchSpec = url.format(urlparse)
+      }
+      break
+    }
+    case 'http:':
+    case 'https:':
+      res.type = 'remote'
+      res.fetchSpec = res.saveSpec
+      break
+
+    default:
+      throw unsupportedURLType(urlparse.protocol, res.rawSpec)
+  }
+
+  return res
+}
+
+function fromAlias (res, where) {
+  const subSpec = npa(res.rawSpec.substr(4), where)
+  if (subSpec.type === 'alias') {
+    throw new Error('nested aliases not supported')
+  }
+
+  if (!subSpec.registry) {
+    throw new Error('aliases only work for registry deps')
+  }
+
+  res.subSpec = subSpec
+  res.registry = true
+  res.type = 'alias'
+  res.saveSpec = null
+  res.fetchSpec = null
+  return res
+}
+
+function fromRegistry (res) {
+  res.registry = true
+  const spec = res.rawSpec.trim()
+  // no save spec for registry components: we save based on the fetched
+  // version, not on the argument, so this can't compute that.
+  res.saveSpec = null
+  res.fetchSpec = spec
+  const version = semver.valid(spec, true)
+  const range = semver.validRange(spec, true)
+  if (version) {
+    res.type = 'version'
+  } else if (range) {
+    res.type = 'range'
+  } else {
+    if (encodeURIComponent(spec) !== spec) {
+      throw invalidTagName(spec, res.raw)
+    }
+    res.type = 'tag'
+  }
+  return res
+}
diff --git a/node_modules/init-package-json/node_modules/npm-package-arg/package.json b/node_modules/init-package-json/node_modules/npm-package-arg/package.json
new file mode 100644
index 0000000000000..bb9e71b258a93
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/npm-package-arg/package.json
@@ -0,0 +1,59 @@
+{
+  "name": "npm-package-arg",
+  "version": "10.1.0",
+  "description": "Parse the things that can be arguments to `npm install`",
+  "main": "./lib/npa.js",
+  "directories": {
+    "test": "test"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "dependencies": {
+    "hosted-git-info": "^6.0.0",
+    "proc-log": "^3.0.0",
+    "semver": "^7.3.5",
+    "validate-npm-package-name": "^5.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.10.0",
+    "tap": "^16.0.1"
+  },
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "npmclilint": "npmcli-lint",
+    "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/npm-package-arg.git"
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/npm-package-arg/issues"
+  },
+  "homepage": "https://github.com/npm/npm-package-arg",
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "tap": {
+    "branches": 97,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.10.0"
+  }
+}
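
For reviewers skimming the vendored code above: lib/npa.js classifies a spec string and returns a Result whose type, name, and fetchSpec drive the rest of the install pipeline. A minimal usage sketch (illustrative only, not part of the patch; output values are inferred from the npm-package-arg@10 implementation above):

    const npa = require('npm-package-arg')

    const tag = npa('foo@latest')
    console.log(tag.type, tag.name, tag.fetchSpec)             // tag foo latest

    const scoped = npa('@scope/pkg@^1.2.0')
    console.log(scoped.type, scoped.scope, scoped.escapedName) // range @scope @scope%2fpkg

    const git = npa('github:npm/cli#semver:^9')
    console.log(git.type, git.gitRange)                        // git ^9
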
diff --git a/node_modules/npm-package-arg/lib/npa.js b/node_modules/npm-package-arg/lib/npa.js
index 36bd18cd9f9a6..f5ede2326e7b4 100644
--- a/node_modules/npm-package-arg/lib/npa.js
+++ b/node_modules/npm-package-arg/lib/npa.js
@@ -257,40 +257,23 @@ function fromFile (res, where) {
     })
   }
 
-  // environment switch for testing
-  if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') {
-    // XXX backwards compatibility lack of compliance with 8909
-    // Remove when we want a breaking change to come into RFC compliance.
-    if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
-      const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
-      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
-      specUrl = new url.URL(rawSpec)
-      rawNoPrefix = rawSpec.replace(/^file:/, '')
-    }
-    // turn file:/../foo into file:../foo
-    // for 1, 2 or 3 leading slashes since we attempted
-    // in the previous step to make it a file protocol url with a leading slash
-    if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
-      const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
-      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
-      specUrl = new url.URL(rawSpec)
-      rawNoPrefix = rawSpec.replace(/^file:/, '')
-    }
-    // XXX end 8909 violation backwards compatibility section
-  }
-
-  // file:foo - relative url to ./foo
-  // file:/foo - absolute path /foo
-  // file:///foo - absolute path to /foo, no authority host
-  // file://localhost/foo - absolute path to /foo, on localhost
-  // file://foo - absolute path to / on foo host (error!)
+  // XXX backwards compatibility lack of compliance with RFC 8909
   if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
-    const msg = `Invalid file: URL, must be absolute if // present`
-    throw Object.assign(new Error(msg), {
-      raw: res.rawSpec,
-      parsed: resolvedUrl,
-    })
+    const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
+    resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
+    specUrl = new url.URL(rawSpec)
+    rawNoPrefix = rawSpec.replace(/^file:/, '')
+  }
+  // turn file:/../foo into file:../foo
+  // for 1, 2 or 3 leading slashes since we attempted
+  // in the previous step to make it a file protocol url with a leading slash
+  if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
+    const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
+    resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
+    specUrl = new url.URL(rawSpec)
+    rawNoPrefix = rawSpec.replace(/^file:/, '')
   }
+  // XXX end RFC 8909 violation backwards compatibility section
 
   // turn /C:/blah into just C:/blah on windows
   let specPath = decodeURIComponent(specUrl.pathname)
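
The hunk above makes the RFC 8909 compatibility fixups unconditional: the NPM_PACKAGE_ARG_8909_STRICT escape hatch and the "must be absolute if // present" error are removed, so a file: URL with a non-localhost host is always rewritten rather than rejected. A sketch of the resulting behavior on a POSIX system (hypothetical paths; values inferred from the code above, not part of the patch):

    const npa = require('npm-package-arg')

    // 'file://foo' parses with host 'foo'; the fixup rewrites it to
    // 'file:///foo' so it resolves as a local path instead of throwing
    const res = npa('file://foo', '/tmp/project')
    console.log(res.type, res.saveSpec, res.fetchSpec) // directory file:/foo /foo
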
diff --git a/node_modules/npm-package-arg/package.json b/node_modules/npm-package-arg/package.json
index bb9e71b258a93..9ba1d135f3ebf 100644
--- a/node_modules/npm-package-arg/package.json
+++ b/node_modules/npm-package-arg/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-package-arg",
-  "version": "10.1.0",
+  "version": "11.0.0",
   "description": "Parse the things that can be arguments to `npm install`",
   "main": "./lib/npa.js",
   "directories": {
@@ -11,14 +11,14 @@
     "lib/"
   ],
   "dependencies": {
-    "hosted-git-info": "^6.0.0",
+    "hosted-git-info": "^7.0.0",
     "proc-log": "^3.0.0",
     "semver": "^7.3.5",
     "validate-npm-package-name": "^5.0.0"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.10.0",
+    "@npmcli/template-oss": "4.18.0",
     "tap": "^16.0.1"
   },
   "scripts": {
@@ -43,7 +43,7 @@
   },
   "homepage": "https://github.com/npm/npm-package-arg",
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "tap": {
     "branches": 97,
@@ -54,6 +54,13 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.10.0"
+    "version": "4.18.0",
+    "publish": true,
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
   }
 }
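
Since npm-package-arg@11 is what now ships at the root, a short sketch of its toPurl() helper (defined in lib/npa.js above; example values inferred from that code, not part of the patch):

    const { toPurl } = require('npm-package-arg')

    console.log(toPurl('foo@1.0.0'))        // pkg:npm/foo@1.0.0
    console.log(toPurl('@scope/pkg@1.2.3')) // pkg:npm/%40scope/pkg@1.2.3
    console.log(toPurl('foo@1.0.0', 'https://registry.example.com'))
    // pkg:npm/foo@1.0.0?repository_url=https://registry.example.com

    // only exact versions can become purls; anything else throws
    try { toPurl('foo@^1.0.0') } catch (err) { console.log(err.code) } // EINVALIDPURLTYPE
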
diff --git a/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE b/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE
new file mode 100644
index 0000000000000..45055763dc838
--- /dev/null
+++ b/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright (c) 2015, Rebecca Turner
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js b/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js
new file mode 100644
index 0000000000000..efc1247d59d12
--- /dev/null
+++ b/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js
@@ -0,0 +1,122 @@
+'use strict'
+
+const parseUrl = require('./parse-url')
+
+// look for github shorthand inputs, such as npm/cli
+const isGitHubShorthand = (arg) => {
+  // it cannot contain whitespace before the first #
+  // it cannot start with a / because that's probably an absolute file path
+  // but it must include a slash since repos are username/repository
+  // it cannot start with a . because that's probably a relative file path
+  // it cannot start with an @ because that's a scoped package if it passes the other tests
+  // it cannot contain a : before a # because that tells us that there's a protocol
+  // a second / may not exist before a #
+  const firstHash = arg.indexOf('#')
+  const firstSlash = arg.indexOf('/')
+  const secondSlash = arg.indexOf('/', firstSlash + 1)
+  const firstColon = arg.indexOf(':')
+  const firstSpace = /\s/.exec(arg)
+  const firstAt = arg.indexOf('@')
+
+  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
+  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
+  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
+  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
+  const hasSlash = firstSlash > 0
+  // if a # is found, what we really want to know is that the character
+  // immediately before # is not a /
+  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
+  const doesNotStartWithDot = !arg.startsWith('.')
+
+  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
+    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
+    secondSlashOnlyAfterHash
+}
+
+module.exports = (giturl, opts, { gitHosts, protocols }) => {
+  if (!giturl) {
+    return
+  }
+
+  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
+  const parsed = parseUrl(correctedUrl, protocols)
+  if (!parsed) {
+    return
+  }
+
+  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
+  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
+    ? parsed.hostname.slice(4)
+    : parsed.hostname]
+  const gitHostName = gitHostShortcut || gitHostDomain
+  if (!gitHostName) {
+    return
+  }
+
+  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
+  let auth = null
+  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
+    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
+  }
+
+  let committish = null
+  let user = null
+  let project = null
+  let defaultRepresentation = null
+
+  try {
+    if (gitHostShortcut) {
+      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
+      const firstAt = pathname.indexOf('@')
+      // we ignore auth for shortcuts, so just trim it out
+      if (firstAt > -1) {
+        pathname = pathname.slice(firstAt + 1)
+      }
+
+      const lastSlash = pathname.lastIndexOf('/')
+      if (lastSlash > -1) {
+        user = decodeURIComponent(pathname.slice(0, lastSlash))
+        // we want nulls only, never empty strings
+        if (!user) {
+          user = null
+        }
+        project = decodeURIComponent(pathname.slice(lastSlash + 1))
+      } else {
+        project = decodeURIComponent(pathname)
+      }
+
+      if (project.endsWith('.git')) {
+        project = project.slice(0, -4)
+      }
+
+      if (parsed.hash) {
+        committish = decodeURIComponent(parsed.hash.slice(1))
+      }
+
+      defaultRepresentation = 'shortcut'
+    } else {
+      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
+        return
+      }
+
+      const segments = gitHostInfo.extract(parsed)
+      if (!segments) {
+        return
+      }
+
+      user = segments.user && decodeURIComponent(segments.user)
+      project = decodeURIComponent(segments.project)
+      committish = decodeURIComponent(segments.committish)
+      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
+    }
+  } catch (err) {
+    /* istanbul ignore else */
+    if (err instanceof URIError) {
+      return
+    } else {
+      throw err
+    }
+  }
+
+  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
+}
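
The isGitHubShorthand() heuristic above is what lets bare user/repo strings act as github: shortcuts. A sketch of the observable behavior through the public entry point (outputs inferred from the code above; illustrative only):

    const GitHost = require('hosted-git-info')

    console.log(GitHost.fromUrl('npm/cli').shortcut()) // github:npm/cli
    console.log(GitHost.fromUrl('./npm/cli'))          // undefined (leading . reads as a relative path)
    console.log(GitHost.fromUrl('npm/cli/extra'))      // undefined (second / before any #)
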
diff --git a/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js b/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js
new file mode 100644
index 0000000000000..013712b7842c8
--- /dev/null
+++ b/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js
@@ -0,0 +1,228 @@
+/* eslint-disable max-len */
+
+'use strict'
+
+const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
+const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
+const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-')
+
+const defaults = {
+  sshtemplate: ({ domain, user, project, committish }) =>
+    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, user, project, committish }) =>
+    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
+  browsetemplate: ({ domain, user, project, committish, treepath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  docstemplate: ({ domain, user, project, treepath, committish }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
+  shortcuttemplate: ({ type, user, project, committish }) =>
+    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ user, project, committish }) =>
+    `${user}/${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, user, project }) =>
+    `https://${domain}/${user}/${project}/issues`,
+  hashformat: formatHashFragment,
+}
+
+const hosts = {}
+hosts.github = {
+  // First two are insecure and generally shouldn't be used any more, but
+  // they are still supported.
+  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'github.com',
+  treepath: 'tree',
+  blobpath: 'blob',
+  editpath: 'edit',
+  filetemplate: ({ auth, user, project, committish, path }) =>
+    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
+  gittemplate: ({ auth, domain, user, project, committish }) =>
+    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, type, committish] = url.pathname.split('/', 5)
+    if (type && type !== 'tree') {
+      return
+    }
+
+    if (!type) {
+      committish = url.hash.slice(1)
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish }
+  },
+}
+
+hosts.bitbucket = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'bitbucket.org',
+  treepath: 'src',
+  blobpath: 'src',
+  editpath: '?mode=edit',
+  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (['get'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gitlab = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gitlab.com',
+  treepath: 'tree',
+  blobpath: 'tree',
+  editpath: '-/edit',
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    const path = url.pathname.slice(1)
+    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
+      return
+    }
+
+    const segments = path.split('/')
+    let project = segments.pop()
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    const user = segments.join('/')
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gist = {
+  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gist.github.com',
+  editpath: 'edit',
+  sshtemplate: ({ domain, project, committish }) =>
+    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, project, committish }) =>
+    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
+  browsetemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  docstemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  httpstemplate: ({ domain, project, committish }) =>
+    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ user, project, committish, path }) =>
+    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
+  shortcuttemplate: ({ type, project, committish }) =>
+    `${type}:${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ project, committish }) =>
+    `${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, project }) =>
+    `https://${domain}/${project}`,
+  gittemplate: ({ domain, project, committish }) =>
+    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ project, committish }) =>
+    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (aux === 'raw') {
+      return
+    }
+
+    if (!project) {
+      if (!user) {
+        return
+      }
+
+      project = user
+      user = null
+    }
+
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+  hashformat: function (fragment) {
+    return fragment && 'file-' + formatHashFragment(fragment)
+  },
+}
+
+hosts.sourcehut = {
+  protocols: ['git+ssh:', 'https:'],
+  domain: 'git.sr.ht',
+  treepath: 'tree',
+  blobpath: 'tree',
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
+  httpstemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
+  bugstemplate: ({ user, project }) =>
+    `https://todo.sr.ht/${user}/${project}`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+
+    // tarball url
+    if (['archive'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  hosts[name] = Object.assign({}, defaults, host)
+}
+
+module.exports = hosts
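
hosts.js is a table of per-host URL template functions layered over the shared defaults (the Object.assign loop at the bottom). A sketch of how those templates surface through GitHost (outputs inferred from the github entry above; illustrative only):

    const GitHost = require('hosted-git-info')

    const gh = GitHost.fromUrl('github:npm/cli#v10.0.0')
    console.log(gh.tarball())
    // https://codeload.github.com/npm/cli/tar.gz/v10.0.0
    console.log(gh.file('package.json'))
    // https://raw.githubusercontent.com/npm/cli/v10.0.0/package.json
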
diff --git a/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js b/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js
new file mode 100644
index 0000000000000..a7339c217e9a3
--- /dev/null
+++ b/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js
@@ -0,0 +1,179 @@
+'use strict'
+
+const LRU = require('lru-cache')
+const hosts = require('./hosts.js')
+const fromUrl = require('./from-url.js')
+const parseUrl = require('./parse-url.js')
+
+const cache = new LRU({ max: 1000 })
+
+class GitHost {
+  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
+    Object.assign(this, GitHost.#gitHosts[type], {
+      type,
+      user,
+      auth,
+      project,
+      committish,
+      default: defaultRepresentation,
+      opts,
+    })
+  }
+
+  static #gitHosts = { byShortcut: {}, byDomain: {} }
+  static #protocols = {
+    'git+ssh:': { name: 'sshurl' },
+    'ssh:': { name: 'sshurl' },
+    'git+https:': { name: 'https', auth: true },
+    'git:': { auth: true },
+    'http:': { auth: true },
+    'https:': { auth: true },
+    'git+http:': { auth: true },
+  }
+
+  static addHost (name, host) {
+    GitHost.#gitHosts[name] = host
+    GitHost.#gitHosts.byDomain[host.domain] = name
+    GitHost.#gitHosts.byShortcut[`${name}:`] = name
+    GitHost.#protocols[`${name}:`] = { name }
+  }
+
+  static fromUrl (giturl, opts) {
+    if (typeof giturl !== 'string') {
+      return
+    }
+
+    const key = giturl + JSON.stringify(opts || {})
+
+    if (!cache.has(key)) {
+      const hostArgs = fromUrl(giturl, opts, {
+        gitHosts: GitHost.#gitHosts,
+        protocols: GitHost.#protocols,
+      })
+      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
+    }
+
+    return cache.get(key)
+  }
+
+  static parseUrl (url) {
+    return parseUrl(url)
+  }
+
+  #fill (template, opts) {
+    if (typeof template !== 'function') {
+      return null
+    }
+
+    const options = { ...this, ...this.opts, ...opts }
+
+    // the path should always be set so we don't end up with 'undefined' in urls
+    if (!options.path) {
+      options.path = ''
+    }
+
+    // template functions will insert the leading slash themselves
+    if (options.path.startsWith('/')) {
+      options.path = options.path.slice(1)
+    }
+
+    if (options.noCommittish) {
+      options.committish = null
+    }
+
+    const result = template(options)
+    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
+  }
+
+  hash () {
+    return this.committish ? `#${this.committish}` : ''
+  }
+
+  ssh (opts) {
+    return this.#fill(this.sshtemplate, opts)
+  }
+
+  sshurl (opts) {
+    return this.#fill(this.sshurltemplate, opts)
+  }
+
+  browse (path, ...args) {
+    // not a string, treat path as opts
+    if (typeof path !== 'string') {
+      return this.#fill(this.browsetemplate, path)
+    }
+
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browsetreetemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  // If the path is known to be a file, then browseFile should be used. For some hosts
+  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
+  // and `/blob/` in the path. With the default committish of `HEAD`, the `/tree/`
+  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
+  // does not redirect to a different commit.
+  browseFile (path, ...args) {
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browseblobtemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  docs (opts) {
+    return this.#fill(this.docstemplate, opts)
+  }
+
+  bugs (opts) {
+    return this.#fill(this.bugstemplate, opts)
+  }
+
+  https (opts) {
+    return this.#fill(this.httpstemplate, opts)
+  }
+
+  git (opts) {
+    return this.#fill(this.gittemplate, opts)
+  }
+
+  shortcut (opts) {
+    return this.#fill(this.shortcuttemplate, opts)
+  }
+
+  path (opts) {
+    return this.#fill(this.pathtemplate, opts)
+  }
+
+  tarball (opts) {
+    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
+  }
+
+  file (path, opts) {
+    return this.#fill(this.filetemplate, { ...opts, path })
+  }
+
+  edit (path, opts) {
+    return this.#fill(this.edittemplate, { ...opts, path })
+  }
+
+  getDefaultRepresentation () {
+    return this.default
+  }
+
+  toString (opts) {
+    if (this.default && typeof this[this.default] === 'function') {
+      return this[this.default](opts)
+    }
+
+    return this.sshurl(opts)
+  }
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  GitHost.addHost(name, host)
+}
+
+module.exports = GitHost
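
GitHost.fromUrl() memoizes parses in an LRU cache keyed on the input plus its options, and toString() delegates to whichever representation the input arrived in. A sketch (outputs inferred from the class above; illustrative only):

    const GitHost = require('hosted-git-info')

    const a = GitHost.fromUrl('git+ssh://git@github.com/npm/cli.git')
    console.log(a.default)    // sshurl
    console.log(a.toString()) // git+ssh://git@github.com/npm/cli.git
    console.log(a.https())    // git+https://github.com/npm/cli.git

    // repeat lookups with the same input and options hit the cache
    console.log(a === GitHost.fromUrl('git+ssh://git@github.com/npm/cli.git')) // true
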
diff --git a/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js
new file mode 100644
index 0000000000000..7d5489c008ab4
--- /dev/null
+++ b/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js
@@ -0,0 +1,78 @@
+const url = require('url')
+
+const lastIndexOfBefore = (str, char, beforeChar) => {
+  const startPosition = str.indexOf(beforeChar)
+  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
+}
+
+const safeUrl = (u) => {
+  try {
+    return new url.URL(u)
+  } catch {
+    // this fn should never throw
+  }
+}
+
+// accepts input like git:github.com:user/repo and inserts the // after the first :
+const correctProtocol = (arg, protocols) => {
+  const firstColon = arg.indexOf(':')
+  const proto = arg.slice(0, firstColon + 1)
+  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
+    return arg
+  }
+
+  const firstAt = arg.indexOf('@')
+  if (firstAt > -1) {
+    if (firstAt > firstColon) {
+      return `git+ssh://${arg}`
+    } else {
+      return arg
+    }
+  }
+
+  const doubleSlash = arg.indexOf('//')
+  if (doubleSlash === firstColon + 1) {
+    return arg
+  }
+
+  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
+}
+
+// attempt to correct an scp-style url so that it will parse with `new URL()`
+const correctUrl = (giturl) => {
+// ignore any @ that comes after the first hash, since that denotes the start
+// of a committish, which can contain @ characters
+  const firstAt = lastIndexOfBefore(giturl, '@', '#')
+  // ignore colons that come after the hash since that could include colons such as:
+  // git@github.com:user/package-2#semver:^1.0.0
+  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
+
+  if (lastColonBeforeHash > firstAt) {
+    // the last : comes after the first @ (or there is no @)
+    // like it would in:
+    // proto://hostname.com:user/repo
+    // username@hostname.com:user/repo
+    // :password@hostname.com:user/repo
+    // username:password@hostname.com:user/repo
+    // proto://username@hostname.com:user/repo
+    // proto://:password@hostname.com:user/repo
+    // proto://username:password@hostname.com:user/repo
+    // then we replace the last : with a / to create a valid path
+    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
+  }
+
+  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
+    // we have no : at all
+    // as it would be in:
+    // username@hostname.com/user/repo
+    // then we prepend a protocol
+    giturl = `git+ssh://${giturl}`
+  }
+
+  return giturl
+}
+
+module.exports = (giturl, protocols) => {
+  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
+  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
+}
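
correctUrl() is what makes scp-style git addresses survive new URL(): the last ':' before any '#' becomes a '/', and git+ssh:// is prepended when no protocol remains. A sketch (outputs inferred from the code above; illustrative only):

    const GitHost = require('hosted-git-info')

    const scp = GitHost.fromUrl('git@github.com:npm/cli.git#v10.0.0')
    console.log(scp.type, scp.user, scp.project, scp.committish)
    // github npm cli v10.0.0

    // the static parseUrl helper exposes the corrected URL directly
    console.log(GitHost.parseUrl('git@github.com:npm/cli.git').href)
    // git+ssh://git@github.com/npm/cli.git
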
diff --git a/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json b/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json
new file mode 100644
index 0000000000000..612259948afe7
--- /dev/null
+++ b/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json
@@ -0,0 +1,59 @@
+{
+  "name": "hosted-git-info",
+  "version": "6.1.1",
+  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
+  "main": "./lib/index.js",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/hosted-git-info.git"
+  },
+  "keywords": [
+    "git",
+    "github",
+    "bitbucket",
+    "gitlab"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/hosted-git-info/issues"
+  },
+  "homepage": "https://github.com/npm/hosted-git-info",
+  "scripts": {
+    "posttest": "npm run lint",
+    "snap": "tap",
+    "test": "tap",
+    "test:coverage": "tap --coverage-report=html",
+    "lint": "eslint \"**/*.js\"",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run lint -- --fix",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "dependencies": {
+    "lru-cache": "^7.5.1"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.7.1",
+    "tap": "^16.0.1"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "tap": {
+    "color": 1,
+    "coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.7.1"
+  }
+}
diff --git a/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE b/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
new file mode 100644
index 0000000000000..19cec97b18468
--- /dev/null
+++ b/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js b/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
new file mode 100644
index 0000000000000..36bd18cd9f9a6
--- /dev/null
+++ b/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
@@ -0,0 +1,431 @@
+'use strict'
+module.exports = npa
+module.exports.resolve = resolve
+module.exports.toPurl = toPurl
+module.exports.Result = Result
+
+const url = require('url')
+const HostedGit = require('hosted-git-info')
+const semver = require('semver')
+const path = global.FAKE_WINDOWS ? require('path').win32 : require('path')
+const validatePackageName = require('validate-npm-package-name')
+const { homedir } = require('os')
+const log = require('proc-log')
+
+const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
+const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
+const isURL = /^(?:git[+])?[a-z]+:/i
+const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
+const isFilename = /[.](?:tgz|tar.gz|tar)$/i
+
+function npa (arg, where) {
+  let name
+  let spec
+  if (typeof arg === 'object') {
+    if (arg instanceof Result && (!where || where === arg.where)) {
+      return arg
+    } else if (arg.name && arg.rawSpec) {
+      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
+    } else {
+      return npa(arg.raw, where || arg.where)
+    }
+  }
+  const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@')
+  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
+  if (isURL.test(arg)) {
+    spec = arg
+  } else if (isGit.test(arg)) {
+    spec = `git+ssh://${arg}`
+  } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) {
+    spec = arg
+  } else if (nameEndsAt > 0) {
+    name = namePart
+    spec = arg.slice(nameEndsAt + 1) || '*'
+  } else {
+    const valid = validatePackageName(arg)
+    if (valid.validForOldPackages) {
+      name = arg
+      spec = '*'
+    } else {
+      spec = arg
+    }
+  }
+  return resolve(name, spec, where, arg)
+}
+
+const isFilespec = isWindows ? /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
+
+function resolve (name, spec, where, arg) {
+  const res = new Result({
+    raw: arg,
+    name: name,
+    rawSpec: spec,
+    fromArgument: arg != null,
+  })
+
+  if (name) {
+    res.setName(name)
+  }
+
+  if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) {
+    return fromFile(res, where)
+  } else if (spec && /^npm:/i.test(spec)) {
+    return fromAlias(res, where)
+  }
+
+  const hosted = HostedGit.fromUrl(spec, {
+    noGitPlus: true,
+    noCommittish: true,
+  })
+  if (hosted) {
+    return fromHostedGit(res, hosted)
+  } else if (spec && isURL.test(spec)) {
+    return fromURL(res)
+  } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) {
+    return fromFile(res, where)
+  } else {
+    return fromRegistry(res)
+  }
+}
+
+const defaultRegistry = 'https://registry.npmjs.org'
+
+function toPurl (arg, reg = defaultRegistry) {
+  const res = npa(arg)
+
+  if (res.type !== 'version') {
+    throw invalidPurlType(res.type, res.raw)
+  }
+
+  // URI-encode leading @ of scoped packages
+  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
+  if (reg !== defaultRegistry) {
+    purl += '?repository_url=' + reg
+  }
+
+  return purl
+}
+
+function invalidPackageName (name, valid, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
+  err.code = 'EINVALIDPACKAGENAME'
+  return err
+}
+
+function invalidTagName (name, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
+  err.code = 'EINVALIDTAGNAME'
+  return err
+}
+
+function invalidPurlType (type, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
+  err.code = 'EINVALIDPURLTYPE'
+  return err
+}
+
+function Result (opts) {
+  this.type = opts.type
+  this.registry = opts.registry
+  this.where = opts.where
+  if (opts.raw == null) {
+    this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec
+  } else {
+    this.raw = opts.raw
+  }
+
+  this.name = undefined
+  this.escapedName = undefined
+  this.scope = undefined
+  this.rawSpec = opts.rawSpec || ''
+  this.saveSpec = opts.saveSpec
+  this.fetchSpec = opts.fetchSpec
+  if (opts.name) {
+    this.setName(opts.name)
+  }
+  this.gitRange = opts.gitRange
+  this.gitCommittish = opts.gitCommittish
+  this.gitSubdir = opts.gitSubdir
+  this.hosted = opts.hosted
+}
+
+Result.prototype.setName = function (name) {
+  const valid = validatePackageName(name)
+  if (!valid.validForOldPackages) {
+    throw invalidPackageName(name, valid, this.raw)
+  }
+
+  this.name = name
+  this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
+  // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
+  this.escapedName = name.replace('/', '%2f')
+  return this
+}
+
+Result.prototype.toString = function () {
+  const full = []
+  if (this.name != null && this.name !== '') {
+    full.push(this.name)
+  }
+  const spec = this.saveSpec || this.fetchSpec || this.rawSpec
+  if (spec != null && spec !== '') {
+    full.push(spec)
+  }
+  return full.length ? full.join('@') : this.raw
+}
+
+Result.prototype.toJSON = function () {
+  const result = Object.assign({}, this)
+  delete result.hosted
+  return result
+}
+
+function setGitCommittish (res, committish) {
+  if (!committish) {
+    res.gitCommittish = null
+    return res
+  }
+
+  // for each :: separated item:
+  for (const part of committish.split('::')) {
+    // if the item has no : then it is a commit-ish
+    if (!part.includes(':')) {
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a committish')
+      }
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a second committish')
+      }
+      res.gitCommittish = part
+      continue
+    }
+    // split on name:value
+    const [name, value] = part.split(':')
+    // if name is semver do semver lookup of ref or tag
+    if (name === 'semver') {
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a semver range')
+      }
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a second semver range')
+      }
+      res.gitRange = decodeURIComponent(value)
+      continue
+    }
+    if (name === 'path') {
+      if (res.gitSubdir) {
+        throw new Error('cannot override existing path with a second path')
+      }
+      res.gitSubdir = `/${value}`
+      continue
+    }
+    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
+  }
+
+  return res
+}
+
+function fromFile (res, where) {
+  if (!where) {
+    where = process.cwd()
+  }
+  res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory'
+  res.where = where
+
+  // always put a trailing '/' on `where` when resolving urls, or else
+  // file:foo from /path/to/bar goes to /path/to/foo, when we want
+  // it to be /path/to/bar/foo
+
+  let specUrl
+  let resolvedUrl
+  const prefix = (!/^file:/.test(res.rawSpec) ? 'file:' : '')
+  const rawWithPrefix = prefix + res.rawSpec
+  let rawNoPrefix = rawWithPrefix.replace(/^file:/, '')
+  try {
+    resolvedUrl = new url.URL(rawWithPrefix, `file://${path.resolve(where)}/`)
+    specUrl = new url.URL(rawWithPrefix)
+  } catch (originalError) {
+    const er = new Error('Invalid file: URL, must comply with RFC 8909')
+    throw Object.assign(er, {
+      raw: res.rawSpec,
+      spec: res,
+      where,
+      originalError,
+    })
+  }
+
+  // environment switch for testing
+  if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') {
+    // XXX backwards compatibility lack of compliance with 8909
+    // Remove when we want a breaking change to come into RFC compliance.
+    if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
+      const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
+      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
+      specUrl = new url.URL(rawSpec)
+      rawNoPrefix = rawSpec.replace(/^file:/, '')
+    }
+    // turn file:/../foo into file:../foo
+    // for 1, 2 or 3 leading slashes since we attempted
+    // in the previous step to make it a file protocol url with a leading slash
+    if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
+      const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
+      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
+      specUrl = new url.URL(rawSpec)
+      rawNoPrefix = rawSpec.replace(/^file:/, '')
+    }
+    // XXX end 8909 violation backwards compatibility section
+  }
+
+  // file:foo - relative url to ./foo
+  // file:/foo - absolute path /foo
+  // file:///foo - absolute path to /foo, no authority host
+  // file://localhost/foo - absolute path to /foo, on localhost
+  // file://foo - absolute path to / on foo host (error!)
+  if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
+    const msg = `Invalid file: URL, must be absolute if // present`
+    throw Object.assign(new Error(msg), {
+      raw: res.rawSpec,
+      parsed: resolvedUrl,
+    })
+  }
+
+  // turn /C:/blah into just C:/blah on windows
+  let specPath = decodeURIComponent(specUrl.pathname)
+  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
+  if (isWindows) {
+    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
+    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
+  }
+
+  // replace ~ with homedir, but keep the ~ in the saveSpec
+  // otherwise, make it relative to where param
+  if (/^\/~(\/|$)/.test(specPath)) {
+    res.saveSpec = `file:${specPath.substr(1)}`
+    resolvedPath = path.resolve(homedir(), specPath.substr(3))
+  } else if (!path.isAbsolute(rawNoPrefix)) {
+    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
+  } else {
+    res.saveSpec = `file:${path.resolve(resolvedPath)}`
+  }
+
+  res.fetchSpec = path.resolve(where, resolvedPath)
+  return res
+}
+
+function fromHostedGit (res, hosted) {
+  res.type = 'git'
+  res.hosted = hosted
+  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
+  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
+  return setGitCommittish(res, hosted.committish)
+}
+
+function unsupportedURLType (protocol, spec) {
+  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
+  err.code = 'EUNSUPPORTEDPROTOCOL'
+  return err
+}
+
+function matchGitScp (spec) {
+  // git ssh specifiers are overloaded to also use scp-style git
+  // specifiers, so we have to parse those out and treat them specially.
+  // They are NOT true URIs, so we can't hand them to `url.parse`.
+  //
+  // This regex looks for things that look like:
+  // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
+  //
+  // ...and various combinations. The username in the beginning is *required*.
+  const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
+  return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && {
+    fetchSpec: matched[1],
+    gitCommittish: matched[2] == null ? null : matched[2],
+  }
+}
+
+function fromURL (res) {
+  // eslint-disable-next-line node/no-deprecated-api
+  const urlparse = url.parse(res.rawSpec)
+  res.saveSpec = res.rawSpec
+  // check the protocol, and then see if it's git or not
+  switch (urlparse.protocol) {
+    case 'git:':
+    case 'git+http:':
+    case 'git+https:':
+    case 'git+rsync:':
+    case 'git+ftp:':
+    case 'git+file:':
+    case 'git+ssh:': {
+      res.type = 'git'
+      const match = urlparse.protocol === 'git+ssh:' ? matchGitScp(res.rawSpec)
+        : null
+      if (match) {
+        setGitCommittish(res, match.gitCommittish)
+        res.fetchSpec = match.fetchSpec
+      } else {
+        setGitCommittish(res, urlparse.hash != null ? urlparse.hash.slice(1) : '')
+        urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '')
+        if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) {
+          // keep the drive letter : on windows file paths
+          urlparse.host += ':'
+          urlparse.hostname += ':'
+        }
+        delete urlparse.hash
+        res.fetchSpec = url.format(urlparse)
+      }
+      break
+    }
+    case 'http:':
+    case 'https:':
+      res.type = 'remote'
+      res.fetchSpec = res.saveSpec
+      break
+
+    default:
+      throw unsupportedURLType(urlparse.protocol, res.rawSpec)
+  }
+
+  return res
+}
+
+function fromAlias (res, where) {
+  const subSpec = npa(res.rawSpec.substr(4), where)
+  if (subSpec.type === 'alias') {
+    throw new Error('nested aliases not supported')
+  }
+
+  if (!subSpec.registry) {
+    throw new Error('aliases only work for registry deps')
+  }
+
+  res.subSpec = subSpec
+  res.registry = true
+  res.type = 'alias'
+  res.saveSpec = null
+  res.fetchSpec = null
+  return res
+}
+
+function fromRegistry (res) {
+  res.registry = true
+  const spec = res.rawSpec.trim()
+  // no save spec for registry components as we save based on the fetched
+  // version, not on the argument, so it can't be computed here.
+  res.saveSpec = null
+  res.fetchSpec = spec
+  const version = semver.valid(spec, true)
+  const range = semver.validRange(spec, true)
+  if (version) {
+    res.type = 'version'
+  } else if (range) {
+    res.type = 'range'
+  } else {
+    if (encodeURIComponent(spec) !== spec) {
+      throw invalidTagName(spec, res.raw)
+    }
+    res.type = 'tag'
+  }
+  return res
+}
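
The `::` fragment grammar handled by setGitCommittish above accepts at most one bare committish plus optional `semver:<range>` and `path:<subdir>` keys. A minimal sketch of how those surface on the parsed result — the spec string is illustrative, and this assumes the vendored lib/npa.js resolves via require('npm-package-arg'):

    const npa = require('npm-package-arg')

    // fragment carries a semver range and a subdirectory instead of a commit
    const res = npa('github:npm/cli#semver:^9.0.0::path:workspaces/arborist')
    console.log(res.type)      // 'git'
    console.log(res.gitRange)  // '^9.0.0'
    console.log(res.gitSubdir) // '/workspaces/arborist' (leading slash added by the parser)

Passing both a bare committish and a `semver:` key should throw, since the parser refuses to let either override the other.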
diff --git a/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json b/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
new file mode 100644
index 0000000000000..bb9e71b258a93
--- /dev/null
+++ b/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
@@ -0,0 +1,59 @@
+{
+  "name": "npm-package-arg",
+  "version": "10.1.0",
+  "description": "Parse the things that can be arguments to `npm install`",
+  "main": "./lib/npa.js",
+  "directories": {
+    "test": "test"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "dependencies": {
+    "hosted-git-info": "^6.0.0",
+    "proc-log": "^3.0.0",
+    "semver": "^7.3.5",
+    "validate-npm-package-name": "^5.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.10.0",
+    "tap": "^16.0.1"
+  },
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "npmclilint": "npmcli-lint",
+    "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/npm-package-arg.git"
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/npm-package-arg/issues"
+  },
+  "homepage": "https://github.com/npm/npm-package-arg",
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "tap": {
+    "branches": 97,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.10.0"
+  }
+}
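
The `engines` range above is what npm validates installs against. For a rough local check of the current runtime against that range, reusing the semver dependency this package already declares (a sketch, not part of the package's API):

    const semver = require('semver')
    const { engines } = require('npm-package-arg/package.json')

    // process.version looks like 'v18.17.1'; semver accepts the leading 'v'
    const ok = semver.satisfies(process.version, engines.node)
    console.log(ok ? 'supported runtime' : `requires node ${engines.node}`)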
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/LICENSE b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/LICENSE
new file mode 100644
index 0000000000000..45055763dc838
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright (c) 2015, Rebecca Turner
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/from-url.js b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/from-url.js
new file mode 100644
index 0000000000000..efc1247d59d12
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/from-url.js
@@ -0,0 +1,122 @@
+'use strict'
+
+const parseUrl = require('./parse-url')
+
+// look for github shorthand inputs, such as npm/cli
+const isGitHubShorthand = (arg) => {
+  // it cannot contain whitespace before the first #
+  // it cannot start with a / because that's probably an absolute file path
+  // but it must include a slash since repos are username/repository
+  // it cannot start with a . because that's probably a relative file path
+  // it cannot start with an @ because that's a scoped package if it passes the other tests
+  // it cannot contain a : before a # because that tells us that there's a protocol
+  // a second / may not exist before a #
+  const firstHash = arg.indexOf('#')
+  const firstSlash = arg.indexOf('/')
+  const secondSlash = arg.indexOf('/', firstSlash + 1)
+  const firstColon = arg.indexOf(':')
+  const firstSpace = /\s/.exec(arg)
+  const firstAt = arg.indexOf('@')
+
+  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
+  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
+  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
+  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
+  const hasSlash = firstSlash > 0
+  // if a # is found, what we really want to know is that the character
+  // immediately before # is not a /
+  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
+  const doesNotStartWithDot = !arg.startsWith('.')
+
+  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
+    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
+    secondSlashOnlyAfterHash
+}
+
+module.exports = (giturl, opts, { gitHosts, protocols }) => {
+  if (!giturl) {
+    return
+  }
+
+  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
+  const parsed = parseUrl(correctedUrl, protocols)
+  if (!parsed) {
+    return
+  }
+
+  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
+  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
+    ? parsed.hostname.slice(4)
+    : parsed.hostname]
+  const gitHostName = gitHostShortcut || gitHostDomain
+  if (!gitHostName) {
+    return
+  }
+
+  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
+  let auth = null
+  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
+    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
+  }
+
+  let committish = null
+  let user = null
+  let project = null
+  let defaultRepresentation = null
+
+  try {
+    if (gitHostShortcut) {
+      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
+      const firstAt = pathname.indexOf('@')
+      // we ignore auth for shortcuts, so just trim it out
+      if (firstAt > -1) {
+        pathname = pathname.slice(firstAt + 1)
+      }
+
+      const lastSlash = pathname.lastIndexOf('/')
+      if (lastSlash > -1) {
+        user = decodeURIComponent(pathname.slice(0, lastSlash))
+        // we want nulls only, never empty strings
+        if (!user) {
+          user = null
+        }
+        project = decodeURIComponent(pathname.slice(lastSlash + 1))
+      } else {
+        project = decodeURIComponent(pathname)
+      }
+
+      if (project.endsWith('.git')) {
+        project = project.slice(0, -4)
+      }
+
+      if (parsed.hash) {
+        committish = decodeURIComponent(parsed.hash.slice(1))
+      }
+
+      defaultRepresentation = 'shortcut'
+    } else {
+      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
+        return
+      }
+
+      const segments = gitHostInfo.extract(parsed)
+      if (!segments) {
+        return
+      }
+
+      user = segments.user && decodeURIComponent(segments.user)
+      project = decodeURIComponent(segments.project)
+      committish = decodeURIComponent(segments.committish)
+      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
+    }
+  } catch (err) {
+    /* istanbul ignore else */
+    if (err instanceof URIError) {
+      return
+    } else {
+      throw err
+    }
+  }
+
+  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
+}
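
To see the shorthand heuristic above in action, a minimal sketch against the package's public entry point (expected values reasoned from the code, not from a test run):

    const GitHost = require('hosted-git-info')

    // bare user/repo is promoted to the github: shortcut
    console.log(GitHost.fromUrl('npm/cli').type)    // 'github'
    console.log(GitHost.fromUrl('npm/cli').default) // 'shortcut'

    // a leading '.' or '@' fails the heuristic and no host matches
    console.log(GitHost.fromUrl('./npm/cli')) // undefined
    console.log(GitHost.fromUrl('@npm/cli'))  // undefined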
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/hosts.js b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/hosts.js
new file mode 100644
index 0000000000000..013712b7842c8
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/hosts.js
@@ -0,0 +1,228 @@
+/* eslint-disable max-len */
+
+'use strict'
+
+const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
+const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
+const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-')
+
+const defaults = {
+  sshtemplate: ({ domain, user, project, committish }) =>
+    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, user, project, committish }) =>
+    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
+  browsetemplate: ({ domain, user, project, committish, treepath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  docstemplate: ({ domain, user, project, treepath, committish }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
+  shortcuttemplate: ({ type, user, project, committish }) =>
+    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ user, project, committish }) =>
+    `${user}/${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, user, project }) =>
+    `https://${domain}/${user}/${project}/issues`,
+  hashformat: formatHashFragment,
+}
+
+const hosts = {}
+hosts.github = {
+  // First two are insecure and generally shouldn't be used any more, but
+  // they are still supported.
+  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'github.com',
+  treepath: 'tree',
+  blobpath: 'blob',
+  editpath: 'edit',
+  filetemplate: ({ auth, user, project, committish, path }) =>
+    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
+  gittemplate: ({ auth, domain, user, project, committish }) =>
+    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, type, committish] = url.pathname.split('/', 5)
+    if (type && type !== 'tree') {
+      return
+    }
+
+    if (!type) {
+      committish = url.hash.slice(1)
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish }
+  },
+}
+
+hosts.bitbucket = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'bitbucket.org',
+  treepath: 'src',
+  blobpath: 'src',
+  editpath: '?mode=edit',
+  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (['get'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gitlab = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gitlab.com',
+  treepath: 'tree',
+  blobpath: 'tree',
+  editpath: '-/edit',
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    const path = url.pathname.slice(1)
+    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
+      return
+    }
+
+    const segments = path.split('/')
+    let project = segments.pop()
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    const user = segments.join('/')
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gist = {
+  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gist.github.com',
+  editpath: 'edit',
+  sshtemplate: ({ domain, project, committish }) =>
+    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, project, committish }) =>
+    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
+  browsetemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  docstemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  httpstemplate: ({ domain, project, committish }) =>
+    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ user, project, committish, path }) =>
+    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
+  shortcuttemplate: ({ type, project, committish }) =>
+    `${type}:${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ project, committish }) =>
+    `${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, project }) =>
+    `https://${domain}/${project}`,
+  gittemplate: ({ domain, project, committish }) =>
+    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ project, committish }) =>
+    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (aux === 'raw') {
+      return
+    }
+
+    if (!project) {
+      if (!user) {
+        return
+      }
+
+      project = user
+      user = null
+    }
+
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+  hashformat: function (fragment) {
+    return fragment && 'file-' + formatHashFragment(fragment)
+  },
+}
+
+hosts.sourcehut = {
+  protocols: ['git+ssh:', 'https:'],
+  domain: 'git.sr.ht',
+  treepath: 'tree',
+  blobpath: 'tree',
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
+  httpstemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
+  bugstemplate: ({ user, project }) =>
+    `https://todo.sr.ht/${user}/${project}`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+
+    // tarball url
+    if (['archive'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  hosts[name] = Object.assign({}, defaults, host)
+}
+
+module.exports = hosts
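
Each host entry above is merged over the `defaults` table, so a missing template falls back to the generic one. A sketch that requires the internal module directly — not public API, shown only to illustrate the template shape:

    const hosts = require('hosted-git-info/lib/hosts.js')

    const gh = hosts.github
    console.log(gh.browsetreetemplate({
      domain: gh.domain,
      user: 'npm',
      project: 'cli',
      committish: 'v9.8.0',
      treepath: gh.treepath, // 'tree'
      path: 'README.md',
      fragment: '',
      hashformat: gh.hashformat,
    }))
    // -> https://github.com/npm/cli/tree/v9.8.0/README.md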
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/index.js b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/index.js
new file mode 100644
index 0000000000000..a7339c217e9a3
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/index.js
@@ -0,0 +1,179 @@
+'use strict'
+
+const LRU = require('lru-cache')
+const hosts = require('./hosts.js')
+const fromUrl = require('./from-url.js')
+const parseUrl = require('./parse-url.js')
+
+const cache = new LRU({ max: 1000 })
+
+class GitHost {
+  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
+    Object.assign(this, GitHost.#gitHosts[type], {
+      type,
+      user,
+      auth,
+      project,
+      committish,
+      default: defaultRepresentation,
+      opts,
+    })
+  }
+
+  static #gitHosts = { byShortcut: {}, byDomain: {} }
+  static #protocols = {
+    'git+ssh:': { name: 'sshurl' },
+    'ssh:': { name: 'sshurl' },
+    'git+https:': { name: 'https', auth: true },
+    'git:': { auth: true },
+    'http:': { auth: true },
+    'https:': { auth: true },
+    'git+http:': { auth: true },
+  }
+
+  static addHost (name, host) {
+    GitHost.#gitHosts[name] = host
+    GitHost.#gitHosts.byDomain[host.domain] = name
+    GitHost.#gitHosts.byShortcut[`${name}:`] = name
+    GitHost.#protocols[`${name}:`] = { name }
+  }
+
+  static fromUrl (giturl, opts) {
+    if (typeof giturl !== 'string') {
+      return
+    }
+
+    const key = giturl + JSON.stringify(opts || {})
+
+    if (!cache.has(key)) {
+      const hostArgs = fromUrl(giturl, opts, {
+        gitHosts: GitHost.#gitHosts,
+        protocols: GitHost.#protocols,
+      })
+      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
+    }
+
+    return cache.get(key)
+  }
+
+  static parseUrl (url) {
+    return parseUrl(url)
+  }
+
+  #fill (template, opts) {
+    if (typeof template !== 'function') {
+      return null
+    }
+
+    const options = { ...this, ...this.opts, ...opts }
+
+    // the path should always be set so we don't end up with 'undefined' in urls
+    if (!options.path) {
+      options.path = ''
+    }
+
+    // template functions will insert the leading slash themselves
+    if (options.path.startsWith('/')) {
+      options.path = options.path.slice(1)
+    }
+
+    if (options.noCommittish) {
+      options.committish = null
+    }
+
+    const result = template(options)
+    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
+  }
+
+  hash () {
+    return this.committish ? `#${this.committish}` : ''
+  }
+
+  ssh (opts) {
+    return this.#fill(this.sshtemplate, opts)
+  }
+
+  sshurl (opts) {
+    return this.#fill(this.sshurltemplate, opts)
+  }
+
+  browse (path, ...args) {
+    // not a string, treat path as opts
+    if (typeof path !== 'string') {
+      return this.#fill(this.browsetemplate, path)
+    }
+
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browsetreetemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  // If the path is known to be a file, then browseFile should be used. For some hosts
+  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
+  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
+  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
+  // does not redirect to a different commit.
+  browseFile (path, ...args) {
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browseblobtemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  docs (opts) {
+    return this.#fill(this.docstemplate, opts)
+  }
+
+  bugs (opts) {
+    return this.#fill(this.bugstemplate, opts)
+  }
+
+  https (opts) {
+    return this.#fill(this.httpstemplate, opts)
+  }
+
+  git (opts) {
+    return this.#fill(this.gittemplate, opts)
+  }
+
+  shortcut (opts) {
+    return this.#fill(this.shortcuttemplate, opts)
+  }
+
+  path (opts) {
+    return this.#fill(this.pathtemplate, opts)
+  }
+
+  tarball (opts) {
+    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
+  }
+
+  file (path, opts) {
+    return this.#fill(this.filetemplate, { ...opts, path })
+  }
+
+  edit (path, opts) {
+    return this.#fill(this.edittemplate, { ...opts, path })
+  }
+
+  getDefaultRepresentation () {
+    return this.default
+  }
+
+  toString (opts) {
+    if (this.default && typeof this[this.default] === 'function') {
+      return this[this.default](opts)
+    }
+
+    return this.sshurl(opts)
+  }
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  GitHost.addHost(name, host)
+}
+
+module.exports = GitHost
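
The representation methods above all route through #fill with per-call option overrides. A short sketch of the common ones (outputs reasoned from the templates in lib/hosts.js):

    const GitHost = require('hosted-git-info')

    const info = GitHost.fromUrl('git+ssh://git@github.com/npm/cli.git#v9.8.0')
    console.log(info.default)    // 'sshurl'
    console.log(info.shortcut()) // 'github:npm/cli#v9.8.0'
    console.log(info.https({ noCommittish: true }))
    // 'git+https://github.com/npm/cli.git'
    console.log(info.tarball())
    // 'https://codeload.github.com/npm/cli/tar.gz/v9.8.0'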
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/parse-url.js
new file mode 100644
index 0000000000000..7d5489c008ab4
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/parse-url.js
@@ -0,0 +1,78 @@
+const url = require('url')
+
+const lastIndexOfBefore = (str, char, beforeChar) => {
+  const startPosition = str.indexOf(beforeChar)
+  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
+}
+
+const safeUrl = (u) => {
+  try {
+    return new url.URL(u)
+  } catch {
+    // this fn should never throw
+  }
+}
+
+// accepts input like git:github.com:user/repo and inserts the // after the first :
+const correctProtocol = (arg, protocols) => {
+  const firstColon = arg.indexOf(':')
+  const proto = arg.slice(0, firstColon + 1)
+  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
+    return arg
+  }
+
+  const firstAt = arg.indexOf('@')
+  if (firstAt > -1) {
+    if (firstAt > firstColon) {
+      return `git+ssh://${arg}`
+    } else {
+      return arg
+    }
+  }
+
+  const doubleSlash = arg.indexOf('//')
+  if (doubleSlash === firstColon + 1) {
+    return arg
+  }
+
+  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
+}
+
+// attempt to correct an scp-style url so that it will parse with `new URL()`
+const correctUrl = (giturl) => {
+  // ignore @s that come after the first hash, since that denotes the start
+  // of a committish which can contain @ characters
+  const firstAt = lastIndexOfBefore(giturl, '@', '#')
+  // ignore colons that come after the hash since that could include colons such as:
+  // git@github.com:user/package-2#semver:^1.0.0
+  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
+
+  if (lastColonBeforeHash > firstAt) {
+    // the last : comes after the first @ (or there is no @)
+    // like it would in:
+    // proto://hostname.com:user/repo
+    // username@hostname.com:user/repo
+    // :password@hostname.com:user/repo
+    // username:password@hostname.com:user/repo
+    // proto://username@hostname.com:user/repo
+    // proto://:password@hostname.com:user/repo
+    // proto://username:password@hostname.com:user/repo
+    // then we replace the last : with a / to create a valid path
+    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
+  }
+
+  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
+    // we have no : at all
+    // as it would be in:
+    // username@hostname.com/user/repo
+    // then we prepend a protocol
+    giturl = `git+ssh://${giturl}`
+  }
+
+  return giturl
+}
+
+module.exports = (giturl, protocols) => {
+  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
+  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
+}
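
correctUrl above is what lets scp-style addresses survive `new URL()`: the last ':' before any '#' becomes a '/', and a protocol is prepended when none is present. A sketch against the internal module (not public API):

    const parseUrl = require('hosted-git-info/lib/parse-url.js')

    const u = parseUrl('git@github.com:npm/cli.git#v9.8.0')
    console.log(u.protocol) // 'git+ssh:'
    console.log(u.pathname) // '/npm/cli.git'
    console.log(u.hash)     // '#v9.8.0'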
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/package.json b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/package.json
new file mode 100644
index 0000000000000..612259948afe7
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/package.json
@@ -0,0 +1,59 @@
+{
+  "name": "hosted-git-info",
+  "version": "6.1.1",
+  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
+  "main": "./lib/index.js",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/hosted-git-info.git"
+  },
+  "keywords": [
+    "git",
+    "github",
+    "bitbucket",
+    "gitlab"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/hosted-git-info/issues"
+  },
+  "homepage": "https://github.com/npm/hosted-git-info",
+  "scripts": {
+    "posttest": "npm run lint",
+    "snap": "tap",
+    "test": "tap",
+    "test:coverage": "tap --coverage-report=html",
+    "lint": "eslint \"**/*.js\"",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run lint -- --fix",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "dependencies": {
+    "lru-cache": "^7.5.1"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.7.1",
+    "tap": "^16.0.1"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "tap": {
+    "color": 1,
+    "coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.7.1"
+  }
+}
diff --git a/node_modules/npm-registry-fetch/node_modules/npm-package-arg/LICENSE b/node_modules/npm-registry-fetch/node_modules/npm-package-arg/LICENSE
new file mode 100644
index 0000000000000..19cec97b18468
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/npm-package-arg/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-registry-fetch/node_modules/npm-package-arg/lib/npa.js b/node_modules/npm-registry-fetch/node_modules/npm-package-arg/lib/npa.js
new file mode 100644
index 0000000000000..36bd18cd9f9a6
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/npm-package-arg/lib/npa.js
@@ -0,0 +1,431 @@
+'use strict'
+module.exports = npa
+module.exports.resolve = resolve
+module.exports.toPurl = toPurl
+module.exports.Result = Result
+
+const url = require('url')
+const HostedGit = require('hosted-git-info')
+const semver = require('semver')
+const path = global.FAKE_WINDOWS ? require('path').win32 : require('path')
+const validatePackageName = require('validate-npm-package-name')
+const { homedir } = require('os')
+const log = require('proc-log')
+
+const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
+const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
+const isURL = /^(?:git[+])?[a-z]+:/i
+const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
+const isFilename = /[.](?:tgz|tar.gz|tar)$/i
+
+function npa (arg, where) {
+  let name
+  let spec
+  if (typeof arg === 'object') {
+    if (arg instanceof Result && (!where || where === arg.where)) {
+      return arg
+    } else if (arg.name && arg.rawSpec) {
+      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
+    } else {
+      return npa(arg.raw, where || arg.where)
+    }
+  }
+  const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@')
+  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
+  if (isURL.test(arg)) {
+    spec = arg
+  } else if (isGit.test(arg)) {
+    spec = `git+ssh://${arg}`
+  } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) {
+    spec = arg
+  } else if (nameEndsAt > 0) {
+    name = namePart
+    spec = arg.slice(nameEndsAt + 1) || '*'
+  } else {
+    const valid = validatePackageName(arg)
+    if (valid.validForOldPackages) {
+      name = arg
+      spec = '*'
+    } else {
+      spec = arg
+    }
+  }
+  return resolve(name, spec, where, arg)
+}
+
+const isFilespec = isWindows ? /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
+
+function resolve (name, spec, where, arg) {
+  const res = new Result({
+    raw: arg,
+    name: name,
+    rawSpec: spec,
+    fromArgument: arg != null,
+  })
+
+  if (name) {
+    res.setName(name)
+  }
+
+  if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) {
+    return fromFile(res, where)
+  } else if (spec && /^npm:/i.test(spec)) {
+    return fromAlias(res, where)
+  }
+
+  const hosted = HostedGit.fromUrl(spec, {
+    noGitPlus: true,
+    noCommittish: true,
+  })
+  if (hosted) {
+    return fromHostedGit(res, hosted)
+  } else if (spec && isURL.test(spec)) {
+    return fromURL(res)
+  } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) {
+    return fromFile(res, where)
+  } else {
+    return fromRegistry(res)
+  }
+}
+
+const defaultRegistry = 'https://registry.npmjs.org'
+
+function toPurl (arg, reg = defaultRegistry) {
+  const res = npa(arg)
+
+  if (res.type !== 'version') {
+    throw invalidPurlType(res.type, res.raw)
+  }
+
+  // URI-encode leading @ of scoped packages
+  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
+  if (reg !== defaultRegistry) {
+    purl += '?repository_url=' + reg
+  }
+
+  return purl
+}
+
+function invalidPackageName (name, valid, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
+  err.code = 'EINVALIDPACKAGENAME'
+  return err
+}
+
+function invalidTagName (name, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
+  err.code = 'EINVALIDTAGNAME'
+  return err
+}
+
+function invalidPurlType (type, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
+  err.code = 'EINVALIDPURLTYPE'
+  return err
+}
+
+function Result (opts) {
+  this.type = opts.type
+  this.registry = opts.registry
+  this.where = opts.where
+  if (opts.raw == null) {
+    this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec
+  } else {
+    this.raw = opts.raw
+  }
+
+  this.name = undefined
+  this.escapedName = undefined
+  this.scope = undefined
+  this.rawSpec = opts.rawSpec || ''
+  this.saveSpec = opts.saveSpec
+  this.fetchSpec = opts.fetchSpec
+  if (opts.name) {
+    this.setName(opts.name)
+  }
+  this.gitRange = opts.gitRange
+  this.gitCommittish = opts.gitCommittish
+  this.gitSubdir = opts.gitSubdir
+  this.hosted = opts.hosted
+}
+
+Result.prototype.setName = function (name) {
+  const valid = validatePackageName(name)
+  if (!valid.validForOldPackages) {
+    throw invalidPackageName(name, valid, this.raw)
+  }
+
+  this.name = name
+  this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
+  // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
+  this.escapedName = name.replace('/', '%2f')
+  return this
+}
+
+Result.prototype.toString = function () {
+  const full = []
+  if (this.name != null && this.name !== '') {
+    full.push(this.name)
+  }
+  const spec = this.saveSpec || this.fetchSpec || this.rawSpec
+  if (spec != null && spec !== '') {
+    full.push(spec)
+  }
+  return full.length ? full.join('@') : this.raw
+}
+
+Result.prototype.toJSON = function () {
+  const result = Object.assign({}, this)
+  delete result.hosted
+  return result
+}
+
+function setGitCommittish (res, committish) {
+  if (!committish) {
+    res.gitCommittish = null
+    return res
+  }
+
+  // for each :: separated item:
+  for (const part of committish.split('::')) {
+    // if the item has no : then it is a commit-ish
+    if (!part.includes(':')) {
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a committish')
+      }
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a second committish')
+      }
+      res.gitCommittish = part
+      continue
+    }
+    // split on name:value
+    const [name, value] = part.split(':')
+    // if name is semver do semver lookup of ref or tag
+    if (name === 'semver') {
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a semver range')
+      }
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a second semver range')
+      }
+      res.gitRange = decodeURIComponent(value)
+      continue
+    }
+    if (name === 'path') {
+      if (res.gitSubdir) {
+        throw new Error('cannot override existing path with a second path')
+      }
+      res.gitSubdir = `/${value}`
+      continue
+    }
+    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
+  }
+
+  return res
+}
+
+function fromFile (res, where) {
+  if (!where) {
+    where = process.cwd()
+  }
+  res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory'
+  res.where = where
+
+  // always put the '/' on where when resolving urls, or else
+  // file:foo from /path/to/bar goes to /path/to/foo, when we want
+  // it to be /path/to/bar/foo
+
+  let specUrl
+  let resolvedUrl
+  const prefix = (!/^file:/.test(res.rawSpec) ? 'file:' : '')
+  const rawWithPrefix = prefix + res.rawSpec
+  let rawNoPrefix = rawWithPrefix.replace(/^file:/, '')
+  try {
+    resolvedUrl = new url.URL(rawWithPrefix, `file://${path.resolve(where)}/`)
+    specUrl = new url.URL(rawWithPrefix)
+  } catch (originalError) {
+    const er = new Error('Invalid file: URL, must comply with RFC 8909')
+    throw Object.assign(er, {
+      raw: res.rawSpec,
+      spec: res,
+      where,
+      originalError,
+    })
+  }
+
+  // environment switch for testing
+  if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') {
+    // XXX backwards compatibility lack of compliance with 8909
+    // Remove when we want a breaking change to come into RFC compliance.
+    if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
+      const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
+      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
+      specUrl = new url.URL(rawSpec)
+      rawNoPrefix = rawSpec.replace(/^file:/, '')
+    }
+    // turn file:/../foo into file:../foo
+    // for 1, 2 or 3 leading slashes since we attempted
+    // in the previous step to make it a file protocol url with a leading slash
+    if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
+      const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
+      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
+      specUrl = new url.URL(rawSpec)
+      rawNoPrefix = rawSpec.replace(/^file:/, '')
+    }
+    // XXX end 8909 violation backwards compatibility section
+  }
+
+  // file:foo - relative url to ./foo
+  // file:/foo - absolute path /foo
+  // file:///foo - absolute path to /foo, no authority host
+  // file://localhost/foo - absolute path to /foo, on localhost
+  // file://foo - absolute path to / on foo host (error!)
+  if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
+    const msg = `Invalid file: URL, must be absolute if // present`
+    throw Object.assign(new Error(msg), {
+      raw: res.rawSpec,
+      parsed: resolvedUrl,
+    })
+  }
+
+  // turn /C:/blah into just C:/blah on windows
+  let specPath = decodeURIComponent(specUrl.pathname)
+  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
+  if (isWindows) {
+    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
+    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
+  }
+
+  // replace ~ with homedir, but keep the ~ in the saveSpec
+  // otherwise, make it relative to where param
+  if (/^\/~(\/|$)/.test(specPath)) {
+    res.saveSpec = `file:${specPath.substr(1)}`
+    resolvedPath = path.resolve(homedir(), specPath.substr(3))
+  } else if (!path.isAbsolute(rawNoPrefix)) {
+    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
+  } else {
+    res.saveSpec = `file:${path.resolve(resolvedPath)}`
+  }
+
+  res.fetchSpec = path.resolve(where, resolvedPath)
+  return res
+}
+
+function fromHostedGit (res, hosted) {
+  res.type = 'git'
+  res.hosted = hosted
+  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
+  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
+  return setGitCommittish(res, hosted.committish)
+}
+
+function unsupportedURLType (protocol, spec) {
+  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
+  err.code = 'EUNSUPPORTEDPROTOCOL'
+  return err
+}
+
+function matchGitScp (spec) {
+  // git ssh specifiers are overloaded to also use scp-style git
+  // specifiers, so we have to parse those out and treat them specially.
+  // They are NOT true URIs, so we can't hand them to `url.parse`.
+  //
+  // This regex looks for things that look like:
+  // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
+  //
+  // ...and various combinations. The username in the beginning is *required*.
+  const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
+  return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && {
+    fetchSpec: matched[1],
+    gitCommittish: matched[2] == null ? null : matched[2],
+  }
+}
+
+function fromURL (res) {
+  // eslint-disable-next-line node/no-deprecated-api
+  const urlparse = url.parse(res.rawSpec)
+  res.saveSpec = res.rawSpec
+  // check the protocol, and then see if it's git or not
+  switch (urlparse.protocol) {
+    case 'git:':
+    case 'git+http:':
+    case 'git+https:':
+    case 'git+rsync:':
+    case 'git+ftp:':
+    case 'git+file:':
+    case 'git+ssh:': {
+      res.type = 'git'
+      const match = urlparse.protocol === 'git+ssh:' ? matchGitScp(res.rawSpec)
+        : null
+      if (match) {
+        setGitCommittish(res, match.gitCommittish)
+        res.fetchSpec = match.fetchSpec
+      } else {
+        setGitCommittish(res, urlparse.hash != null ? urlparse.hash.slice(1) : '')
+        urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '')
+        if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) {
+          // keep the drive letter : on windows file paths
+          urlparse.host += ':'
+          urlparse.hostname += ':'
+        }
+        delete urlparse.hash
+        res.fetchSpec = url.format(urlparse)
+      }
+      break
+    }
+    case 'http:':
+    case 'https:':
+      res.type = 'remote'
+      res.fetchSpec = res.saveSpec
+      break
+
+    default:
+      throw unsupportedURLType(urlparse.protocol, res.rawSpec)
+  }
+
+  return res
+}
+
+function fromAlias (res, where) {
+  const subSpec = npa(res.rawSpec.substr(4), where)
+  if (subSpec.type === 'alias') {
+    throw new Error('nested aliases not supported')
+  }
+
+  if (!subSpec.registry) {
+    throw new Error('aliases only work for registry deps')
+  }
+
+  res.subSpec = subSpec
+  res.registry = true
+  res.type = 'alias'
+  res.saveSpec = null
+  res.fetchSpec = null
+  return res
+}
+
+function fromRegistry (res) {
+  res.registry = true
+  const spec = res.rawSpec.trim()
+  // no save spec for registry components as we save based on the fetched
+  // version, not on the argument, so it can't be computed here.
+  res.saveSpec = null
+  res.fetchSpec = spec
+  const version = semver.valid(spec, true)
+  const range = semver.validRange(spec, true)
+  if (version) {
+    res.type = 'version'
+  } else if (range) {
+    res.type = 'range'
+  } else {
+    if (encodeURIComponent(spec) !== spec) {
+      throw invalidTagName(spec, res.raw)
+    }
+    res.type = 'tag'
+  }
+  return res
+}
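
Taken together, the resolve() dispatch above sorts specs into registry, file, git, remote, and alias results. A few representative inputs — package names are illustrative, and the expected types follow from the branches above:

    const npa = require('npm-package-arg')

    console.log(npa('foo@^1.2.3').type)      // 'range'
    console.log(npa('foo@1.2.3').type)       // 'version'
    console.log(npa('./packages/foo').type)  // 'directory'
    console.log(npa('foo@npm:bar@^2.0.0').subSpec.name) // 'bar' (alias)

    // toPurl only accepts exact versions and percent-encodes the scope's '@'
    console.log(npa.toPurl('@scope/foo@1.2.3')) // 'pkg:npm/%40scope/foo@1.2.3'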
diff --git a/node_modules/npm-registry-fetch/node_modules/npm-package-arg/package.json b/node_modules/npm-registry-fetch/node_modules/npm-package-arg/package.json
new file mode 100644
index 0000000000000..bb9e71b258a93
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/npm-package-arg/package.json
@@ -0,0 +1,59 @@
+{
+  "name": "npm-package-arg",
+  "version": "10.1.0",
+  "description": "Parse the things that can be arguments to `npm install`",
+  "main": "./lib/npa.js",
+  "directories": {
+    "test": "test"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "dependencies": {
+    "hosted-git-info": "^6.0.0",
+    "proc-log": "^3.0.0",
+    "semver": "^7.3.5",
+    "validate-npm-package-name": "^5.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.10.0",
+    "tap": "^16.0.1"
+  },
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "npmclilint": "npmcli-lint",
+    "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/npm-package-arg.git"
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/npm-package-arg/issues"
+  },
+  "homepage": "https://github.com/npm/npm-package-arg",
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "tap": {
+    "branches": 97,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.10.0"
+  }
+}
diff --git a/node_modules/pacote/node_modules/hosted-git-info/LICENSE b/node_modules/pacote/node_modules/hosted-git-info/LICENSE
new file mode 100644
index 0000000000000..45055763dc838
--- /dev/null
+++ b/node_modules/pacote/node_modules/hosted-git-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright (c) 2015, Rebecca Turner
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js b/node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js
new file mode 100644
index 0000000000000..efc1247d59d12
--- /dev/null
+++ b/node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js
@@ -0,0 +1,122 @@
+'use strict'
+
+const parseUrl = require('./parse-url')
+
+// look for github shorthand inputs, such as npm/cli
+const isGitHubShorthand = (arg) => {
+  // it cannot contain whitespace before the first #
+  // it cannot start with a / because that's probably an absolute file path
+  // but it must include a slash since repos are username/repository
+  // it cannot start with a . because that's probably a relative file path
+  // it cannot start with an @ because that's a scoped package if it passes the other tests
+  // it cannot contain a : before a # because that tells us that there's a protocol
+  // a second / may not exist before a #
+  const firstHash = arg.indexOf('#')
+  const firstSlash = arg.indexOf('/')
+  const secondSlash = arg.indexOf('/', firstSlash + 1)
+  const firstColon = arg.indexOf(':')
+  const firstSpace = /\s/.exec(arg)
+  const firstAt = arg.indexOf('@')
+
+  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
+  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
+  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
+  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
+  const hasSlash = firstSlash > 0
+  // if a # is found, what we really want to know is that the character
+  // immediately before # is not a /
+  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
+  const doesNotStartWithDot = !arg.startsWith('.')
+
+  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
+    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
+    secondSlashOnlyAfterHash
+}
+
+module.exports = (giturl, opts, { gitHosts, protocols }) => {
+  if (!giturl) {
+    return
+  }
+
+  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
+  const parsed = parseUrl(correctedUrl, protocols)
+  if (!parsed) {
+    return
+  }
+
+  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
+  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
+    ? parsed.hostname.slice(4)
+    : parsed.hostname]
+  const gitHostName = gitHostShortcut || gitHostDomain
+  if (!gitHostName) {
+    return
+  }
+
+  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
+  let auth = null
+  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
+    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
+  }
+
+  let committish = null
+  let user = null
+  let project = null
+  let defaultRepresentation = null
+
+  try {
+    if (gitHostShortcut) {
+      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
+      const firstAt = pathname.indexOf('@')
+      // we ignore auth for shortcuts, so just trim it out
+      if (firstAt > -1) {
+        pathname = pathname.slice(firstAt + 1)
+      }
+
+      const lastSlash = pathname.lastIndexOf('/')
+      if (lastSlash > -1) {
+        user = decodeURIComponent(pathname.slice(0, lastSlash))
+        // we want nulls only, never empty strings
+        if (!user) {
+          user = null
+        }
+        project = decodeURIComponent(pathname.slice(lastSlash + 1))
+      } else {
+        project = decodeURIComponent(pathname)
+      }
+
+      if (project.endsWith('.git')) {
+        project = project.slice(0, -4)
+      }
+
+      if (parsed.hash) {
+        committish = decodeURIComponent(parsed.hash.slice(1))
+      }
+
+      defaultRepresentation = 'shortcut'
+    } else {
+      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
+        return
+      }
+
+      const segments = gitHostInfo.extract(parsed)
+      if (!segments) {
+        return
+      }
+
+      user = segments.user && decodeURIComponent(segments.user)
+      project = decodeURIComponent(segments.project)
+      committish = decodeURIComponent(segments.committish)
+      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
+    }
+  } catch (err) {
+    /* istanbul ignore else */
+    if (err instanceof URIError) {
+      return
+    } else {
+      throw err
+    }
+  }
+
+  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
+}
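
pacote's vendored copy of from-url.js is identical to the one above; one behavior worth illustrating here is the non-shortcut branch, where credentials in the URL are kept only when the protocol table flags `auth: true`. A sketch (the token value is a placeholder):

    const GitHost = require('hosted-git-info')

    const info = GitHost.fromUrl('git+https://user:token@github.com/npm/cli.git')
    console.log(info.auth)    // 'user:token'
    console.log(info.https()) // 'git+https://user:token@github.com/npm/cli.git'

    // git+ssh: is not flagged for auth, so the 'git' user is dropped
    console.log(GitHost.fromUrl('git+ssh://git@github.com/npm/cli.git').auth) // null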
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js b/node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js
new file mode 100644
index 0000000000000..013712b7842c8
--- /dev/null
+++ b/node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js
@@ -0,0 +1,228 @@
+/* eslint-disable max-len */
+
+'use strict'
+
+const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
+const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
+const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-')
+
+const defaults = {
+  sshtemplate: ({ domain, user, project, committish }) =>
+    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, user, project, committish }) =>
+    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
+  browsetemplate: ({ domain, user, project, committish, treepath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  docstemplate: ({ domain, user, project, treepath, committish }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
+  shortcuttemplate: ({ type, user, project, committish }) =>
+    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ user, project, committish }) =>
+    `${user}/${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, user, project }) =>
+    `https://${domain}/${user}/${project}/issues`,
+  hashformat: formatHashFragment,
+}
+
+const hosts = {}
+hosts.github = {
+  // The first two protocols are insecure and generally shouldn't be used
+  // anymore, but they are still supported.
+  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'github.com',
+  treepath: 'tree',
+  blobpath: 'blob',
+  editpath: 'edit',
+  filetemplate: ({ auth, user, project, committish, path }) =>
+    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
+  gittemplate: ({ auth, domain, user, project, committish }) =>
+    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, type, committish] = url.pathname.split('/', 5)
+    if (type && type !== 'tree') {
+      return
+    }
+
+    if (!type) {
+      committish = url.hash.slice(1)
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish }
+  },
+}
+
+hosts.bitbucket = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'bitbucket.org',
+  treepath: 'src',
+  blobpath: 'src',
+  editpath: '?mode=edit',
+  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (['get'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gitlab = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gitlab.com',
+  treepath: 'tree',
+  blobpath: 'tree',
+  editpath: '-/edit',
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    const path = url.pathname.slice(1)
+    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
+      return
+    }
+
+    const segments = path.split('/')
+    let project = segments.pop()
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    const user = segments.join('/')
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gist = {
+  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gist.github.com',
+  editpath: 'edit',
+  sshtemplate: ({ domain, project, committish }) =>
+    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, project, committish }) =>
+    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
+  browsetemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  docstemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  httpstemplate: ({ domain, project, committish }) =>
+    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ user, project, committish, path }) =>
+    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
+  shortcuttemplate: ({ type, project, committish }) =>
+    `${type}:${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ project, committish }) =>
+    `${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, project }) =>
+    `https://${domain}/${project}`,
+  gittemplate: ({ domain, project, committish }) =>
+    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ project, committish }) =>
+    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (aux === 'raw') {
+      return
+    }
+
+    if (!project) {
+      if (!user) {
+        return
+      }
+
+      project = user
+      user = null
+    }
+
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+  hashformat: function (fragment) {
+    return fragment && 'file-' + formatHashFragment(fragment)
+  },
+}
+
+hosts.sourcehut = {
+  protocols: ['git+ssh:', 'https:'],
+  domain: 'git.sr.ht',
+  treepath: 'tree',
+  blobpath: 'tree',
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
+  httpstemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
+  bugstemplate: ({ user, project }) =>
+    `https://todo.sr.ht/${user}/${project}`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+
+    // tarball url
+    if (['archive'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  hosts[name] = Object.assign({}, defaults, host)
+}
+
+module.exports = hosts
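
Every entry above is merged with `defaults`, so each host answers the full set of template functions with its own overrides. The templates are plain string builders over the same handful of fields; a quick sketch, with outputs following directly from the templates above:

const hosts = require('./hosts.js')
const fields = { domain: 'github.com', user: 'npm', project: 'cli', committish: 'v10.0.0' }
hosts.github.sshtemplate(fields)
// -> 'git@github.com:npm/cli.git#v10.0.0'
hosts.github.tarballtemplate(fields)
// -> 'https://codeload.github.com/npm/cli/tar.gz/v10.0.0'
hosts.github.shortcuttemplate({ type: 'github', ...fields })
// -> 'github:npm/cli#v10.0.0'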
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/index.js b/node_modules/pacote/node_modules/hosted-git-info/lib/index.js
new file mode 100644
index 0000000000000..a7339c217e9a3
--- /dev/null
+++ b/node_modules/pacote/node_modules/hosted-git-info/lib/index.js
@@ -0,0 +1,179 @@
+'use strict'
+
+const LRU = require('lru-cache')
+const hosts = require('./hosts.js')
+const fromUrl = require('./from-url.js')
+const parseUrl = require('./parse-url.js')
+
+const cache = new LRU({ max: 1000 })
+
+class GitHost {
+  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
+    Object.assign(this, GitHost.#gitHosts[type], {
+      type,
+      user,
+      auth,
+      project,
+      committish,
+      default: defaultRepresentation,
+      opts,
+    })
+  }
+
+  static #gitHosts = { byShortcut: {}, byDomain: {} }
+  static #protocols = {
+    'git+ssh:': { name: 'sshurl' },
+    'ssh:': { name: 'sshurl' },
+    'git+https:': { name: 'https', auth: true },
+    'git:': { auth: true },
+    'http:': { auth: true },
+    'https:': { auth: true },
+    'git+http:': { auth: true },
+  }
+
+  static addHost (name, host) {
+    GitHost.#gitHosts[name] = host
+    GitHost.#gitHosts.byDomain[host.domain] = name
+    GitHost.#gitHosts.byShortcut[`${name}:`] = name
+    GitHost.#protocols[`${name}:`] = { name }
+  }
+
+  static fromUrl (giturl, opts) {
+    if (typeof giturl !== 'string') {
+      return
+    }
+
+    const key = giturl + JSON.stringify(opts || {})
+
+    if (!cache.has(key)) {
+      const hostArgs = fromUrl(giturl, opts, {
+        gitHosts: GitHost.#gitHosts,
+        protocols: GitHost.#protocols,
+      })
+      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
+    }
+
+    return cache.get(key)
+  }
+
+  static parseUrl (url) {
+    return parseUrl(url)
+  }
+
+  #fill (template, opts) {
+    if (typeof template !== 'function') {
+      return null
+    }
+
+    const options = { ...this, ...this.opts, ...opts }
+
+    // the path should always be set so we don't end up with 'undefined' in urls
+    if (!options.path) {
+      options.path = ''
+    }
+
+    // template functions will insert the leading slash themselves
+    if (options.path.startsWith('/')) {
+      options.path = options.path.slice(1)
+    }
+
+    if (options.noCommittish) {
+      options.committish = null
+    }
+
+    const result = template(options)
+    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
+  }
+
+  hash () {
+    return this.committish ? `#${this.committish}` : ''
+  }
+
+  ssh (opts) {
+    return this.#fill(this.sshtemplate, opts)
+  }
+
+  sshurl (opts) {
+    return this.#fill(this.sshurltemplate, opts)
+  }
+
+  browse (path, ...args) {
+    // not a string, treat path as opts
+    if (typeof path !== 'string') {
+      return this.#fill(this.browsetemplate, path)
+    }
+
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browsetreetemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  // If the path is known to be a file, then browseFile should be used. For some hosts
+  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
+  // and `/blob/` in the path. With the default committish of `HEAD`, the `/tree/`
+  // path will redirect to a specific commit. Using the `/blob/` path avoids
+  // this and does not redirect to a different commit.
+  browseFile (path, ...args) {
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browseblobtemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  docs (opts) {
+    return this.#fill(this.docstemplate, opts)
+  }
+
+  bugs (opts) {
+    return this.#fill(this.bugstemplate, opts)
+  }
+
+  https (opts) {
+    return this.#fill(this.httpstemplate, opts)
+  }
+
+  git (opts) {
+    return this.#fill(this.gittemplate, opts)
+  }
+
+  shortcut (opts) {
+    return this.#fill(this.shortcuttemplate, opts)
+  }
+
+  path (opts) {
+    return this.#fill(this.pathtemplate, opts)
+  }
+
+  tarball (opts) {
+    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
+  }
+
+  file (path, opts) {
+    return this.#fill(this.filetemplate, { ...opts, path })
+  }
+
+  edit (path, opts) {
+    return this.#fill(this.edittemplate, { ...opts, path })
+  }
+
+  getDefaultRepresentation () {
+    return this.default
+  }
+
+  toString (opts) {
+    if (this.default && typeof this[this.default] === 'function') {
+      return this[this.default](opts)
+    }
+
+    return this.sshurl(opts)
+  }
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  GitHost.addHost(name, host)
+}
+
+module.exports = GitHost
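
Taken together, this is the public surface of hosted-git-info: parse once through the LRU-cached `fromUrl`, then render whichever representation a caller needs. A short usage sketch, with outputs following from the github templates in hosts.js:

const GitHost = require('./lib/index.js') // published as hosted-git-info
const info = GitHost.fromUrl('git+https://github.com/npm/cli.git#v10.0.0')
info.type        // 'github'
info.default     // 'https' (from the protocol table above)
info.shortcut()  // 'github:npm/cli#v10.0.0'
info.tarball()   // 'https://codeload.github.com/npm/cli/tar.gz/v10.0.0'
info.toString()  // 'git+https://github.com/npm/cli.git#v10.0.0' via httpstemplate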
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js
new file mode 100644
index 0000000000000..7d5489c008ab4
--- /dev/null
+++ b/node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js
@@ -0,0 +1,78 @@
+const url = require('url')
+
+const lastIndexOfBefore = (str, char, beforeChar) => {
+  const startPosition = str.indexOf(beforeChar)
+  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
+}
+
+const safeUrl = (u) => {
+  try {
+    return new url.URL(u)
+  } catch {
+    // this fn should never throw
+  }
+}
+
+// accepts input like git:github.com:user/repo and inserts the // after the first :
+const correctProtocol = (arg, protocols) => {
+  const firstColon = arg.indexOf(':')
+  const proto = arg.slice(0, firstColon + 1)
+  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
+    return arg
+  }
+
+  const firstAt = arg.indexOf('@')
+  if (firstAt > -1) {
+    if (firstAt > firstColon) {
+      return `git+ssh://${arg}`
+    } else {
+      return arg
+    }
+  }
+
+  const doubleSlash = arg.indexOf('//')
+  if (doubleSlash === firstColon + 1) {
+    return arg
+  }
+
+  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
+}
+
+// attempt to correct an scp-style url so that it will parse with `new URL()`
+const correctUrl = (giturl) => {
+  // ignore any @ that comes after the first hash, since that denotes the start
+  // of a committish, which can contain @ characters
+  const firstAt = lastIndexOfBefore(giturl, '@', '#')
+  // ignore colons that come after the hash since that could include colons such as:
+  // git@github.com:user/package-2#semver:^1.0.0
+  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
+
+  if (lastColonBeforeHash > firstAt) {
+    // the last : comes after the first @ (or there is no @)
+    // like it would in:
+    // proto://hostname.com:user/repo
+    // username@hostname.com:user/repo
+    // :password@hostname.com:user/repo
+    // username:password@hostname.com:user/repo
+    // proto://username@hostname.com:user/repo
+    // proto://:password@hostname.com:user/repo
+    // proto://username:password@hostname.com:user/repo
+    // then we replace the last : with a / to create a valid path
+    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
+  }
+
+  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
+    // we have no : at all
+    // as it would be in:
+    // username@hostname.com/user/repo
+    // then we prepend a protocol
+    giturl = `git+ssh://${giturl}`
+  }
+
+  return giturl
+}
+
+module.exports = (giturl, protocols) => {
+  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
+  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
+}
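
The net effect is that scp-style addresses, which `new URL()` rejects outright, are massaged into a form it accepts. A worked sketch of the correction path:

const parseUrl = require('./parse-url.js')
// 'git@github.com:npm/cli.git' is not a valid URL: the first safeUrl() fails,
// then correctUrl() swaps the path-separating ':' for '/' and, with no ':'
// left before the hash, prepends git+ssh://
const u = parseUrl('git@github.com:npm/cli.git#semver:^10')
u.protocol  // 'git+ssh:'
u.username  // 'git'
u.pathname  // '/npm/cli.git'
u.hash      // '#semver:^10' — colons after the hash are left alone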
diff --git a/node_modules/pacote/node_modules/hosted-git-info/package.json b/node_modules/pacote/node_modules/hosted-git-info/package.json
new file mode 100644
index 0000000000000..612259948afe7
--- /dev/null
+++ b/node_modules/pacote/node_modules/hosted-git-info/package.json
@@ -0,0 +1,59 @@
+{
+  "name": "hosted-git-info",
+  "version": "6.1.1",
+  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
+  "main": "./lib/index.js",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/hosted-git-info.git"
+  },
+  "keywords": [
+    "git",
+    "github",
+    "bitbucket",
+    "gitlab"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/hosted-git-info/issues"
+  },
+  "homepage": "https://github.com/npm/hosted-git-info",
+  "scripts": {
+    "posttest": "npm run lint",
+    "snap": "tap",
+    "test": "tap",
+    "test:coverage": "tap --coverage-report=html",
+    "lint": "eslint \"**/*.js\"",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run lint -- --fix",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "dependencies": {
+    "lru-cache": "^7.5.1"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.7.1",
+    "tap": "^16.0.1"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "tap": {
+    "color": 1,
+    "coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.7.1"
+  }
+}
diff --git a/node_modules/pacote/node_modules/npm-package-arg/LICENSE b/node_modules/pacote/node_modules/npm-package-arg/LICENSE
new file mode 100644
index 0000000000000..19cec97b18468
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-package-arg/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-package-arg/lib/npa.js b/node_modules/pacote/node_modules/npm-package-arg/lib/npa.js
new file mode 100644
index 0000000000000..36bd18cd9f9a6
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-package-arg/lib/npa.js
@@ -0,0 +1,431 @@
+'use strict'
+module.exports = npa
+module.exports.resolve = resolve
+module.exports.toPurl = toPurl
+module.exports.Result = Result
+
+const url = require('url')
+const HostedGit = require('hosted-git-info')
+const semver = require('semver')
+const path = global.FAKE_WINDOWS ? require('path').win32 : require('path')
+const validatePackageName = require('validate-npm-package-name')
+const { homedir } = require('os')
+const log = require('proc-log')
+
+const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
+const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
+const isURL = /^(?:git[+])?[a-z]+:/i
+const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
+const isFilename = /[.](?:tgz|tar.gz|tar)$/i
+
+function npa (arg, where) {
+  let name
+  let spec
+  if (typeof arg === 'object') {
+    if (arg instanceof Result && (!where || where === arg.where)) {
+      return arg
+    } else if (arg.name && arg.rawSpec) {
+      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
+    } else {
+      return npa(arg.raw, where || arg.where)
+    }
+  }
+  const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@')
+  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
+  if (isURL.test(arg)) {
+    spec = arg
+  } else if (isGit.test(arg)) {
+    spec = `git+ssh://${arg}`
+  } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) {
+    spec = arg
+  } else if (nameEndsAt > 0) {
+    name = namePart
+    spec = arg.slice(nameEndsAt + 1) || '*'
+  } else {
+    const valid = validatePackageName(arg)
+    if (valid.validForOldPackages) {
+      name = arg
+      spec = '*'
+    } else {
+      spec = arg
+    }
+  }
+  return resolve(name, spec, where, arg)
+}
+
+const isFilespec = isWindows ? /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
+
+function resolve (name, spec, where, arg) {
+  const res = new Result({
+    raw: arg,
+    name: name,
+    rawSpec: spec,
+    fromArgument: arg != null,
+  })
+
+  if (name) {
+    res.setName(name)
+  }
+
+  if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) {
+    return fromFile(res, where)
+  } else if (spec && /^npm:/i.test(spec)) {
+    return fromAlias(res, where)
+  }
+
+  const hosted = HostedGit.fromUrl(spec, {
+    noGitPlus: true,
+    noCommittish: true,
+  })
+  if (hosted) {
+    return fromHostedGit(res, hosted)
+  } else if (spec && isURL.test(spec)) {
+    return fromURL(res)
+  } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) {
+    return fromFile(res, where)
+  } else {
+    return fromRegistry(res)
+  }
+}
+
+const defaultRegistry = 'https://registry.npmjs.org'
+
+function toPurl (arg, reg = defaultRegistry) {
+  const res = npa(arg)
+
+  if (res.type !== 'version') {
+    throw invalidPurlType(res.type, res.raw)
+  }
+
+  // URI-encode leading @ of scoped packages
+  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
+  if (reg !== defaultRegistry) {
+    purl += '?repository_url=' + reg
+  }
+
+  return purl
+}
+
+function invalidPackageName (name, valid, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
+  err.code = 'EINVALIDPACKAGENAME'
+  return err
+}
+
+function invalidTagName (name, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
+  err.code = 'EINVALIDTAGNAME'
+  return err
+}
+
+function invalidPurlType (type, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
+  err.code = 'EINVALIDPURLTYPE'
+  return err
+}
+
+function Result (opts) {
+  this.type = opts.type
+  this.registry = opts.registry
+  this.where = opts.where
+  if (opts.raw == null) {
+    this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec
+  } else {
+    this.raw = opts.raw
+  }
+
+  this.name = undefined
+  this.escapedName = undefined
+  this.scope = undefined
+  this.rawSpec = opts.rawSpec || ''
+  this.saveSpec = opts.saveSpec
+  this.fetchSpec = opts.fetchSpec
+  if (opts.name) {
+    this.setName(opts.name)
+  }
+  this.gitRange = opts.gitRange
+  this.gitCommittish = opts.gitCommittish
+  this.gitSubdir = opts.gitSubdir
+  this.hosted = opts.hosted
+}
+
+Result.prototype.setName = function (name) {
+  const valid = validatePackageName(name)
+  if (!valid.validForOldPackages) {
+    throw invalidPackageName(name, valid, this.raw)
+  }
+
+  this.name = name
+  this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
+  // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
+  this.escapedName = name.replace('/', '%2f')
+  return this
+}
+
+Result.prototype.toString = function () {
+  const full = []
+  if (this.name != null && this.name !== '') {
+    full.push(this.name)
+  }
+  const spec = this.saveSpec || this.fetchSpec || this.rawSpec
+  if (spec != null && spec !== '') {
+    full.push(spec)
+  }
+  return full.length ? full.join('@') : this.raw
+}
+
+Result.prototype.toJSON = function () {
+  const result = Object.assign({}, this)
+  delete result.hosted
+  return result
+}
+
+function setGitCommittish (res, committish) {
+  if (!committish) {
+    res.gitCommittish = null
+    return res
+  }
+
+  // for each ::-separated item:
+  for (const part of committish.split('::')) {
+    // if the item has no : then it is a commit-ish
+    if (!part.includes(':')) {
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a committish')
+      }
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a second committish')
+      }
+      res.gitCommittish = part
+      continue
+    }
+    // split on name:value
+    const [name, value] = part.split(':')
+    // if name is semver do semver lookup of ref or tag
+    if (name === 'semver') {
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a semver range')
+      }
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a second semver range')
+      }
+      res.gitRange = decodeURIComponent(value)
+      continue
+    }
+    if (name === 'path') {
+      if (res.gitSubdir) {
+        throw new Error('cannot override existing path with a second path')
+      }
+      res.gitSubdir = `/${value}`
+      continue
+    }
+    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
+  }
+
+  return res
+}
+
+function fromFile (res, where) {
+  if (!where) {
+    where = process.cwd()
+  }
+  res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory'
+  res.where = where
+
+  // always put the '/' on where when resolving urls, or else
+  // file:foo from /path/to/bar goes to /path/to/foo, when we want
+  // it to be /path/to/bar/foo
+
+  let specUrl
+  let resolvedUrl
+  const prefix = (!/^file:/.test(res.rawSpec) ? 'file:' : '')
+  const rawWithPrefix = prefix + res.rawSpec
+  let rawNoPrefix = rawWithPrefix.replace(/^file:/, '')
+  try {
+    resolvedUrl = new url.URL(rawWithPrefix, `file://${path.resolve(where)}/`)
+    specUrl = new url.URL(rawWithPrefix)
+  } catch (originalError) {
+    const er = new Error('Invalid file: URL, must comply with RFC 8909')
+    throw Object.assign(er, {
+      raw: res.rawSpec,
+      spec: res,
+      where,
+      originalError,
+    })
+  }
+
+  // environment switch for testing
+  if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') {
+    // XXX backwards compatibility: intentionally not compliant with RFC 8909.
+    // Remove when we want a breaking change to come into RFC compliance.
+    if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
+      const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
+      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
+      specUrl = new url.URL(rawSpec)
+      rawNoPrefix = rawSpec.replace(/^file:/, '')
+    }
+    // turn file:/../foo into file:../foo
+    // for 1, 2 or 3 leading slashes since we attempted
+    // in the previous step to make it a file protocol url with a leading slash
+    if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
+      const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
+      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
+      specUrl = new url.URL(rawSpec)
+      rawNoPrefix = rawSpec.replace(/^file:/, '')
+    }
+    // XXX end 8909 violation backwards compatibility section
+  }
+
+  // file:foo - relative url to ./foo
+  // file:/foo - absolute path /foo
+  // file:///foo - absolute path to /foo, no authority host
+  // file://localhost/foo - absolute path to /foo, on localhost
+  // file://foo - absolute path to / on foo host (error!)
+  if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
+    const msg = `Invalid file: URL, must be absolute if // present`
+    throw Object.assign(new Error(msg), {
+      raw: res.rawSpec,
+      parsed: resolvedUrl,
+    })
+  }
+
+  // turn /C:/blah into just C:/blah on windows
+  let specPath = decodeURIComponent(specUrl.pathname)
+  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
+  if (isWindows) {
+    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
+    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
+  }
+
+  // replace ~ with homedir, but keep the ~ in the saveSpec
+  // otherwise, make it relative to the where param
+  if (/^\/~(\/|$)/.test(specPath)) {
+    res.saveSpec = `file:${specPath.substr(1)}`
+    resolvedPath = path.resolve(homedir(), specPath.substr(3))
+  } else if (!path.isAbsolute(rawNoPrefix)) {
+    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
+  } else {
+    res.saveSpec = `file:${path.resolve(resolvedPath)}`
+  }
+
+  res.fetchSpec = path.resolve(where, resolvedPath)
+  return res
+}
+
+function fromHostedGit (res, hosted) {
+  res.type = 'git'
+  res.hosted = hosted
+  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
+  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
+  return setGitCommittish(res, hosted.committish)
+}
+
+function unsupportedURLType (protocol, spec) {
+  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
+  err.code = 'EUNSUPPORTEDPROTOCOL'
+  return err
+}
+
+function matchGitScp (spec) {
+  // git ssh specifiers are overloaded to also use scp-style git
+  // specifiers, so we have to parse those out and treat them specially.
+  // They are NOT true URIs, so we can't hand them to `url.parse`.
+  //
+  // This regex looks for things that look like:
+  // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
+  //
+  // ...and various combinations. The username in the beginning is *required*.
+  const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
+  return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && {
+    fetchSpec: matched[1],
+    gitCommittish: matched[2] == null ? null : matched[2],
+  }
+}
+
+function fromURL (res) {
+  // eslint-disable-next-line node/no-deprecated-api
+  const urlparse = url.parse(res.rawSpec)
+  res.saveSpec = res.rawSpec
+  // check the protocol, and then see if it's git or not
+  switch (urlparse.protocol) {
+    case 'git:':
+    case 'git+http:':
+    case 'git+https:':
+    case 'git+rsync:':
+    case 'git+ftp:':
+    case 'git+file:':
+    case 'git+ssh:': {
+      res.type = 'git'
+      const match = urlparse.protocol === 'git+ssh:' ? matchGitScp(res.rawSpec)
+        : null
+      if (match) {
+        setGitCommittish(res, match.gitCommittish)
+        res.fetchSpec = match.fetchSpec
+      } else {
+        setGitCommittish(res, urlparse.hash != null ? urlparse.hash.slice(1) : '')
+        urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '')
+        if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) {
+          // keep the drive letter : on windows file paths
+          urlparse.host += ':'
+          urlparse.hostname += ':'
+        }
+        delete urlparse.hash
+        res.fetchSpec = url.format(urlparse)
+      }
+      break
+    }
+    case 'http:':
+    case 'https:':
+      res.type = 'remote'
+      res.fetchSpec = res.saveSpec
+      break
+
+    default:
+      throw unsupportedURLType(urlparse.protocol, res.rawSpec)
+  }
+
+  return res
+}
+
+function fromAlias (res, where) {
+  const subSpec = npa(res.rawSpec.substr(4), where)
+  if (subSpec.type === 'alias') {
+    throw new Error('nested aliases not supported')
+  }
+
+  if (!subSpec.registry) {
+    throw new Error('aliases only work for registry deps')
+  }
+
+  res.subSpec = subSpec
+  res.registry = true
+  res.type = 'alias'
+  res.saveSpec = null
+  res.fetchSpec = null
+  return res
+}
+
+function fromRegistry (res) {
+  res.registry = true
+  const spec = res.rawSpec.trim()
+  // no save spec for registry components, since we save based on the fetched
+  // version rather than on the argument, so it can't be computed here.
+  res.saveSpec = null
+  res.fetchSpec = spec
+  const version = semver.valid(spec, true)
+  const range = semver.validRange(spec, true)
+  if (version) {
+    res.type = 'version'
+  } else if (range) {
+    res.type = 'range'
+  } else {
+    if (encodeURIComponent(spec) !== spec) {
+      throw invalidTagName(spec, res.raw)
+    }
+    res.type = 'tag'
+  }
+  return res
+}
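
In short, npa() routes a raw spec to one of the resolver functions above and returns a Result whose `type` tells callers how to fetch it. A few representative sketches, with field values following from the code above:

const npa = require('./lib/npa.js') // published as npm-package-arg
npa('foo@^1.2.3')
// -> Result { type: 'range', registry: true, name: 'foo', fetchSpec: '^1.2.3' }
npa('github:npm/cli#semver:^10')
// -> Result { type: 'git', hosted: GitHost {...}, gitRange: '^10', fetchSpec: null }
npa('bar@npm:foo@latest')
// -> Result { type: 'alias', subSpec: Result { type: 'tag', name: 'foo', fetchSpec: 'latest' } }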
diff --git a/node_modules/pacote/node_modules/npm-package-arg/package.json b/node_modules/pacote/node_modules/npm-package-arg/package.json
new file mode 100644
index 0000000000000..bb9e71b258a93
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-package-arg/package.json
@@ -0,0 +1,59 @@
+{
+  "name": "npm-package-arg",
+  "version": "10.1.0",
+  "description": "Parse the things that can be arguments to `npm install`",
+  "main": "./lib/npa.js",
+  "directories": {
+    "test": "test"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "dependencies": {
+    "hosted-git-info": "^6.0.0",
+    "proc-log": "^3.0.0",
+    "semver": "^7.3.5",
+    "validate-npm-package-name": "^5.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.10.0",
+    "tap": "^16.0.1"
+  },
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "npmclilint": "npmcli-lint",
+    "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/npm-package-arg.git"
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/npm-package-arg/issues"
+  },
+  "homepage": "https://github.com/npm/npm-package-arg",
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "tap": {
+    "branches": 97,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.10.0"
+  }
+}
diff --git a/package-lock.json b/package-lock.json
index 0d171a58688a3..f6716731f65bf 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -131,7 +131,7 @@
         "nopt": "^7.2.0",
         "npm-audit-report": "^5.0.0",
         "npm-install-checks": "^6.2.0",
-        "npm-package-arg": "^10.1.0",
+        "npm-package-arg": "^11.0.0",
         "npm-pick-manifest": "^8.0.2",
         "npm-profile": "^8.0.0",
         "npm-registry-fetch": "^15.0.0",
@@ -229,7 +229,7 @@
         "@npmcli/template-oss": "4.18.0",
         "json-stringify-safe": "^5.0.1",
         "nock": "^13.3.0",
-        "npm-package-arg": "^10.1.0",
+        "npm-package-arg": "^11.0.0",
         "pacote": "^16.0.0",
         "tap": "^16.3.4"
       },
@@ -2706,6 +2706,21 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/npm-package-arg": {
+      "version": "10.1.0",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
+      "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
+      "dev": true,
+      "dependencies": {
+        "hosted-git-info": "^6.0.0",
+        "proc-log": "^3.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^5.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@octokit/auth-token": {
       "version": "3.0.3",
       "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.3.tgz",
@@ -6764,6 +6779,33 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/init-package-json/node_modules/hosted-git-info": {
+      "version": "6.1.1",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
+      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
+      "inBundle": true,
+      "dependencies": {
+        "lru-cache": "^7.5.1"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/init-package-json/node_modules/npm-package-arg": {
+      "version": "10.1.0",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
+      "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
+      "inBundle": true,
+      "dependencies": {
+        "hosted-git-info": "^6.0.0",
+        "proc-log": "^3.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^5.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/internal-slot": {
       "version": "1.0.5",
       "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz",
@@ -9796,30 +9838,18 @@
       }
     },
     "node_modules/npm-package-arg": {
-      "version": "10.1.0",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
-      "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
+      "version": "11.0.0",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.0.tgz",
+      "integrity": "sha512-D8sItaQ8n6VlBUFed3DLz2sCpkabRAjaiLkTamDppvh8lmmAPirzNfBuhJd/2rlmoxZ2S9mOHmIEvzV2z2jOeA==",
       "inBundle": true,
       "dependencies": {
-        "hosted-git-info": "^6.0.0",
+        "hosted-git-info": "^7.0.0",
         "proc-log": "^3.0.0",
         "semver": "^7.3.5",
         "validate-npm-package-name": "^5.0.0"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/npm-package-arg/node_modules/hosted-git-info": {
-      "version": "6.1.1",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
-      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
-      "inBundle": true,
-      "dependencies": {
-        "lru-cache": "^7.5.1"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/npm-packlist": {
@@ -9849,6 +9879,33 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/npm-pick-manifest/node_modules/hosted-git-info": {
+      "version": "6.1.1",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
+      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
+      "inBundle": true,
+      "dependencies": {
+        "lru-cache": "^7.5.1"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/npm-pick-manifest/node_modules/npm-package-arg": {
+      "version": "10.1.0",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
+      "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
+      "inBundle": true,
+      "dependencies": {
+        "hosted-git-info": "^6.0.0",
+        "proc-log": "^3.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^5.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/npm-profile": {
       "version": "8.0.0",
       "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-8.0.0.tgz",
@@ -9916,6 +9973,18 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/npm-registry-fetch/node_modules/hosted-git-info": {
+      "version": "6.1.1",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
+      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
+      "inBundle": true,
+      "dependencies": {
+        "lru-cache": "^7.5.1"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/npm-registry-fetch/node_modules/make-fetch-happen": {
       "version": "12.0.0",
       "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-12.0.0.tgz",
@@ -9938,6 +10007,21 @@
         "node": "^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/npm-registry-fetch/node_modules/npm-package-arg": {
+      "version": "10.1.0",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
+      "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
+      "inBundle": true,
+      "dependencies": {
+        "hosted-git-info": "^6.0.0",
+        "proc-log": "^3.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^5.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/npm-run-path": {
       "version": "4.0.1",
       "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz",
@@ -10545,6 +10629,33 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/pacote/node_modules/hosted-git-info": {
+      "version": "6.1.1",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
+      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
+      "inBundle": true,
+      "dependencies": {
+        "lru-cache": "^7.5.1"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/pacote/node_modules/npm-package-arg": {
+      "version": "10.1.0",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
+      "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
+      "inBundle": true,
+      "dependencies": {
+        "hosted-git-info": "^6.0.0",
+        "proc-log": "^3.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^5.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/parent-module": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -16128,7 +16239,7 @@
         "minimatch": "^9.0.0",
         "nopt": "^7.0.0",
         "npm-install-checks": "^6.2.0",
-        "npm-package-arg": "^10.1.0",
+        "npm-package-arg": "^11.0.0",
         "npm-pick-manifest": "^8.0.2",
         "npm-registry-fetch": "^15.0.0",
         "npmlog": "^7.0.1",
@@ -16188,7 +16299,7 @@
       "version": "7.0.2",
       "license": "ISC",
       "dependencies": {
-        "npm-package-arg": "^10.1.0",
+        "npm-package-arg": "^11.0.0",
         "npm-registry-fetch": "^15.0.0"
       },
       "devDependencies": {
@@ -16212,7 +16323,7 @@
         "binary-extensions": "^2.2.0",
         "diff": "^5.1.0",
         "minimatch": "^9.0.0",
-        "npm-package-arg": "^10.1.0",
+        "npm-package-arg": "^11.0.0",
         "pacote": "^16.0.0",
         "tar": "^6.1.13"
       },
@@ -16232,7 +16343,7 @@
         "@npmcli/arborist": "^6.3.0",
         "@npmcli/run-script": "^6.0.0",
         "ci-info": "^3.7.1",
-        "npm-package-arg": "^10.1.0",
+        "npm-package-arg": "^11.0.0",
         "npmlog": "^7.0.1",
         "pacote": "^16.0.0",
         "proc-log": "^3.0.0",
@@ -16311,7 +16422,7 @@
       "dependencies": {
         "@npmcli/arborist": "^6.3.0",
         "@npmcli/run-script": "^6.0.0",
-        "npm-package-arg": "^10.1.0",
+        "npm-package-arg": "^11.0.0",
         "pacote": "^16.0.0"
       },
       "devDependencies": {
@@ -16331,7 +16442,7 @@
       "dependencies": {
         "ci-info": "^3.6.1",
         "normalize-package-data": "^6.0.0",
-        "npm-package-arg": "^10.1.0",
+        "npm-package-arg": "^11.0.0",
         "npm-registry-fetch": "^15.0.0",
         "proc-log": "^3.0.0",
         "semver": "^7.3.7",
diff --git a/package.json b/package.json
index b56506563983d..6c5504b8f3520 100644
--- a/package.json
+++ b/package.json
@@ -96,7 +96,7 @@
     "nopt": "^7.2.0",
     "npm-audit-report": "^5.0.0",
     "npm-install-checks": "^6.2.0",
-    "npm-package-arg": "^10.1.0",
+    "npm-package-arg": "^11.0.0",
     "npm-pick-manifest": "^8.0.2",
     "npm-profile": "^8.0.0",
     "npm-registry-fetch": "^15.0.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 39f85e857008d..c05be0d03659d 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -22,7 +22,7 @@
     "minimatch": "^9.0.0",
     "nopt": "^7.0.0",
     "npm-install-checks": "^6.2.0",
-    "npm-package-arg": "^10.1.0",
+    "npm-package-arg": "^11.0.0",
     "npm-pick-manifest": "^8.0.2",
     "npm-registry-fetch": "^15.0.0",
     "npmlog": "^7.0.1",
diff --git a/workspaces/libnpmaccess/package.json b/workspaces/libnpmaccess/package.json
index 3ad8bab6f80ef..a78674049c784 100644
--- a/workspaces/libnpmaccess/package.json
+++ b/workspaces/libnpmaccess/package.json
@@ -29,7 +29,7 @@
   "bugs": "https://github.com/npm/libnpmaccess/issues",
   "homepage": "https://npmjs.com/package/libnpmaccess",
   "dependencies": {
-    "npm-package-arg": "^10.1.0",
+    "npm-package-arg": "^11.0.0",
     "npm-registry-fetch": "^15.0.0"
   },
   "engines": {
diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json
index ced4ae5443650..1bdcba1f3b5dc 100644
--- a/workspaces/libnpmdiff/package.json
+++ b/workspaces/libnpmdiff/package.json
@@ -52,7 +52,7 @@
     "binary-extensions": "^2.2.0",
     "diff": "^5.1.0",
     "minimatch": "^9.0.0",
-    "npm-package-arg": "^10.1.0",
+    "npm-package-arg": "^11.0.0",
     "pacote": "^16.0.0",
     "tar": "^6.1.13"
   },
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index 90b2e6b2e74d0..a6501d3169caa 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -62,7 +62,7 @@
     "@npmcli/arborist": "^6.3.0",
     "@npmcli/run-script": "^6.0.0",
     "ci-info": "^3.7.1",
-    "npm-package-arg": "^10.1.0",
+    "npm-package-arg": "^11.0.0",
     "npmlog": "^7.0.1",
     "pacote": "^16.0.0",
     "proc-log": "^3.0.0",
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index e9b915466acc4..f5e98878096f9 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -38,7 +38,7 @@
   "dependencies": {
     "@npmcli/arborist": "^6.3.0",
     "@npmcli/run-script": "^6.0.0",
-    "npm-package-arg": "^10.1.0",
+    "npm-package-arg": "^11.0.0",
     "pacote": "^16.0.0"
   },
   "engines": {
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index f2c117a0bef5c..0cb19f7950fe4 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -40,7 +40,7 @@
   "dependencies": {
     "ci-info": "^3.6.1",
     "normalize-package-data": "^6.0.0",
-    "npm-package-arg": "^10.1.0",
+    "npm-package-arg": "^11.0.0",
     "npm-registry-fetch": "^15.0.0",
     "proc-log": "^3.0.0",
     "semver": "^7.3.7",

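One note on the ^10.1.0 to ^11.0.0 bumps above: a caret range never satisfies a new major version, which is why every consumer manifest and the lockfile have to move together. A quick check with the semver package already in the dependency tree:

const semver = require('semver')
semver.satisfies('11.0.0', '^10.1.0') // false — a major bump escapes the caret range
semver.satisfies('10.2.3', '^10.1.0') // true
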
From f166d99405022776ec7649d3956999bfee623bbc Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 12:16:43 -0700
Subject: [PATCH 35/68] deps: hoist lru-cache@10.0.1

---
 DEPENDENCIES.md                               |    1 +
 node_modules/.gitignore                       |   24 +-
 .../node_modules/lru-cache/dist/cjs/index.js  | 1404 -----------------
 .../lru-cache/dist/cjs/index.min.js           |    2 -
 .../lru-cache/dist/cjs/package.json           |    3 -
 .../node_modules/lru-cache/dist/mjs/index.js  | 1400 ----------------
 .../lru-cache/dist/mjs/index.min.js           |    2 -
 .../lru-cache/dist/mjs/package.json           |    3 -
 .../node_modules/lru-cache/LICENSE            |    0
 .../node_modules}/lru-cache/index.js          |    0
 .../node_modules}/lru-cache/index.mjs         |    0
 .../node_modules/lru-cache/package.json       |   70 +-
 .../node_modules/lru-cache/dist/cjs/index.js  | 1404 -----------------
 .../lru-cache/dist/cjs/index.min.js           |    2 -
 .../lru-cache/dist/cjs/package.json           |    3 -
 .../node_modules/lru-cache/dist/mjs/index.js  | 1400 ----------------
 .../lru-cache/dist/mjs/index.min.js           |    2 -
 .../lru-cache/dist/mjs/package.json           |    3 -
 .../node_modules/lru-cache/dist/cjs/index.js  | 1404 -----------------
 .../lru-cache/dist/cjs/index.min.js           |    2 -
 .../lru-cache/dist/cjs/package.json           |    3 -
 .../node_modules/lru-cache/dist/mjs/index.js  | 1400 ----------------
 .../lru-cache/dist/mjs/index.min.js           |    2 -
 .../lru-cache/dist/mjs/package.json           |    3 -
 .../node_modules/lru-cache/LICENSE            |    0
 .../node_modules/lru-cache/index.js           | 1227 ++++++++++++++
 .../node_modules/lru-cache/index.mjs          | 1227 ++++++++++++++
 .../node_modules/lru-cache/package.json       |   70 +-
 .../lru-cache/dist/cjs/index.js               |    0
 .../lru-cache/dist/cjs/index.min.js           |    0
 .../lru-cache/dist/cjs/package.json           |    0
 .../lru-cache/dist/mjs/index.js               |    0
 .../lru-cache/dist/mjs/index.min.js           |    0
 .../lru-cache/dist/mjs/package.json           |    0
 node_modules/lru-cache/package.json           |   70 +-
 .../node_modules/lru-cache/LICENSE            |    0
 .../node-gyp/node_modules/lru-cache/index.js  | 1227 ++++++++++++++
 .../node-gyp/node_modules/lru-cache/index.mjs | 1227 ++++++++++++++
 .../node_modules/lru-cache/package.json       |   70 +-
 .../node_modules/lru-cache/LICENSE            |    0
 .../node_modules/lru-cache/index.js           | 1227 ++++++++++++++
 .../node_modules/lru-cache/index.mjs          | 1227 ++++++++++++++
 .../node_modules/lru-cache/package.json       |   70 +-
 .../node_modules/lru-cache/LICENSE            |    0
 .../node_modules/lru-cache/index.js           | 1227 ++++++++++++++
 .../node_modules/lru-cache/index.mjs          | 1227 ++++++++++++++
 .../node_modules/lru-cache/package.json       |   96 ++
 .../node_modules/lru-cache/LICENSE            |   15 +
 .../node_modules/lru-cache/index.js           | 1227 ++++++++++++++
 .../node_modules/lru-cache/index.mjs          | 1227 ++++++++++++++
 .../node_modules/lru-cache/package.json       |   96 ++
 .../pacote/node_modules/lru-cache/LICENSE     |   15 +
 .../pacote/node_modules/lru-cache/index.js    | 1227 ++++++++++++++
 .../pacote/node_modules/lru-cache/index.mjs   | 1227 ++++++++++++++
 .../node_modules/lru-cache/package.json       |   96 ++
 .../node_modules/lru-cache/dist/cjs/index.js  | 1404 -----------------
 .../lru-cache/dist/cjs/index.min.js           |    2 -
 .../lru-cache/dist/cjs/package.json           |    3 -
 .../node_modules/lru-cache/dist/mjs/index.js  | 1400 ----------------
 .../lru-cache/dist/mjs/index.min.js           |    2 -
 .../lru-cache/dist/mjs/package.json           |    3 -
 .../node_modules/lru-cache/package.json       |  108 --
 .../sigstore/node_modules/lru-cache/LICENSE   |   15 +
 .../sigstore/node_modules/lru-cache/index.js  | 1227 ++++++++++++++
 .../sigstore/node_modules/lru-cache/index.mjs | 1227 ++++++++++++++
 .../node_modules/lru-cache/package.json       |   96 ++
 .../tuf-js/node_modules/lru-cache/LICENSE     |   15 +
 .../tuf-js/node_modules/lru-cache/index.js    | 1227 ++++++++++++++
 .../tuf-js/node_modules/lru-cache/index.mjs   | 1227 ++++++++++++++
 .../node_modules/lru-cache/package.json       |   96 ++
 package-lock.json                             |  142 +-
 71 files changed, 20432 insertions(+), 11621 deletions(-)
 delete mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.min.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/package.json
 delete mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.min.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/package.json
 rename node_modules/@npmcli/{agent => metavuln-calculator}/node_modules/lru-cache/LICENSE (100%)
 rename node_modules/{ => @npmcli/metavuln-calculator/node_modules}/lru-cache/index.js (100%)
 rename node_modules/{ => @npmcli/metavuln-calculator/node_modules}/lru-cache/index.mjs (100%)
 rename node_modules/@npmcli/{agent => metavuln-calculator}/node_modules/lru-cache/package.json (52%)
 delete mode 100644 node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js
 delete mode 100644 node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js
 delete mode 100644 node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json
 delete mode 100644 node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js
 delete mode 100644 node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js
 delete mode 100644 node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json
 delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.js
 delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.min.js
 delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/package.json
 delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.js
 delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.min.js
 delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/package.json
 rename node_modules/{@npmcli/git => init-package-json}/node_modules/lru-cache/LICENSE (100%)
 create mode 100644 node_modules/init-package-json/node_modules/lru-cache/index.js
 create mode 100644 node_modules/init-package-json/node_modules/lru-cache/index.mjs
 rename node_modules/{cacache => init-package-json}/node_modules/lru-cache/package.json (52%)
 rename node_modules/{@npmcli/agent/node_modules => }/lru-cache/dist/cjs/index.js (100%)
 rename node_modules/{@npmcli/agent/node_modules => }/lru-cache/dist/cjs/index.min.js (100%)
 rename node_modules/{@npmcli/agent/node_modules => }/lru-cache/dist/cjs/package.json (100%)
 rename node_modules/{@npmcli/agent/node_modules => }/lru-cache/dist/mjs/index.js (100%)
 rename node_modules/{@npmcli/agent/node_modules => }/lru-cache/dist/mjs/index.min.js (100%)
 rename node_modules/{@npmcli/agent/node_modules => }/lru-cache/dist/mjs/package.json (100%)
 rename node_modules/{cacache => node-gyp}/node_modules/lru-cache/LICENSE (100%)
 create mode 100644 node_modules/node-gyp/node_modules/lru-cache/index.js
 create mode 100644 node_modules/node-gyp/node_modules/lru-cache/index.mjs
 rename node_modules/{@npmcli/git => node-gyp}/node_modules/lru-cache/package.json (52%)
 rename node_modules/{hosted-git-info => normalize-package-data}/node_modules/lru-cache/LICENSE (100%)
 create mode 100644 node_modules/normalize-package-data/node_modules/lru-cache/index.js
 create mode 100644 node_modules/normalize-package-data/node_modules/lru-cache/index.mjs
 rename node_modules/{hosted-git-info => normalize-package-data}/node_modules/lru-cache/package.json (52%)
 rename node_modules/{path-scurry => npm-pick-manifest}/node_modules/lru-cache/LICENSE (100%)
 create mode 100644 node_modules/npm-pick-manifest/node_modules/lru-cache/index.js
 create mode 100644 node_modules/npm-pick-manifest/node_modules/lru-cache/index.mjs
 create mode 100644 node_modules/npm-pick-manifest/node_modules/lru-cache/package.json
 create mode 100644 node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE
 create mode 100644 node_modules/npm-registry-fetch/node_modules/lru-cache/index.js
 create mode 100644 node_modules/npm-registry-fetch/node_modules/lru-cache/index.mjs
 create mode 100644 node_modules/npm-registry-fetch/node_modules/lru-cache/package.json
 create mode 100644 node_modules/pacote/node_modules/lru-cache/LICENSE
 create mode 100644 node_modules/pacote/node_modules/lru-cache/index.js
 create mode 100644 node_modules/pacote/node_modules/lru-cache/index.mjs
 create mode 100644 node_modules/pacote/node_modules/lru-cache/package.json
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/dist/cjs/package.json
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/dist/mjs/package.json
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/package.json
 create mode 100644 node_modules/sigstore/node_modules/lru-cache/LICENSE
 create mode 100644 node_modules/sigstore/node_modules/lru-cache/index.js
 create mode 100644 node_modules/sigstore/node_modules/lru-cache/index.mjs
 create mode 100644 node_modules/sigstore/node_modules/lru-cache/package.json
 create mode 100644 node_modules/tuf-js/node_modules/lru-cache/LICENSE
 create mode 100644 node_modules/tuf-js/node_modules/lru-cache/index.js
 create mode 100644 node_modules/tuf-js/node_modules/lru-cache/index.mjs
 create mode 100644 node_modules/tuf-js/node_modules/lru-cache/package.json

diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 324dbb190ca34..4a2ff45175b42 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -524,6 +524,7 @@ graph LR;
   npm-->libnpmteam;
   npm-->libnpmversion;
   npm-->licensee;
+  npm-->lru-cache;
   npm-->make-fetch-happen;
   npm-->minimatch;
   npm-->minipass-pipeline;
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 71679c6fedcfd..e39385838321d 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -19,21 +19,16 @@
 !/@npmcli/
 /@npmcli/*
 !/@npmcli/agent
-!/@npmcli/agent/node_modules/
-/@npmcli/agent/node_modules/*
-!/@npmcli/agent/node_modules/lru-cache
 !/@npmcli/disparity-colors
 !/@npmcli/fs
 !/@npmcli/git
-!/@npmcli/git/node_modules/
-/@npmcli/git/node_modules/*
-!/@npmcli/git/node_modules/lru-cache
 !/@npmcli/installed-package-contents
 !/@npmcli/map-workspaces
 !/@npmcli/metavuln-calculator
 !/@npmcli/metavuln-calculator/node_modules/
 /@npmcli/metavuln-calculator/node_modules/*
 !/@npmcli/metavuln-calculator/node_modules/cacache
+!/@npmcli/metavuln-calculator/node_modules/lru-cache
 !/@npmcli/name-from-folder
 !/@npmcli/node-gyp
 !/@npmcli/package-json
@@ -75,9 +70,6 @@
 !/buffer
 !/builtins
 !/cacache
-!/cacache/node_modules/
-/cacache/node_modules/*
-!/cacache/node_modules/lru-cache
 !/chalk
 !/chownr
 !/ci-info
@@ -125,9 +117,6 @@
 !/has-unicode
 !/has
 !/hosted-git-info
-!/hosted-git-info/node_modules/
-/hosted-git-info/node_modules/*
-!/hosted-git-info/node_modules/lru-cache
 !/http-cache-semantics
 !/http-proxy-agent
 !/https-proxy-agent
@@ -144,6 +133,7 @@
 !/init-package-json/node_modules/
 /init-package-json/node_modules/*
 !/init-package-json/node_modules/hosted-git-info
+!/init-package-json/node_modules/lru-cache
 !/init-package-json/node_modules/npm-package-arg
 !/ip-regex
 !/ip
@@ -206,6 +196,7 @@
 !/node-gyp/node_modules/cacache/node_modules/minipass
 !/node-gyp/node_modules/gauge
 !/node-gyp/node_modules/glob
+!/node-gyp/node_modules/lru-cache
 !/node-gyp/node_modules/make-fetch-happen
 !/node-gyp/node_modules/minimatch
 !/node-gyp/node_modules/minipass
@@ -219,6 +210,7 @@
 !/normalize-package-data/node_modules/
 /normalize-package-data/node_modules/*
 !/normalize-package-data/node_modules/hosted-git-info
+!/normalize-package-data/node_modules/lru-cache
 !/npm-audit-report
 !/npm-bundled
 !/npm-install-checks
@@ -229,6 +221,7 @@
 !/npm-pick-manifest/node_modules/
 /npm-pick-manifest/node_modules/*
 !/npm-pick-manifest/node_modules/hosted-git-info
+!/npm-pick-manifest/node_modules/lru-cache
 !/npm-pick-manifest/node_modules/npm-package-arg
 !/npm-profile
 !/npm-registry-fetch
@@ -239,6 +232,7 @@
 !/npm-registry-fetch/node_modules/@npmcli/agent
 !/npm-registry-fetch/node_modules/cacache
 !/npm-registry-fetch/node_modules/hosted-git-info
+!/npm-registry-fetch/node_modules/lru-cache
 !/npm-registry-fetch/node_modules/make-fetch-happen
 !/npm-registry-fetch/node_modules/npm-package-arg
 !/npm-user-validate
@@ -253,14 +247,12 @@
 !/pacote/node_modules/@npmcli/git
 !/pacote/node_modules/cacache
 !/pacote/node_modules/hosted-git-info
+!/pacote/node_modules/lru-cache
 !/pacote/node_modules/npm-package-arg
 !/parse-conflict-json
 !/path-is-absolute
 !/path-key
 !/path-scurry
-!/path-scurry/node_modules/
-/path-scurry/node_modules/*
-!/path-scurry/node_modules/lru-cache
 !/postcss-selector-parser
 !/proc-log
 !/process
@@ -299,6 +291,7 @@
 !/sigstore/node_modules/cacache/node_modules/
 /sigstore/node_modules/cacache/node_modules/*
 !/sigstore/node_modules/cacache/node_modules/minipass
+!/sigstore/node_modules/lru-cache
 !/sigstore/node_modules/make-fetch-happen
 !/sigstore/node_modules/minipass
 !/smart-buffer
@@ -333,6 +326,7 @@
 !/tuf-js/node_modules/cacache/node_modules/
 /tuf-js/node_modules/cacache/node_modules/*
 !/tuf-js/node_modules/cacache/node_modules/minipass
+!/tuf-js/node_modules/lru-cache
 !/tuf-js/node_modules/make-fetch-happen
 !/tuf-js/node_modules/minipass
 !/unique-filename
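
For context, the `.gitignore` churn above follows from gitignore's matching rules: a path cannot be re-included if a parent directory of it is excluded, so every vendored nested dependency needs the three-line allow-list pattern seen throughout this file. A minimal sketch of that pattern, using one of the entries from this hunk:

# re-include the node_modules directory entry, ignore its contents,
# then un-ignore only the nested package that is kept:
!/npm-pick-manifest/node_modules/
/npm-pick-manifest/node_modules/*
!/npm-pick-manifest/node_modules/lru-cache
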
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.js
deleted file mode 100644
index 02d76ec800a92..0000000000000
--- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.js
+++ /dev/null
@@ -1,1404 +0,0 @@
-"use strict";
-/**
- * @module LRUCache
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.LRUCache = void 0;
-const perf = typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function'
-    ? performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * All properties from the options object (with the exception of
- * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
- * normal public members. (`max` and `maxSize` are read-only getters.)
- * Changing any of these will alter the defaults for subsequent method calls,
- * but is otherwise safe.
- */
-class LRUCache {
-    // properties coming in from the options of these, only max and maxSize
-    // really *need* to be protected. The rest can be modified, as they just
-    // set defaults for various methods.
-    #max;
-    #maxSize;
-    #dispose;
-    #disposeAfter;
-    #fetchMethod;
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0
-                ? ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the remaining TTL time for a given entry key
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.delete(this.#keyList[index]);
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (ttl === 0 || start === 0) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            return (ttls[index] !== 0 &&
-                starts[index] !== 0 &&
-                (cachedNow || getNow()) - starts[index] > ttls[index]);
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to Array.find().  fn is called as fn(value, key, cache).
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from
-     * most recently used to least recently used.  fn is called as
- * fn(value, key, cache).  Does not update age or recency of use.
-     * Does not iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.delete(this.#keyList[i]);
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to cache.load()
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     * Note that the shape of the resulting cache may be different if the
-     * same options are not used in both caches.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.delete(k);
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0
-                ? this.#tail
-                : this.#free.length !== 0
-                    ? this.#free.pop()
-                    : this.#size === this.#max
-                        ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
-                        ? oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined &&
-            (allowStale || !this.#isStale(index))) {
-            const v = this.#valList[index];
-            // either stale and allowed, or forcing a refresh of non-stale value
-            return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        }
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.delete(k);
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.delete(k);
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.delete(k);
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.clear();
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, 'delete');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, 'delete']);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        this.#next[this.#prev[index]] = this.#next[index];
-                        this.#prev[this.#next[index]] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, 'delete');
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, 'delete']);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-exports.LRUCache = LRUCache;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
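
For reference, the file removed above is the CommonJS build of lru-cache; per the rename entries earlier in this patch, the same dist files are hoisted to a top-level node_modules/lru-cache. A minimal usage sketch of the API that file implements (option and method names are taken from the source above; someLookup is a hypothetical placeholder for real I/O):

// Minimal sketch of the LRUCache API defined in the deleted file above.
const { LRUCache } = require('lru-cache')

const cache = new LRUCache({
  max: 100,    // the constructor requires at least one of max, maxSize, or ttl
  ttl: 60_000, // entries older than one minute are considered stale
})

cache.set('key', 'value')
cache.get('key')             // 'value'; updates recency, deletes stale entries
cache.peek('key')            // 'value'; does not update recency
cache.has('key')             // true unless the entry has gone stale
cache.getRemainingTTL('key') // ms until the entry is considered stale

// fetch() adds request coalescing and stale-while-revalidate on top of get():
const remote = new LRUCache({
  max: 50,
  ttl: 5_000,
  allowStale: true, // fetch() may serve the stale value while refreshing
  fetchMethod: async (key, staleValue, { signal }) => {
    // invoked on a miss or stale hit; the signal aborts if the entry is
    // evicted, replaced, or deleted while the fetch is in flight
    return someLookup(key, { signal }) // hypothetical async lookup
  },
})
remote.fetch('key').then(value => console.log(value))
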
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.min.js
deleted file mode 100644
index 8d34a03041d25..0000000000000
--- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: 
"+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else 
r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return 
l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C;
-//# sourceMappingURL=index.min.js.map
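The minified bundle above carries the same `fetch()` machinery as the readable build, including the AbortController polyfill warning and the `__staleWhileFetching` bookkeeping. A sketch of how that read-through path is typically driven (the URL is hypothetical, and a global `fetch` is assumed, i.e. Node 18+):

const { LRUCache } = require('lru-cache')

const cache = new LRUCache({
  max: 100,
  ttl: 60_000,
  // fetchMethod makes cache.fetch(key) a read-through lookup; the
  // provided signal aborts if the entry is evicted, replaced, or deleted
  fetchMethod: async (key, staleValue, { signal }) => {
    const res = await fetch(`https://registry.example/${key}`, { signal })
    return res.json()
  },
})

cache.fetch('some-key').then(value => {
  // concurrent fetch() calls for the same key share one in-flight promise
  console.log(value)
})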
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/package.json b/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
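The deleted `{"type": "commonjs"}` marker is half of the usual dual-publish layout: each `dist/*` subtree carries its own `package.json` so Node parses `dist/cjs` as CommonJS and `dist/mjs` as ESM, while the top-level manifest routes between them. A sketch of that shape (illustrative, not the exact lru-cache manifest):

{
  "main": "./dist/cjs/index.js",
  "module": "./dist/mjs/index.js",
  "exports": {
    ".": {
      "import": "./dist/mjs/index.js",
      "require": "./dist/cjs/index.js"
    }
  }
}

With this arrangement `require('lru-cache')` resolves to the CJS build shown above, while `import` resolves to the ESM build whose removal follows.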
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.js
deleted file mode 100644
index 23b9754ad6c76..0000000000000
--- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.js
+++ /dev/null
@@ -1,1400 +0,0 @@
-/**
- * @module LRUCache
- */
-const perf = typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function'
-    ? performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * All properties from the options object (with the exception of
- * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
- * normal public members. (`max` and `maxSize` are read-only getters.)
- * Changing any of these will alter the defaults for subsequent method calls,
- * but is otherwise safe.
- */
-export class LRUCache {
-    // properties coming in from the options of these, only max and maxSize
-    // really *need* to be protected. The rest can be modified, as they just
-    // set defaults for various methods.
-    #max;
-    #maxSize;
-    #dispose;
-    #disposeAfter;
-    #fetchMethod;
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0
-                ? ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the remaining TTL time for a given entry key
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.delete(this.#keyList[index]);
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (ttl === 0 || start === 0) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            return (ttls[index] !== 0 &&
-                starts[index] !== 0 &&
-                (cachedNow || getNow()) - starts[index] > ttls[index]);
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to Array.find().  fn is called as fn(value, key, cache).
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from
-     * most recently used to least recently used.  fn is called as
-     * fn(value, key, cache).  Does not update age or recency of use.
-     * Does not iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.delete(this.#keyList[i]);
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to cache.load()
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     * Note that the shape of the resulting cache may be different if the
-     * same options are not used in both caches.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.delete(k);
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0
-                ? this.#tail
-                : this.#free.length !== 0
-                    ? this.#free.pop()
-                    : this.#size === this.#max
-                        ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
-                        ? oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined &&
-            (allowStale || !this.#isStale(index))) {
-            const v = this.#valList[index];
-            // either stale and allowed, or forcing a refresh of non-stale value
-            return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        }
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.delete(k);
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.delete(k);
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
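// Note on the shape built above: `bf` is a plain Promise carrying three
// expando fields, which #isBackgroundFetch() below duck-types:
//   __abortController    aborts the in-flight fetch on delete/clear/replace
//   __staleWhileFetching the prior value, if any, servable while refreshing
//   __returned           set to the promise itself once handed to a caller,
//                        so a later rejection throws rather than vanishing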
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
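// A minimal caller-side sketch of the fetch() flow above; the URL and the
// JSON handling are hypothetical, and a global fetch (Node 18+) is assumed.
// Only the option names come from the code itself.
const { LRUCache } = require('lru-cache')
const remote = new LRUCache({
  max: 100,
  ttl: 60_000,
  allowStale: true, // stale value is served while #backgroundFetch refreshes
  fetchMethod: async (key, staleValue, { signal }) => {
    const res = await fetch(`https://example.com/api/${key}`, { signal })
    return res.json()
  },
})
// miss -> status.fetch === 'miss'; warm and fresh -> 'hit';
// warm but stale -> 'stale', and the stale value resolves immediately.
remote.fetch('users', { status: {} }).then(console.log)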
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.delete(k);
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet;
-                // it's not stale, which means this isn't stale-while-refetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
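// A small sketch of the stale-get paths above (key and value hypothetical):
const { LRUCache } = require('lru-cache')
const c = new LRUCache({ max: 10, ttl: 50, allowStale: true })
c.set('k', 'v')
setTimeout(() => {
  const status = {}
  // stale entry: returned once (status.get === 'stale',
  // status.returnedStale === true), then deleted since
  // noDeleteOnStaleGet was not set
  console.log(c.get('k', { status }), status)
  console.log(c.get('k')) // undefined
}, 100)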
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
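// The same move-to-tail operation, isolated on a standalone index-based
// doubly linked list (the technique the comment above describes):
const N = 4
const next = new Uint8Array(N)
const prev = new Uint8Array(N)
let head = 0
let tail = N - 1
for (let i = 0; i < N - 1; i++) { next[i] = i + 1; prev[i + 1] = i }
const moveToTail = (i) => {
  if (i === tail) return                 // already most recently used
  if (i === head) head = next[i]         // head slides forward
  else { next[prev[i]] = next[i]; prev[next[i]] = prev[i] } // splice out
  prev[i] = tail; next[tail] = i; tail = i // re-link at the tail
}
moveToTail(1) // recency order is now 0, 2, 3, 1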
-    /**
-     * Deletes a key out of the cache.
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.clear();
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, 'delete');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, 'delete']);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        this.#next[this.#prev[index]] = this.#next[index];
-                        this.#prev[this.#next[index]] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
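// Observable effect of the two-phase disposal above (dispose runs inline,
// disposeAfter is queued and drained once the delete has finished):
const { LRUCache } = require('lru-cache')
const d = new LRUCache({
  max: 2,
  dispose: (v, k, why) => console.log('dispose', k, why),
  disposeAfter: (v, k, why) => console.log('disposeAfter', k, why),
})
d.set('a', 1)
d.delete('a') // logs "dispose a delete", then "disposeAfter a delete"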
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, 'delete');
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, 'delete']);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.min.js
deleted file mode 100644
index 5a16b3940d6df..0000000000000
--- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: 
"+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else 
r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return 
l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/package.json b/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
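A note on the three-line file removed above: a nested package.json whose only
content is `{ "type": "module" }` marks everything under dist/mjs/ as ESM,
which is how a single package ships both module systems side by side:

    dist/cjs/index.js      loaded by require('lru-cache')
    dist/mjs/index.js      loaded by import from 'lru-cache'
    dist/mjs/package.json  { "type": "module" } (the file deleted here)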
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/LICENSE
similarity index 100%
rename from node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE
rename to node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/LICENSE
diff --git a/node_modules/lru-cache/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.js
similarity index 100%
rename from node_modules/lru-cache/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.js
diff --git a/node_modules/lru-cache/index.mjs b/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.mjs
similarity index 100%
rename from node_modules/lru-cache/index.mjs
rename to node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.mjs
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/package.json
similarity index 52%
rename from node_modules/@npmcli/agent/node_modules/lru-cache/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/package.json
index bae4a04839d1f..9684991727e7a 100644
--- a/node_modules/@npmcli/agent/node_modules/lru-cache/package.json
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/package.json
@@ -1,7 +1,7 @@
 {
   "name": "lru-cache",
   "description": "A cache object that deletes the least-recently-used items.",
-  "version": "10.0.1",
+  "version": "7.18.3",
   "author": "Isaac Z. Schlueter ",
   "keywords": [
     "mru",
@@ -11,74 +11,60 @@
   "sideEffects": false,
   "scripts": {
     "build": "npm run prepare",
-    "preprepare": "rm -rf dist",
-    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-    "postprepare": "bash fixup.sh",
     "pretest": "npm run prepare",
     "presnap": "npm run prepare",
-    "test": "c8 tap",
-    "snap": "c8 tap",
+    "prepare": "node ./scripts/transpile-to-esm.js",
+    "size": "size-limit",
+    "test": "tap",
+    "snap": "tap",
     "preversion": "npm test",
     "postversion": "npm publish",
     "prepublishOnly": "git push origin --follow-tags",
     "format": "prettier --write .",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
-    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
-    "prebenchmark": "npm run prepare",
-    "benchmark": "make -C benchmark",
-    "preprofile": "npm run prepare",
-    "profile": "make -C benchmark profile"
+    "typedoc": "typedoc ./index.d.ts"
   },
-  "main": "./dist/cjs/index.js",
-  "module": "./dist/mjs/index.js",
+  "type": "commonjs",
+  "main": "./index.js",
+  "module": "./index.mjs",
+  "types": "./index.d.ts",
   "exports": {
-    "./min": {
-      "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.min.js"
-      },
-      "require": {
-        "types": "./dist/cjs/index.d.ts",
-        "default": "./dist/cjs/index.min.js"
-      }
-    },
     ".": {
       "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.js"
+        "types": "./index.d.ts",
+        "default": "./index.mjs"
       },
       "require": {
-        "types": "./dist/cjs/index.d.ts",
-        "default": "./dist/cjs/index.js"
+        "types": "./index.d.ts",
+        "default": "./index.js"
       }
-    }
+    },
+    "./package.json": "./package.json"
   },
   "repository": "git://github.com/isaacs/node-lru-cache.git",
   "devDependencies": {
     "@size-limit/preset-small-lib": "^7.0.8",
-    "@types/node": "^20.2.5",
+    "@types/node": "^17.0.31",
     "@types/tap": "^15.0.6",
     "benchmark": "^2.1.4",
     "c8": "^7.11.2",
     "clock-mock": "^1.0.6",
-    "esbuild": "^0.17.11",
     "eslint-config-prettier": "^8.5.0",
-    "marked": "^4.2.12",
-    "mkdirp": "^2.1.5",
     "prettier": "^2.6.2",
     "size-limit": "^7.0.8",
     "tap": "^16.3.4",
-    "ts-node": "^10.9.1",
+    "ts-node": "^10.7.0",
     "tslib": "^2.4.0",
-    "typedoc": "^0.24.6",
-    "typescript": "^5.0.4"
+    "typedoc": "^0.23.24",
+    "typescript": "^4.6.4"
   },
   "license": "ISC",
   "files": [
-    "dist"
+    "index.js",
+    "index.mjs",
+    "index.d.ts"
   ],
   "engines": {
-    "node": "14 || >=16.14"
+    "node": ">=12"
   },
   "prettier": {
     "semi": false,
@@ -92,17 +78,19 @@
     "endOfLine": "lf"
   },
   "tap": {
-    "coverage": false,
+    "nyc-arg": [
+      "--include=index.js"
+    ],
     "node-arg": [
       "--expose-gc",
-      "-r",
+      "--require",
       "ts-node/register"
     ],
     "ts": false
   },
   "size-limit": [
     {
-      "path": "./dist/mjs/index.js"
+      "path": "./index.js"
     }
   ]
 }
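For reference, the conditional "exports" map kept in the 7.18.3 manifest
above resolves as follows (paths taken from the diff itself):

    require('lru-cache')         -> ./index.js   via exports["."].require
    import ... from 'lru-cache'  -> ./index.mjs  via exports["."].import
    "types" in both conditions   -> ./index.d.ts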
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js b/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js
deleted file mode 100644
index 02d76ec800a92..0000000000000
--- a/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js
+++ /dev/null
@@ -1,1404 +0,0 @@
-"use strict";
-/**
- * @module LRUCache
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.LRUCache = void 0;
-const perf = typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function'
-    ? performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
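// The width selection in getUintArray() above, restated as a standalone
// helper and exercised at its boundaries:
const pick = (max) =>
  max <= 2 ** 8 ? Uint8Array        // indexes 0..255 fit in one byte
  : max <= 2 ** 16 ? Uint16Array
  : max <= 2 ** 32 ? Uint32Array
  : ZeroArray                       // zero-filled plain Array past 2^32
console.log(pick(256).name) // "Uint8Array"
console.log(pick(257).name) // "Uint16Array"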
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * All properties from the options object (with the exception of
- * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
- * normal public members. (`max` and `maxSize` are read-only getters.)
- * Changing any of these will alter the defaults for subsequent method calls,
- * but is otherwise safe.
- */
-class LRUCache {
-    // properties coming in from the options of these, only max and maxSize
-    // really *need* to be protected. The rest can be modified, as they just
-    // set defaults for various methods.
-    #max;
-    #maxSize;
-    #dispose;
-    #disposeAfter;
-    #fetchMethod;
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0
-                ? ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the remaining TTL time for a given entry key
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.delete(this.#keyList[index]);
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (ttl === 0 || start === 0) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            return (ttls[index] !== 0 &&
-                starts[index] !== 0 &&
-                (cachedNow || getNow()) - starts[index] > ttls[index]);
-        };
-    }
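// The cachedNow debouncing above, folded into one helper (the original
// splits it between getNow() and its callers' `cachedNow || getNow()`):
let cachedNow = 0
const now = (resolution = 1) => {
  if (cachedNow) return cachedNow
  cachedNow = perf.now()
  const t = setTimeout(() => (cachedNow = 0), resolution)
  if (t.unref) t.unref() // don't keep the event loop alive for this
  return cachedNow
}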
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
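// Iteration order of the generators above, in brief:
const { LRUCache } = require('lru-cache')
const iter = new LRUCache({ max: 3 })
iter.set('a', 1); iter.set('b', 2); iter.set('c', 3)
iter.get('a')                  // touch: 'a' becomes most recently used
console.log([...iter.keys()])  // [ 'a', 'c', 'b' ]  most to least recent
console.log([...iter.rkeys()]) // [ 'b', 'c', 'a' ]  least to most recent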
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to Array.find().  fn is called as fn(value, key, cache).
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from
-     * most recently used to least recently used.  fn is called as
-     * fn(value, key, cache).  Does not update age or recency of use.
-     * Does not iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
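-    // A minimal usage sketch; neither call updates recency of use:
-    //
-    //   c.forEach((value, key) => console.log(key, value))   // MRU first
-    //   c.rforEach((value, key) => console.log(key, value))  // LRU first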
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.delete(this.#keyList[i]);
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
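-    // A minimal usage sketch, useful for caches created without ttlAutopurge:
-    //
-    //   const removedAny = c.purgeStale()  // true if any stale entry was deleted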
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to cache.load()
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load the items in `entries`, in the order listed.
-     * Note that the shape of the resulting cache may be different if the
-     * same options are not used in both caches.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
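-    // A minimal round-trip sketch, assuming `src` and `dst` are LRUCaches
-    // constructed with the same options:
-    //
-    //   const entries = src.dump()  // [[key, { value, ttl?, start?, size? }], ...]
-    //   dst.load(entries)           // clears dst, then replays the entries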
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.delete(k);
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0
-                ? this.#tail
-                : this.#free.length !== 0
-                    ? this.#free.pop()
-                    : this.#size === this.#max
-                        ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
-                        ? oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
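-    // A minimal usage sketch; the ttl value is illustrative:
-    //
-    //   c.set('a', 1, { ttl: 1000 })  // add or update 'a' with a 1-second TTL
-    //   c.set('a', undefined)         // alias for c.delete('a')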
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if the cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
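-    // A minimal usage sketch:
-    //
-    //   const evicted = c.pop()  // removes and returns the LRU value, or
-    //                            // undefined if the cache is empty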
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
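-    // A minimal usage sketch:
-    //
-    //   c.has('a')                            // true only if present and fresh
-    //   c.has('a', { updateAgeOnHas: true })  // also restarts the entry's age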
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined &&
-            (allowStale || !this.#isStale(index))) {
-            const v = this.#valList[index];
-            // either stale and allowed, or forcing a refresh of non-stale value
-            return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        }
-    }
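-    // A minimal usage sketch:
-    //
-    //   c.peek('a')                        // read without touching recency
-    //   c.peek('a', { allowStale: true })  // may also return a stale value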
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.delete(k);
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.delete(k);
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
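-    // A minimal usage sketch; `readThrough` is an assumed async helper, and
-    // the option values are illustrative:
-    //
-    //   const c = new LRUCache({
-    //     max: 100,
-    //     fetchMethod: (key, staleValue, { signal }) => readThrough(key, signal),
-    //   })
-    //   const value = await c.fetch('some-key')  // cache hit, or fetchMethod result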
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.delete(k);
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet;
-                // it's not stale, so this isn't a stale-while-fetching read.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
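-    // A minimal usage sketch:
-    //
-    //   c.get('a')                        // value or undefined; updates recency
-    //   c.get('a', { allowStale: true })  // may return a stale value (deleted
-    //                                     // unless noDeleteOnStaleGet is set)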
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.clear();
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, 'delete');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, 'delete']);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        this.#next[this.#prev[index]] = this.#next[index];
-                        this.#prev[this.#next[index]] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
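-    // A minimal usage sketch:
-    //
-    //   c.delete('a')  // => true if 'a' was present and removed, else false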
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, 'delete');
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, 'delete']);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-exports.LRUCache = LRUCache;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js
deleted file mode 100644
index 8d34a03041d25..0000000000000
--- a/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: 
"+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else 
r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return 
l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C;
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json b/node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js b/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js
deleted file mode 100644
index 23b9754ad6c76..0000000000000
--- a/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js
+++ /dev/null
@@ -1,1400 +0,0 @@
-/**
- * @module LRUCache
- */
-const perf = typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function'
-    ? performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * All properties from the options object (with the exception of
- * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
- * normal public members. (`max` and `maxSize` are read-only getters.)
- * Changing any of these will alter the defaults for subsequent method calls,
- * but is otherwise safe.
- */
-export class LRUCache {
-    // properties coming in from the options; of these, only max and maxSize
-    // really *need* to be protected. The rest can be modified, as they just
-    // set defaults for various methods.
-    #max;
-    #maxSize;
-    #dispose;
-    #disposeAfter;
-    #fetchMethod;
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize defaults to maxSize if maxSize is set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0
-                ? ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
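A minimal sketch of the bounding contract the constructor above enforces (at least one of max, maxSize, or ttl), assuming `lru-cache` resolves as vendored in this tree:

const { LRUCache } = require('lru-cache')

// bounded by entry count
const byCount = new LRUCache({ max: 500 })

// bounded by total calculated size; sizeCalculation must return a positive integer
const bySize = new LRUCache({
  maxSize: 5000,
  sizeCalculation: (value) => value.length,
})

// bounded by TTL alone; ttlAutopurge avoids the unbounded-memory warning above
const byTtl = new LRUCache({ ttl: 60_000, ttlAutopurge: true })

// new LRUCache({}) would throw: at least one bound is required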
-    /**
-     * Return the remaining TTL time for a given entry key
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.delete(this.#keyList[index]);
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to at most once per ttlResolution
-        // window, so we're not hitting that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (ttl === 0 || start === 0) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            return (ttls[index] !== 0 &&
-                starts[index] !== 0 &&
-                (cachedNow || getNow()) - starts[index] > ttls[index]);
-        };
-    }
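Once TTL tracking is initialized, the real getRemainingTTL above replaces the stub. A sketch of what callers see:

const { LRUCache } = require('lru-cache')
const cache = new LRUCache({ max: 10, ttl: 1000 })

cache.set('a', 1)
cache.getRemainingTTL('a')    // ~1000, counting down
cache.getRemainingTTL('nope') // 0 for missing keys
cache.set('b', 2, { ttl: 0 }) // per-entry ttl of 0 disables expiration
cache.getRemainingTTL('b')    // Infinity for entries without a TTL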
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
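The default #requireSize above rejects any size hints when no size bound exists; the tracked version installed by #initializeSizeTracking computes or validates them. A sketch of both sides of that contract:

const { LRUCache } = require('lru-cache')

const sized = new LRUCache({ maxSize: 100 })
sized.set('a', 'hello', { size: 5 })                          // explicit size
sized.set('b', 'world', { sizeCalculation: (v) => v.length }) // computed size

const unsized = new LRUCache({ max: 10 })
// throws: cannot set size without setting maxSize or maxEntrySize on cache
// unsized.set('a', 'hello', { size: 5 })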
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
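A sketch of the iteration order these generators guarantee: most recently used first, with the r-prefixed variants reversed:

const { LRUCache } = require('lru-cache')
const cache = new LRUCache({ max: 3 })
cache.set('a', 1)
cache.set('b', 2)
cache.set('c', 3)
cache.get('a') // touch 'a' so it becomes most recently used

const mru = [...cache.keys()]  // ['a', 'c', 'b']
const lru = [...cache.rkeys()] // ['b', 'c', 'a']
for (const [key, value] of cache) { /* same order as cache.entries() */ }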
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to Array.find().  fn is called as fn(value, key, cache).
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from
-     * most recently used to least recently used.  fn is called as
-     * fn(value, key, cache).  Does not update age or recency of use.
-     * Does not iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.delete(this.#keyList[i]);
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
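Stale entries are only reaped lazily (or by a ttlAutopurge timer); purgeStale() is the manual sweep. A sketch:

const { LRUCache } = require('lru-cache')
const cache = new LRUCache({ max: 10, ttl: 50 })
cache.set('a', 1)

setTimeout(() => {
  cache.has('a')     // false: stale, yet still occupying a slot
  cache.size         // 1
  cache.purgeStale() // true: something was removed
  cache.size         // 0
}, 100)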
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to cache.load()
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     * Note that the shape of the resulting cache may be different if the
-     * same options are not used in both caches.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
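dump() and load() are designed as a pair; the start-time translation in each direction is what preserves remaining TTLs across processes. A round-trip sketch:

const { LRUCache } = require('lru-cache')
const a = new LRUCache({ max: 10, ttl: 60_000 })
a.set('token', 'abc123')

const snapshot = a.dump() // [[key, { value, ttl, start }], ...]
// ...persist and restore later, e.g. via JSON...

const b = new LRUCache({ max: 10, ttl: 60_000 })
b.load(snapshot) // entries keep roughly their remaining TTL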
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.delete(k);
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0
-                ? this.#tail
-                : this.#free.length !== 0
-                    ? this.#free.pop()
-                    : this.#size === this.#max
-                        ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
-                        ? oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
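Two behaviors of set() above are easy to miss: a value of undefined is an alias for delete, and an entry larger than maxEntrySize is rejected while also deleting any existing value under that key. A sketch:

const { LRUCache } = require('lru-cache')
const cache = new LRUCache({ maxSize: 10, sizeCalculation: (v) => v.length })

cache.set('k', 'short')               // fits (size 5)
cache.set('k', 'far too long to fit') // > maxEntrySize: 'k' is deleted
cache.has('k')                        // false

cache.set('x', 'hi')
cache.set('x', undefined)             // alias for cache.delete('x')

const status = {}
cache.set('y', 'ok', { status })      // status.set === 'add'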
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined &&
-            (allowStale || !this.#isStale(index))) {
-            const v = this.#valList[index];
-            // either stale and allowed, or forcing a refresh of non-stale value
-            return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        }
-    }
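has() and peek() are the two non-mutating reads: neither bumps recency, and they differ on stale entries (has() reports false; peek() returns the value only when allowStale is set). A sketch:

const { LRUCache } = require('lru-cache')
const cache = new LRUCache({ max: 2 })
cache.set('a', 1)
cache.set('b', 2)

cache.has('a')    // true, but 'a' stays least recently used
cache.peek('a')   // 1, likewise without touching recency
cache.set('c', 3) // evicts 'a', since has/peek didn't refresh it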
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.delete(k);
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.delete(k);
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
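fetch() above is the stale-while-revalidate entry point: misses and stale hits dispatch fetchMethod, concurrent callers share the in-flight promise, and allowStale callers get the old value immediately. A sketch with a hypothetical registry URL, assuming Node's global fetch (18+) and an async context:

const { LRUCache } = require('lru-cache')

const cache = new LRUCache({
  max: 100,
  ttl: 5 * 60 * 1000,
  allowStale: true, // serve the old value while the refresh is in flight
  fetchMethod: async (key, staleValue, { signal }) => {
    const res = await fetch(`https://registry.example/${key}`, { signal })
    return res.json()
  },
})

const meta = await cache.fetch('some-package') // deduped across callers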
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.delete(k);
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it
-                // yet; it's not stale, so this isn't a stale-while-fetching
-                // read. But if it's not stale, and fetching, AND has a
-                // __staleWhileFetching value, then the user fetched with
-                // {forceRefresh: true}, so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
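A sketch of the stale-read behavior get() implements above, using the allowStale and status options:

const { LRUCache } = require('lru-cache')
const cache = new LRUCache({ max: 10, ttl: 50, allowStale: true })
cache.set('a', 1)

setTimeout(() => {
  const status = {}
  cache.get('a', { status }) // 1; status.get === 'stale',
                             // status.returnedStale === true
  cache.get('a')             // undefined: the stale read deleted it
                             // (noDeleteOnStaleGet was not set)
}, 100)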
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.clear();
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, 'delete');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, 'delete']);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        this.#next[this.#prev[index]] = this.#next[index];
-                        this.#prev[this.#next[index]] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
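delete(), eviction, and clear() all funnel teardown through the same two hooks. A sketch of dispose (called synchronously, before unlinking) versus disposeAfter (deferred until the triggering operation finishes, so it is safe to re-enter the cache):

const { LRUCache } = require('lru-cache')
const cache = new LRUCache({
  max: 2,
  dispose: (value, key, reason) => console.log('dispose', key, reason),
  disposeAfter: (value, key, reason) => console.log('after', key, reason),
})

cache.set('a', 1)
cache.delete('a') // reason 'delete'
cache.set('b', 2)
cache.set('c', 3)
cache.set('d', 4) // evicts 'b' with reason 'evict'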
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, 'delete');
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, 'delete']);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js
deleted file mode 100644
index 5a16b3940d6df..0000000000000
--- a/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json b/node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.js
deleted file mode 100644
index 02d76ec800a92..0000000000000
--- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.js
+++ /dev/null
@@ -1,1404 +0,0 @@
-"use strict";
-/**
- * @module LRUCache
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.LRUCache = void 0;
-const perf = typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function'
-    ? performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? ZeroArray
-                    : null;
-/* c8 ignore stop */
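-// A few example inputs for the selection above (illustrative values):
-//
-//   getUintArray(200)     // => Uint8Array  (200 <= 2^8)
-//   getUintArray(70000)   // => Uint32Array (2^16 < 70000 <= 2^32)
-//   getUintArray(2 ** 40) // => ZeroArray   (still a safe integer)
-//   getUintArray(1.5)     // => null        (not a positive integer)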
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * The main export, the thing you're using this module to get.
- *
- * All properties from the options object (with the exception of
- * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
- * normal public members. (`max` and `maxSize` are read-only getters.)
- * Changing any of these will alter the defaults for subsequent method calls,
- * but is otherwise safe.
- */
-class LRUCache {
-    // properties coming in from the options. Of these, only max and maxSize
-    // really *need* to be protected. The rest can be modified, as they just
-    // set defaults for various methods.
-    #max;
-    #maxSize;
-    #dispose;
-    #disposeAfter;
-    #fetchMethod;
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize defaults to maxSize when not provided
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0
-                ? ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
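-    // A minimal usage sketch for the options handled above (illustrative
-    // values; `LRUCache` is the named export of this module):
-    //
-    //   const { LRUCache } = require('lru-cache')
-    //   const cache = new LRUCache({ max: 500, ttl: 60_000 })
-    //   cache.set('key', 'value')
-    //   cache.get('key') // => 'value', and marks the entry most recently used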
-    /**
-     * Return the remaining TTL time for a given entry key
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.delete(this.#keyList[index]);
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // cache the result of perf.now() for ttlResolution ms so we're
-        // not hitting that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (ttl === 0 || start === 0) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            return (ttls[index] !== 0 &&
-                starts[index] !== 0 &&
-                (cachedNow || getNow()) - starts[index] > ttls[index]);
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
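-    // E.g. `for (const [k, v] of cache) { ... }` walks entries from most
-    // recently used to least recently used (illustrative usage).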
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to Array.find().  fn is called as fn(value, key, cache).
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from
-     * most recently used to least recently used.  fn is called as
-     * fn(value, key, cache).  Does not update age or recency of use.
-     * Does not iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.delete(this.#keyList[i]);
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to cache.load()
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     * Note that the shape of the resulting cache may be different if the
-     * same options are not used in both caches.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
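-    // Round-trip sketch using dump() and load() (illustrative values):
-    //
-    //   const src = new LRUCache({ max: 10, ttl: 5000 })
-    //   src.set('a', 1)
-    //   const copy = new LRUCache({ max: 10, ttl: 5000 })
-    //   copy.load(src.dump()) // copy now holds 'a' with its remaining TTL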
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.delete(k);
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0
-                ? this.#tail
-                : this.#free.length !== 0
-                    ? this.#free.pop()
-                    : this.#size === this.#max
-                        ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
-                        ? oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
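-    // E.g. `cache.set('k', undefined)` behaves like `cache.delete('k')`,
-    // per the guard at the top of this method (illustrative key).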
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined &&
-            (allowStale || !this.#isStale(index))) {
-            const v = this.#valList[index];
-            // either stale and allowed, or forcing a refresh of non-stale value
-            return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        }
-    }
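-    // Unlike get(), peek() leaves recency untouched (illustrative):
-    //
-    //   cache.set('a', 1); cache.set('b', 2)
-    //   cache.peek('a') // => 1, but 'a' is still the least recently used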
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.delete(k);
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.delete(k);
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
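-    // Sketch of the fetch() flow above, assuming a hypothetical fetchJSON()
-    // helper that returns a Promise and honors AbortSignal:
-    //
-    //   const cache = new LRUCache({
-    //     max: 100,
-    //     ttl: 60_000,
-    //     allowStale: true,
-    //     fetchMethod: (key, staleValue, { signal }) =>
-    //       fetchJSON(`https://example.com/${key}`, { signal }),
-    //   })
-    //   await cache.fetch('resource') // hit: cached value; miss: fetchMethod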
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.delete(k);
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet;
-                // it's not stale, so this isn't a stale-while-fetching situation.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.clear();
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, 'delete');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, 'delete']);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        this.#next[this.#prev[index]] = this.#next[index];
-                        this.#prev[this.#next[index]] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, 'delete');
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, 'delete']);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-exports.LRUCache = LRUCache;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.min.js
deleted file mode 100644
index 8d34a03041d25..0000000000000
--- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: 
"+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else 
r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return 
l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C;
-//# sourceMappingURL=index.min.js.map
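
The minified build removed above is the same LRUCache class whose readable source is deleted below. As a minimal usage sketch of the public API visible in that source (illustrative only, not part of this patch; the `lru-cache` specifier is the package's published name and the values are placeholders):

const { LRUCache } = require('lru-cache')

// the constructor requires at least one bound: max, maxSize, or ttl
const cache = new LRUCache({ max: 500 })

cache.set('a', 1)  // adds the entry, evicting the least recently used one when full
cache.get('a')     // => 1, and marks 'a' as most recently used
cache.has('a')     // => true, without updating recency of use
cache.delete('a')  // => true, since the key was present
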
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/package.json b/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
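
The one-line file deleted above is half of the standard dual-build wiring: a nested package.json under dist/cjs/ pins those files as CommonJS, a sibling under dist/mjs/ pins `{"type": "module"}`, and the root package points Node at the right build through an exports map. A sketch of what that root-level map typically looks like (field values are illustrative, not taken from this patch):

{
  "main": "./dist/cjs/index.js",
  "module": "./dist/mjs/index.js",
  "exports": {
    ".": {
      "import": "./dist/mjs/index.js",
      "require": "./dist/cjs/index.js"
    }
  }
}
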
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.js
deleted file mode 100644
index 23b9754ad6c76..0000000000000
--- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.js
+++ /dev/null
@@ -1,1400 +0,0 @@
-/**
- * @module LRUCache
- */
-const perf = typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function'
-    ? performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * All properties from the options object (with the exception of
- * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
- * normal public members. (`max` and `maxSize` are read-only getters.)
- * Changing any of these will alter the defaults for subsequent method calls,
- * but is otherwise safe.
- */
-export class LRUCache {
-    // properties coming in from the options of these, only max and maxSize
-    // really *need* to be protected. The rest can be modified, as they just
-    // set defaults for various methods.
-    #max;
-    #maxSize;
-    #dispose;
-    #disposeAfter;
-    #fetchMethod;
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize defaults to maxSize when only maxSize is set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0
-                ? ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the remaining TTL time for a given entry key
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.delete(this.#keyList[index]);
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to the ttlResolution window so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (ttl === 0 || start === 0) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            return (ttls[index] !== 0 &&
-                starts[index] !== 0 &&
-                (cachedNow || getNow()) - starts[index] > ttls[index]);
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to Array.find().  fn is called as fn(value, key, cache).
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from
-     * most recently used to least recently used.  fn is called as
-     * fn(value, key, cache).  Does not update age or recency of use.
-     * Does not iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.delete(this.#keyList[i]);
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to cache.load()
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     * Note that the shape of the resulting cache may be different if the
-     * same options are not used in both caches.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.delete(k);
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0
-                ? this.#tail
-                : this.#free.length !== 0
-                    ? this.#free.pop()
-                    : this.#size === this.#max
-                        ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
-                        ? oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined &&
-            (allowStale || !this.#isStale(index))) {
-            const v = this.#valList[index];
-            // either stale and allowed, or forcing a refresh of non-stale value
-            return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        }
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.delete(k);
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.delete(k);
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.delete(k);
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet;
-                // it's not stale, which means this isn't a stale-while-refetching case.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.clear();
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, 'delete');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, 'delete']);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        this.#next[this.#prev[index]] = this.#next[index];
-                        this.#prev[this.#next[index]] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, 'delete');
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, 'delete']);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.min.js
deleted file mode 100644
index 5a16b3940d6df..0000000000000
--- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: 
"+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else 
r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return 
l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/package.json b/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/LICENSE b/node_modules/init-package-json/node_modules/lru-cache/LICENSE
similarity index 100%
rename from node_modules/@npmcli/git/node_modules/lru-cache/LICENSE
rename to node_modules/init-package-json/node_modules/lru-cache/LICENSE
diff --git a/node_modules/init-package-json/node_modules/lru-cache/index.js b/node_modules/init-package-json/node_modules/lru-cache/index.js
new file mode 100644
index 0000000000000..48e99fe5e5a70
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/lru-cache/index.js
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
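+// Illustrative sketch (not part of the module): this polyfill only needs to
+// service LRUCache's own fetch() aborts, e.g.:
+//
+//   const ac = new AC()
+//   ac.signal.addEventListener('abort', () => console.log(ac.signal.reason))
+//   ac.abort(new Error('evicted')) // sets signal.aborted, fires the listener
+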
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
+
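+// Worked examples (illustrative): the smallest typed array that can hold
+// indexes up to `max` is chosen, keeping the next/prev links compact.
+//   getUintArray(100)    // => Uint8Array  (100 <= 2^8)
+//   getUintArray(10000)  // => Uint16Array (10000 <= 2^16)
+//   getUintArray(1e6)    // => Uint32Array
+//   getUintArray(0)      // => null (not a positive integer)
+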
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
+
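+// Sketch (illustrative): the Stack recycles freed slot indexes in LIFO order.
+//   const s = new Stack(10)
+//   s.push(3); s.push(7)
+//   s.pop() // => 7
+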
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.start + status.ttl - status.now
+      }
+    }
+
+    // debounce calls to perf.now() within the ttlResolution window,
+    // so we're not hitting that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
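+  // Worked example (illustrative): for an entry with ttl = 1000 stored at
+  // start = 5000, getRemainingTTL() at now = 5600 is
+  // start + ttl - now = 5000 + 1000 - 5600 = 400; once now exceeds 6000,
+  // isStale() reports true because now - start > ttl.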
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
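+  // Usage sketch (illustrative, assumes string values):
+  //   const c = new LRUCache({ maxSize: 1000, sizeCalculation: s => s.length })
+  //   c.set('a', 'x'.repeat(600)) // calculatedSize = 600
+  //   c.set('b', 'y'.repeat(600)) // evicts 'a' to stay within maxSize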
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
+
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
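+  // Note: indexes() walks entries from most- to least-recently used
+  // (tail toward head); rindexes() walks least- to most-recently used.
+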
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
+
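+  // Round-trip sketch (illustrative):
+  //   const saved = cache.dump()              // [[key, { value, ttl?, ... }], ...]
+  //   const copy = new LRUCache({ max: 100 }) // same shape as the original
+  //   copy.load(saved)                        // entries restored oldest-first
+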
+  dispose(_v, _k, _reason) {}
+
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
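+  // Usage sketch (illustrative):
+  //   const status = {}
+  //   cache.set('k', 'v', { ttl: 500, status }) // status.set === 'add'
+  //   cache.set('k', 'v', { status })           // status.set === 'update'
+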
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
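+  // Note: the returned promise is decorated with __abortController,
+  // __staleWhileFetching (the previous value, if any), and __returned,
+  // which is how isBackgroundFetch() below recognizes in-flight fetches.
+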
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
+
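+  // Stale-while-revalidate sketch (illustrative; slowLookup is hypothetical):
+  //   const c = new LRUCache({ max: 10, ttl: 100, allowStale: true,
+  //     fetchMethod: async key => slowLookup(key) })
+  //   await c.fetch('k')  // miss: resolves via fetchMethod and caches
+  //   // once stale, fetch('k') starts a refresh in the background and
+  //   // returns the old value immediately because allowStale is set.
+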
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
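+  // Status sketch (illustrative):
+  //   const status = {}
+  //   cache.get('missing', { status }) // => undefined, status.get === 'miss'
+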
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
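+  // Worked example (illustrative): with head=A <-> B <-> C=tail,
+  // moveToTail(B) relinks A <-> C and then C <-> B, giving
+  // A <-> C <-> B with tail = B; only the touched links change.
+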
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+module.exports = LRUCache
diff --git a/node_modules/init-package-json/node_modules/lru-cache/index.mjs b/node_modules/init-package-json/node_modules/lru-cache/index.mjs
new file mode 100644
index 0000000000000..4a0b4813ec515
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/lru-cache/index.mjs
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
+
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
+
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.now + status.ttl - status.start
+      }
+    }
+
+    // debounce calls to perf.now() to 1s so we're not hitting
+    // that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
+
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
+
+  dispose(_v, _k, _reason) {}
+
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
+
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+export default LRUCache
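
For reference, a minimal usage sketch of the LRUCache class vendored above; this is consumer code, not part of the patch. The option names (max, ttl, fetchMethod) and the set()/get()/fetch() methods are taken directly from the constructor and methods in this file; the URL and key names are placeholders.

    import LRUCache from 'lru-cache' // the "." export maps to index.mjs for ESM consumers

    const cache = new LRUCache({
      max: 100, // at least one of max, maxSize, or ttl is required
      ttl: 60_000, // entries become stale after one minute
      // optional async loader used by cache.fetch(); receives an AbortSignal
      fetchMethod: async (key, staleValue, { signal }) => {
        const res = await fetch(`https://registry.example/${key}`, { signal }) // placeholder URL
        return res.json()
      },
    })

    cache.set('a', 1)
    cache.get('a') // => 1
    const data = await cache.fetch('pkg') // runs fetchMethod and caches the result
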
diff --git a/node_modules/cacache/node_modules/lru-cache/package.json b/node_modules/init-package-json/node_modules/lru-cache/package.json
similarity index 52%
rename from node_modules/cacache/node_modules/lru-cache/package.json
rename to node_modules/init-package-json/node_modules/lru-cache/package.json
index bae4a04839d1f..9684991727e7a 100644
--- a/node_modules/cacache/node_modules/lru-cache/package.json
+++ b/node_modules/init-package-json/node_modules/lru-cache/package.json
@@ -1,7 +1,7 @@
 {
   "name": "lru-cache",
   "description": "A cache object that deletes the least-recently-used items.",
-  "version": "10.0.1",
+  "version": "7.18.3",
   "author": "Isaac Z. Schlueter ",
   "keywords": [
     "mru",
@@ -11,74 +11,60 @@
   "sideEffects": false,
   "scripts": {
     "build": "npm run prepare",
-    "preprepare": "rm -rf dist",
-    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-    "postprepare": "bash fixup.sh",
     "pretest": "npm run prepare",
     "presnap": "npm run prepare",
-    "test": "c8 tap",
-    "snap": "c8 tap",
+    "prepare": "node ./scripts/transpile-to-esm.js",
+    "size": "size-limit",
+    "test": "tap",
+    "snap": "tap",
     "preversion": "npm test",
     "postversion": "npm publish",
     "prepublishOnly": "git push origin --follow-tags",
     "format": "prettier --write .",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
-    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
-    "prebenchmark": "npm run prepare",
-    "benchmark": "make -C benchmark",
-    "preprofile": "npm run prepare",
-    "profile": "make -C benchmark profile"
+    "typedoc": "typedoc ./index.d.ts"
   },
-  "main": "./dist/cjs/index.js",
-  "module": "./dist/mjs/index.js",
+  "type": "commonjs",
+  "main": "./index.js",
+  "module": "./index.mjs",
+  "types": "./index.d.ts",
   "exports": {
-    "./min": {
-      "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.min.js"
-      },
-      "require": {
-        "types": "./dist/cjs/index.d.ts",
-        "default": "./dist/cjs/index.min.js"
-      }
-    },
     ".": {
       "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.js"
+        "types": "./index.d.ts",
+        "default": "./index.mjs"
       },
       "require": {
-        "types": "./dist/cjs/index.d.ts",
-        "default": "./dist/cjs/index.js"
+        "types": "./index.d.ts",
+        "default": "./index.js"
       }
-    }
+    },
+    "./package.json": "./package.json"
   },
   "repository": "git://github.com/isaacs/node-lru-cache.git",
   "devDependencies": {
     "@size-limit/preset-small-lib": "^7.0.8",
-    "@types/node": "^20.2.5",
+    "@types/node": "^17.0.31",
     "@types/tap": "^15.0.6",
     "benchmark": "^2.1.4",
     "c8": "^7.11.2",
     "clock-mock": "^1.0.6",
-    "esbuild": "^0.17.11",
     "eslint-config-prettier": "^8.5.0",
-    "marked": "^4.2.12",
-    "mkdirp": "^2.1.5",
     "prettier": "^2.6.2",
     "size-limit": "^7.0.8",
     "tap": "^16.3.4",
-    "ts-node": "^10.9.1",
+    "ts-node": "^10.7.0",
     "tslib": "^2.4.0",
-    "typedoc": "^0.24.6",
-    "typescript": "^5.0.4"
+    "typedoc": "^0.23.24",
+    "typescript": "^4.6.4"
   },
   "license": "ISC",
   "files": [
-    "dist"
+    "index.js",
+    "index.mjs",
+    "index.d.ts"
   ],
   "engines": {
-    "node": "14 || >=16.14"
+    "node": ">=12"
   },
   "prettier": {
     "semi": false,
@@ -92,17 +78,19 @@
     "endOfLine": "lf"
   },
   "tap": {
-    "coverage": false,
+    "nyc-arg": [
+      "--include=index.js"
+    ],
     "node-arg": [
       "--expose-gc",
-      "-r",
+      "--require",
       "ts-node/register"
     ],
     "ts": false
   },
   "size-limit": [
     {
-      "path": "./dist/mjs/index.js"
+      "path": "./index.js"
     }
   ]
 }
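
The conditional "exports" map in the v7 package.json above gives the package dual CJS/ESM entry points. A sketch of how it resolves for consumers (assumed consumer code, not part of the patch):

    // CommonJS: the "require" condition resolves to ./index.js
    const LRUCache = require('lru-cache')

    // ESM: the "import" condition resolves to ./index.mjs
    // import LRUCache from 'lru-cache'
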
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.js b/node_modules/lru-cache/dist/cjs/index.js
similarity index 100%
rename from node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.js
rename to node_modules/lru-cache/dist/cjs/index.js
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/lru-cache/dist/cjs/index.min.js
similarity index 100%
rename from node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.min.js
rename to node_modules/lru-cache/dist/cjs/index.min.js
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/package.json b/node_modules/lru-cache/dist/cjs/package.json
similarity index 100%
rename from node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/package.json
rename to node_modules/lru-cache/dist/cjs/package.json
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.js b/node_modules/lru-cache/dist/mjs/index.js
similarity index 100%
rename from node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.js
rename to node_modules/lru-cache/dist/mjs/index.js
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/lru-cache/dist/mjs/index.min.js
similarity index 100%
rename from node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.min.js
rename to node_modules/lru-cache/dist/mjs/index.min.js
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/package.json b/node_modules/lru-cache/dist/mjs/package.json
similarity index 100%
rename from node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/package.json
rename to node_modules/lru-cache/dist/mjs/package.json
diff --git a/node_modules/lru-cache/package.json b/node_modules/lru-cache/package.json
index 9684991727e7a..bae4a04839d1f 100644
--- a/node_modules/lru-cache/package.json
+++ b/node_modules/lru-cache/package.json
@@ -1,7 +1,7 @@
 {
   "name": "lru-cache",
   "description": "A cache object that deletes the least-recently-used items.",
-  "version": "7.18.3",
+  "version": "10.0.1",
   "author": "Isaac Z. Schlueter ",
   "keywords": [
     "mru",
@@ -11,60 +11,74 @@
   "sideEffects": false,
   "scripts": {
     "build": "npm run prepare",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
+    "postprepare": "bash fixup.sh",
     "pretest": "npm run prepare",
     "presnap": "npm run prepare",
-    "prepare": "node ./scripts/transpile-to-esm.js",
-    "size": "size-limit",
-    "test": "tap",
-    "snap": "tap",
+    "test": "c8 tap",
+    "snap": "c8 tap",
     "preversion": "npm test",
     "postversion": "npm publish",
     "prepublishOnly": "git push origin --follow-tags",
     "format": "prettier --write .",
-    "typedoc": "typedoc ./index.d.ts"
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
+    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
+    "prebenchmark": "npm run prepare",
+    "benchmark": "make -C benchmark",
+    "preprofile": "npm run prepare",
+    "profile": "make -C benchmark profile"
   },
-  "type": "commonjs",
-  "main": "./index.js",
-  "module": "./index.mjs",
-  "types": "./index.d.ts",
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/mjs/index.js",
   "exports": {
-    ".": {
+    "./min": {
       "import": {
-        "types": "./index.d.ts",
-        "default": "./index.mjs"
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.min.js"
       },
       "require": {
-        "types": "./index.d.ts",
-        "default": "./index.js"
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.min.js"
       }
     },
-    "./package.json": "./package.json"
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    }
   },
   "repository": "git://github.com/isaacs/node-lru-cache.git",
   "devDependencies": {
     "@size-limit/preset-small-lib": "^7.0.8",
-    "@types/node": "^17.0.31",
+    "@types/node": "^20.2.5",
     "@types/tap": "^15.0.6",
     "benchmark": "^2.1.4",
     "c8": "^7.11.2",
     "clock-mock": "^1.0.6",
+    "esbuild": "^0.17.11",
     "eslint-config-prettier": "^8.5.0",
+    "marked": "^4.2.12",
+    "mkdirp": "^2.1.5",
     "prettier": "^2.6.2",
     "size-limit": "^7.0.8",
     "tap": "^16.3.4",
-    "ts-node": "^10.7.0",
+    "ts-node": "^10.9.1",
     "tslib": "^2.4.0",
-    "typedoc": "^0.23.24",
-    "typescript": "^4.6.4"
+    "typedoc": "^0.24.6",
+    "typescript": "^5.0.4"
   },
   "license": "ISC",
   "files": [
-    "index.js",
-    "index.mjs",
-    "index.d.ts"
+    "dist"
   ],
   "engines": {
-    "node": ">=12"
+    "node": "14 || >=16.14"
   },
   "prettier": {
     "semi": false,
@@ -78,19 +92,17 @@
     "endOfLine": "lf"
   },
   "tap": {
-    "nyc-arg": [
-      "--include=index.js"
-    ],
+    "coverage": false,
     "node-arg": [
       "--expose-gc",
-      "--require",
+      "-r",
       "ts-node/register"
     ],
     "ts": false
   },
   "size-limit": [
     {
-      "path": "./index.js"
+      "path": "./dist/mjs/index.js"
     }
   ]
 }
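
With the v10 layout above, the "exports" map also exposes a "./min" subpath pointing at the minified builds under dist/. A sketch (assumed consumer code; v10 exports the class by name rather than as a default):

    // resolves to ./dist/mjs/index.min.js under the "import" condition
    import { LRUCache } from 'lru-cache/min'

    const cache = new LRUCache({ max: 500 })
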
diff --git a/node_modules/cacache/node_modules/lru-cache/LICENSE b/node_modules/node-gyp/node_modules/lru-cache/LICENSE
similarity index 100%
rename from node_modules/cacache/node_modules/lru-cache/LICENSE
rename to node_modules/node-gyp/node_modules/lru-cache/LICENSE
diff --git a/node_modules/node-gyp/node_modules/lru-cache/index.js b/node_modules/node-gyp/node_modules/lru-cache/index.js
new file mode 100644
index 0000000000000..48e99fe5e5a70
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/lru-cache/index.js
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
+
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
+
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.now + status.ttl - status.start
+      }
+    }
+
+    // debounce calls to perf.now() to 1s so we're not hitting
+    // that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
+
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
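+  // Iterating the cache directly walks entries(): [key, value] pairs like
+  // a Map, but in most-recently-used-first order rather than insertion
+  // order. For example: for (const [k, v] of cache) { ... }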
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
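+  // dump() and load() round-trip the cache through a plain array; start
+  // times are converted to portable Date.now() timestamps on the way out
+  // and back to perf.now() offsets on the way in, so a dump taken in one
+  // process can be loaded in another, e.g. (sketch):
+  //   cacheB.load(cacheA.dump())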
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
+
+  dispose(_v, _k, _reason) {}
+
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
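+  // Pick a slot for a new entry, in priority order: the slot tail already
+  // points at when the cache is empty, the slot freed by evicting the LRU
+  // entry when full, a previously-freed slot from the free stack, or the
+  // next unused slot during the initial fill of the fixed arrays.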
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
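+  // Evict the least-recently-used entry (the head of the linked list).
+  // Returns the freed index so newIndex() can reuse it immediately; when
+  // `free` is true the slot is nulled out and pushed on the free stack
+  // instead.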
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
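+  // Background fetches are ordinary Promises duck-typed by the marker
+  // properties that backgroundFetch() sets on them, so no separate
+  // bookkeeping structure is needed to tell an in-flight fetch apart
+  // from a cached value.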
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
+
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+module.exports = LRUCache
diff --git a/node_modules/node-gyp/node_modules/lru-cache/index.mjs b/node_modules/node-gyp/node_modules/lru-cache/index.mjs
new file mode 100644
index 0000000000000..4a0b4813ec515
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/lru-cache/index.mjs
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
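+// Truthy only for positive finite integers; for 0 or undefined the chain
+// short-circuits to a falsy value (not a strict boolean), which is all
+// the call sites below require.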
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
+
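+// Plain-Array fallback index store for caches larger than Uint32Array can
+// address, pre-filled with zeroes so it reads like the typed arrays above.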
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
+
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
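+  // A minimal usage sketch (options and methods shown are the documented
+  // lru-cache API):
+  //   const cache = new LRUCache({ max: 500, ttl: 1000 * 60 })
+  //   cache.set('key', { some: 'value' })
+  //   cache.get('key') // => { some: 'value' }, and marks it most-recent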
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.now + status.ttl - status.start
+      }
+    }
+
+    // debounce calls to perf.now() to 1s so we're not hitting
+    // that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
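+  // Prototype no-op stubs: real implementations are installed per-instance
+  // by initializeTTLTracking() above (and initializeSizeTracking() below)
+  // only when TTLs or size limits are actually in use, keeping the common
+  // path free of that overhead.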
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation must return a positive integer'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
+
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
+
+  dispose(_v, _k, _reason) {}
+
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
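+  // Remove the least-recently-used entry and return its value
+  // (undefined when the cache is empty).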
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
+
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
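+  // Drop every entry: abort in-flight background fetches, run dispose and
+  // disposeAfter handlers with the 'delete' reason, then reset all index
+  // bookkeeping to the empty state.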
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+export default LRUCache
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/package.json b/node_modules/node-gyp/node_modules/lru-cache/package.json
similarity index 52%
rename from node_modules/@npmcli/git/node_modules/lru-cache/package.json
rename to node_modules/node-gyp/node_modules/lru-cache/package.json
index bae4a04839d1f..9684991727e7a 100644
--- a/node_modules/@npmcli/git/node_modules/lru-cache/package.json
+++ b/node_modules/node-gyp/node_modules/lru-cache/package.json
@@ -1,7 +1,7 @@
 {
   "name": "lru-cache",
   "description": "A cache object that deletes the least-recently-used items.",
-  "version": "10.0.1",
+  "version": "7.18.3",
   "author": "Isaac Z. Schlueter ",
   "keywords": [
     "mru",
@@ -11,74 +11,60 @@
   "sideEffects": false,
   "scripts": {
     "build": "npm run prepare",
-    "preprepare": "rm -rf dist",
-    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-    "postprepare": "bash fixup.sh",
     "pretest": "npm run prepare",
     "presnap": "npm run prepare",
-    "test": "c8 tap",
-    "snap": "c8 tap",
+    "prepare": "node ./scripts/transpile-to-esm.js",
+    "size": "size-limit",
+    "test": "tap",
+    "snap": "tap",
     "preversion": "npm test",
     "postversion": "npm publish",
     "prepublishOnly": "git push origin --follow-tags",
     "format": "prettier --write .",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
-    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
-    "prebenchmark": "npm run prepare",
-    "benchmark": "make -C benchmark",
-    "preprofile": "npm run prepare",
-    "profile": "make -C benchmark profile"
+    "typedoc": "typedoc ./index.d.ts"
   },
-  "main": "./dist/cjs/index.js",
-  "module": "./dist/mjs/index.js",
+  "type": "commonjs",
+  "main": "./index.js",
+  "module": "./index.mjs",
+  "types": "./index.d.ts",
   "exports": {
-    "./min": {
-      "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.min.js"
-      },
-      "require": {
-        "types": "./dist/cjs/index.d.ts",
-        "default": "./dist/cjs/index.min.js"
-      }
-    },
     ".": {
       "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.js"
+        "types": "./index.d.ts",
+        "default": "./index.mjs"
       },
       "require": {
-        "types": "./dist/cjs/index.d.ts",
-        "default": "./dist/cjs/index.js"
+        "types": "./index.d.ts",
+        "default": "./index.js"
       }
-    }
+    },
+    "./package.json": "./package.json"
   },
   "repository": "git://github.com/isaacs/node-lru-cache.git",
   "devDependencies": {
     "@size-limit/preset-small-lib": "^7.0.8",
-    "@types/node": "^20.2.5",
+    "@types/node": "^17.0.31",
     "@types/tap": "^15.0.6",
     "benchmark": "^2.1.4",
     "c8": "^7.11.2",
     "clock-mock": "^1.0.6",
-    "esbuild": "^0.17.11",
     "eslint-config-prettier": "^8.5.0",
-    "marked": "^4.2.12",
-    "mkdirp": "^2.1.5",
     "prettier": "^2.6.2",
     "size-limit": "^7.0.8",
     "tap": "^16.3.4",
-    "ts-node": "^10.9.1",
+    "ts-node": "^10.7.0",
     "tslib": "^2.4.0",
-    "typedoc": "^0.24.6",
-    "typescript": "^5.0.4"
+    "typedoc": "^0.23.24",
+    "typescript": "^4.6.4"
   },
   "license": "ISC",
   "files": [
-    "dist"
+    "index.js",
+    "index.mjs",
+    "index.d.ts"
   ],
   "engines": {
-    "node": "14 || >=16.14"
+    "node": ">=12"
   },
   "prettier": {
     "semi": false,
@@ -92,17 +78,19 @@
     "endOfLine": "lf"
   },
   "tap": {
-    "coverage": false,
+    "nyc-arg": [
+      "--include=index.js"
+    ],
     "node-arg": [
       "--expose-gc",
-      "-r",
+      "--require",
       "ts-node/register"
     ],
     "ts": false
   },
   "size-limit": [
     {
-      "path": "./dist/mjs/index.js"
+      "path": "./index.js"
     }
   ]
 }
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE b/node_modules/normalize-package-data/node_modules/lru-cache/LICENSE
similarity index 100%
rename from node_modules/hosted-git-info/node_modules/lru-cache/LICENSE
rename to node_modules/normalize-package-data/node_modules/lru-cache/LICENSE
diff --git a/node_modules/normalize-package-data/node_modules/lru-cache/index.js b/node_modules/normalize-package-data/node_modules/lru-cache/index.js
new file mode 100644
index 0000000000000..48e99fe5e5a70
--- /dev/null
+++ b/node_modules/normalize-package-data/node_modules/lru-cache/index.js
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
+
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
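+// A tiny LIFO of recycled slot indexes, backed by the smallest
+// typed array capable of holding `max` distinct indexes.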
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
+
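+// Storage layout: keyList/valList are parallel arrays indexed by slot,
+// keyMap maps key -> slot, and next/prev encode a doubly-linked recency
+// list over slots. `head` is the least-recently-used entry, `tail` the
+// most-recently-used; eviction always removes `head`.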
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.start + status.ttl - status.now
+      }
+    }
+
+    // cache the result of perf.now() for ttlResolution ms (1 by
+    // default) so we're not hitting that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
+
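+  // indexes() yields live slots from most- to least-recently used
+  // (tail -> head via the prev links); rindexes() walks the opposite
+  // direction. Stale entries are skipped unless allowStale is set.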
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
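+  // dump() emits [key, entry] pairs, least-recent first, translating
+  // perf.now()-based start times into portable Date.now() timestamps;
+  // load() reverses the translation and replays entries through set().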
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
+
+  dispose(_v, _k, _reason) {}
+
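+  // set() either claims a fresh slot (addition) or updates in place,
+  // aborting any in-flight background fetch it replaces. Dispose
+  // callbacks for displaced values are deferred via disposeAfter until
+  // the cache is back in a consistent state.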
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
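+  // Slot allocation order: reuse the tail slot when empty, evict the
+  // LRU entry when full, recycle a freed slot if one exists, otherwise
+  // keep filling the arrays left to right (initialFill).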
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
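+  // Removes the head (least-recently-used) entry. When `free` is true
+  // the slot is nulled and pushed onto the free stack; newIndex() passes
+  // false because it reuses the returned slot immediately.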
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
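+  // Implements stale-while-revalidate: the pending fetchMethod promise
+  // is stored directly in valList, tagged with __abortController,
+  // __staleWhileFetching (the previous value, if any), and __returned,
+  // so concurrent callers can recognize and join it.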
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
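+  // Duck-types the tagged promise created by backgroundFetch() above.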
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
+
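+  // A stale hit is deleted (unless noDeleteOnStaleGet) and returned only
+  // when allowStale is set; fresh hits are promoted to the recency tail.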
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+module.exports = LRUCache
diff --git a/node_modules/normalize-package-data/node_modules/lru-cache/index.mjs b/node_modules/normalize-package-data/node_modules/lru-cache/index.mjs
new file mode 100644
index 0000000000000..4a0b4813ec515
--- /dev/null
+++ b/node_modules/normalize-package-data/node_modules/lru-cache/index.mjs
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
+
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
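+// A tiny LIFO of recycled slot indexes, backed by the smallest
+// typed array capable of holding `max` distinct indexes.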
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
+
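+// Storage layout: keyList/valList are parallel arrays indexed by slot,
+// keyMap maps key -> slot, and next/prev encode a doubly-linked recency
+// list over slots. `head` is the least-recently-used entry, `tail` the
+// most-recently-used; eviction always removes `head`.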
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.start + status.ttl - status.now
+      }
+    }
+
+    // cache the result of perf.now() for ttlResolution ms (1 by
+    // default) so we're not hitting that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
+
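+  // indexes() yields live slots from most- to least-recently used
+  // (tail -> head via the prev links); rindexes() walks the opposite
+  // direction. Stale entries are skipped unless allowStale is set.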
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
+
+  dispose(_v, _k, _reason) {}
+
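+  // set() either claims a fresh slot (addition) or updates in place,
+  // aborting any in-flight background fetch it replaces. Dispose
+  // callbacks for displaced values are deferred via disposeAfter until
+  // the cache is back in a consistent state.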
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
+
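+  // A fetch() usage sketch (the URL and option values are hypothetical):
+  //
+  //   const cache = new LRUCache({
+  //     max: 100,
+  //     ttl: 60_000,
+  //     allowStale: true,
+  //     fetchMethod: async (key, staleValue, { signal }) => {
+  //       const res = await fetch(`https://example.com/${key}`, { signal })
+  //       return res.json()
+  //     },
+  //   })
+  //   const data = await cache.fetch('some-key')
+  //
+  // A fresh hit returns the cached value; a miss or stale entry goes
+  // through backgroundFetch(), and with allowStale a stale value can be
+  // served immediately while the refresh runs.
+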
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+export default LRUCache
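+
+// A minimal usage sketch for this ESM build (option values hypothetical):
+//
+//   import LRUCache from 'lru-cache'
+//   const cache = new LRUCache({ max: 500, ttl: 1000 * 60 * 5 })
+//   cache.set('key', 'value')
+//   cache.get('key') // => 'value'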
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/package.json b/node_modules/normalize-package-data/node_modules/lru-cache/package.json
similarity index 52%
rename from node_modules/hosted-git-info/node_modules/lru-cache/package.json
rename to node_modules/normalize-package-data/node_modules/lru-cache/package.json
index bae4a04839d1f..9684991727e7a 100644
--- a/node_modules/hosted-git-info/node_modules/lru-cache/package.json
+++ b/node_modules/normalize-package-data/node_modules/lru-cache/package.json
@@ -1,7 +1,7 @@
 {
   "name": "lru-cache",
   "description": "A cache object that deletes the least-recently-used items.",
-  "version": "10.0.1",
+  "version": "7.18.3",
   "author": "Isaac Z. Schlueter ",
   "keywords": [
     "mru",
@@ -11,74 +11,60 @@
   "sideEffects": false,
   "scripts": {
     "build": "npm run prepare",
-    "preprepare": "rm -rf dist",
-    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-    "postprepare": "bash fixup.sh",
     "pretest": "npm run prepare",
     "presnap": "npm run prepare",
-    "test": "c8 tap",
-    "snap": "c8 tap",
+    "prepare": "node ./scripts/transpile-to-esm.js",
+    "size": "size-limit",
+    "test": "tap",
+    "snap": "tap",
     "preversion": "npm test",
     "postversion": "npm publish",
     "prepublishOnly": "git push origin --follow-tags",
     "format": "prettier --write .",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
-    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
-    "prebenchmark": "npm run prepare",
-    "benchmark": "make -C benchmark",
-    "preprofile": "npm run prepare",
-    "profile": "make -C benchmark profile"
+    "typedoc": "typedoc ./index.d.ts"
   },
-  "main": "./dist/cjs/index.js",
-  "module": "./dist/mjs/index.js",
+  "type": "commonjs",
+  "main": "./index.js",
+  "module": "./index.mjs",
+  "types": "./index.d.ts",
   "exports": {
-    "./min": {
-      "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.min.js"
-      },
-      "require": {
-        "types": "./dist/cjs/index.d.ts",
-        "default": "./dist/cjs/index.min.js"
-      }
-    },
     ".": {
       "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.js"
+        "types": "./index.d.ts",
+        "default": "./index.mjs"
       },
       "require": {
-        "types": "./dist/cjs/index.d.ts",
-        "default": "./dist/cjs/index.js"
+        "types": "./index.d.ts",
+        "default": "./index.js"
       }
-    }
+    },
+    "./package.json": "./package.json"
   },
   "repository": "git://github.com/isaacs/node-lru-cache.git",
   "devDependencies": {
     "@size-limit/preset-small-lib": "^7.0.8",
-    "@types/node": "^20.2.5",
+    "@types/node": "^17.0.31",
     "@types/tap": "^15.0.6",
     "benchmark": "^2.1.4",
     "c8": "^7.11.2",
     "clock-mock": "^1.0.6",
-    "esbuild": "^0.17.11",
     "eslint-config-prettier": "^8.5.0",
-    "marked": "^4.2.12",
-    "mkdirp": "^2.1.5",
     "prettier": "^2.6.2",
     "size-limit": "^7.0.8",
     "tap": "^16.3.4",
-    "ts-node": "^10.9.1",
+    "ts-node": "^10.7.0",
     "tslib": "^2.4.0",
-    "typedoc": "^0.24.6",
-    "typescript": "^5.0.4"
+    "typedoc": "^0.23.24",
+    "typescript": "^4.6.4"
   },
   "license": "ISC",
   "files": [
-    "dist"
+    "index.js",
+    "index.mjs",
+    "index.d.ts"
   ],
   "engines": {
-    "node": "14 || >=16.14"
+    "node": ">=12"
   },
   "prettier": {
     "semi": false,
@@ -92,17 +78,19 @@
     "endOfLine": "lf"
   },
   "tap": {
-    "coverage": false,
+    "nyc-arg": [
+      "--include=index.js"
+    ],
     "node-arg": [
       "--expose-gc",
-      "-r",
+      "--require",
       "ts-node/register"
     ],
     "ts": false
   },
   "size-limit": [
     {
-      "path": "./dist/mjs/index.js"
+      "path": "./index.js"
     }
   ]
 }
diff --git a/node_modules/path-scurry/node_modules/lru-cache/LICENSE b/node_modules/npm-pick-manifest/node_modules/lru-cache/LICENSE
similarity index 100%
rename from node_modules/path-scurry/node_modules/lru-cache/LICENSE
rename to node_modules/npm-pick-manifest/node_modules/lru-cache/LICENSE
diff --git a/node_modules/npm-pick-manifest/node_modules/lru-cache/index.js b/node_modules/npm-pick-manifest/node_modules/lru-cache/index.js
new file mode 100644
index 0000000000000..48e99fe5e5a70
--- /dev/null
+++ b/node_modules/npm-pick-manifest/node_modules/lru-cache/index.js
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  : AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
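+// Each code warns at most once per process; e.g. the first use of the
+// deprecated cache.reset getter emits a DeprecationWarning roughly like:
+//   The reset method is deprecated. Please use cache.clear() instead.
+//   (code: LRU_CACHE_METHOD_reset)
+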
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
+
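+// Illustrating the tiers above: getUintArray(200) yields Uint8Array,
+// getUintArray(70_000) yields Uint32Array, and a max beyond 2**32 falls
+// back to ZeroArray (the zero-filled plain Array defined below).
+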
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
+
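+// Design note: Stack recycles indexes freed by delete(), letting
+// newIndex() reuse slots without ever shifting keyList/valList. When
+// max is 0 the constructor returns a plain [], whose native push/pop
+// keep the same interface for unbounded caches.
+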
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
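+  // (This default treats any present key as never expiring; a real
+  // TTL-aware implementation is installed per-instance by
+  // initializeTTLTracking() below.)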
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.now + status.ttl - status.start
+      }
+    }
+
+    // debounce calls to perf.now() to 1s so we're not hitting
+    // that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
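+  // A TTL sketch tying the above together (values hypothetical):
+  //
+  //   const c = new LRUCache({ max: 10, ttl: 500, ttlAutopurge: true })
+  //   c.set('k', 'v')
+  //   setTimeout(() => c.has('k'), 600) // => false, autopurge removed it
+  //
+  // ttlResolution trades precision for speed: perf.now() results are
+  // cached for up to ttlResolution ms when checking staleness.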
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation returned an invalid value (expected a positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
+
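+  // A size-tracking sketch (the sizeCalculation here is hypothetical):
+  //
+  //   const c = new LRUCache({
+  //     maxSize: 1024,
+  //     sizeCalculation: value => value.length,
+  //   })
+  //   c.set('a', 'x'.repeat(600))
+  //   c.set('b', 'y'.repeat(600)) // evicts 'a' so calculatedSize fits
+  //
+  // addItemSize() evicts from the least-recently-used end until the new
+  // entry fits under maxSize.
+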
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
+
+  dispose(_v, _k, _reason) {}
+
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
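+  // Read paths accept a `status` object for introspection; a sketch:
+  //
+  //   const status = {}
+  //   cache.has('k', { status })
+  //   status.has // => 'hit', 'stale', or 'miss'
+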
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
+
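+  // The status.fetch values above trace the stale-while-revalidate flow:
+  // 'miss' dispatches a fetch, 'inflight' joins a pending one, 'hit'
+  // serves a fresh value, and 'stale'/'refresh' dispatch a background
+  // refresh (serving the stale value meanwhile when allowStale is set).
+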
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+module.exports = LRUCache
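+
+// A minimal usage sketch for this CJS build (option values hypothetical):
+//
+//   const LRUCache = require('lru-cache')
+//   const cache = new LRUCache({ max: 500 })
+//   cache.set('key', { some: 'value' })
+//   cache.get('key') // => { some: 'value' }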
diff --git a/node_modules/npm-pick-manifest/node_modules/lru-cache/index.mjs b/node_modules/npm-pick-manifest/node_modules/lru-cache/index.mjs
new file mode 100644
index 0000000000000..4a0b4813ec515
--- /dev/null
+++ b/node_modules/npm-pick-manifest/node_modules/lru-cache/index.mjs
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
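+// A minimal sketch of the contract the polyfill above preserves (only the
+// subset of AbortController/AbortSignal this file relies on):
+//
+//   const ac = new AC()
+//   ac.signal.addEventListener('abort', () =>
+//     console.log('aborted:', ac.signal.reason.message))
+//   ac.abort(new Error('replaced')) // listener fires, reason is recorded
+//   ac.signal.aborted               // true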
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
+
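+// For example, the selection above resolves as follows (a sketch; the
+// boundaries are the inclusive powers of two in the chain):
+//
+//   getUintArray(100)     // Uint8Array  (100 <= 2**8)
+//   getUintArray(10000)   // Uint16Array (10000 <= 2**16)
+//   getUintArray(100000)  // Uint32Array (100000 <= 2**32)
+//   getUintArray(2 ** 40) // ZeroArray   (plain zero-filled Array)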
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
+
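+// The Stack above is a small free-list of recycled slot indexes (sketch):
+//
+//   const free = new Stack(10) // backed by a Uint8Array for max 10
+//   free.push(3)
+//   free.push(7)
+//   free.pop() // 7 -- the most recently freed slot is handed out first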
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
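+  // A few outcomes implied by the validation above (hedged sketch):
+  //
+  //   new LRUCache({ max: 500 })            // ok: bounded by entry count
+  //   new LRUCache({ maxSize: 1000,
+  //     sizeCalculation: v => v.length })   // ok: bounded by total size
+  //   new LRUCache({ ttl: 1000 })           // ok, but emits the
+  //                                         // unbounded-cache warning
+  //                                         // without ttlAutopurge
+  //   new LRUCache({})                      // TypeError: at least one of
+  //                                         // max, maxSize, or ttl required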
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.now + status.ttl - status.start
+      }
+    }
+
+    // debounce calls to perf.now(), caching the result for ttlResolution
+    // ms, so we're not hitting that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
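+  // Sketch of the TTL behavior installed above (only when the cache is
+  // constructed with a ttl; times in ms):
+  //
+  //   const c = new LRUCache({ max: 10, ttl: 100 })
+  //   c.set('k', 'v')
+  //   c.getRemainingTTL('k') // ~100 and counting down
+  //   // ...more than 100ms later...
+  //   c.has('k') // false -- stale entries are not reported as present
+  //   c.get('k') // undefined, and the entry is deleted (unless allowStale)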
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
+
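+  // Sketch of size-bounded usage enabled by initializeSizeTracking():
+  //
+  //   const c = new LRUCache({
+  //     maxSize: 1000,
+  //     sizeCalculation: value => value.length, // must be a positive integer
+  //   })
+  //   c.set('a', 'x'.repeat(600))
+  //   c.set('b', 'y'.repeat(600)) // evicts 'a': 600 + 600 > maxSize
+  //   c.calculatedSize            // 600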
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
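+  // Iteration order sketch: entries()/keys()/values() walk from most to
+  // least recently used; the r-prefixed generators walk the reverse.
+  //
+  //   const c = new LRUCache({ max: 3 })
+  //   c.set('a', 1); c.set('b', 2); c.set('c', 3);
+  //   c.get('a');           // touching 'a' makes it most recent
+  //   [...c.keys()]         // ['a', 'c', 'b']
+  //   [...c.rkeys()]        // ['b', 'c', 'a']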
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
+
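+  // Round-trip sketch: dump() returns [key, entry] pairs least-recent
+  // first (hence the unshift above), so load() can replay them with set()
+  // and reproduce the recency order and remaining TTLs. Assumes
+  // JSON-serializable values; `cache` is illustrative:
+  //
+  //   const snapshot = JSON.stringify(cache.dump())
+  //   // ...later, possibly in another process...
+  //   const restored = new LRUCache({ max: cache.max })
+  //   restored.load(JSON.parse(snapshot))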
+  dispose(_v, _k, _reason) {}
+
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
+
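+  // Stale-while-revalidate sketch using fetch() above. The URL and the
+  // global fetch() (Node 18+) are illustrative; any async loader works:
+  //
+  //   const c = new LRUCache({
+  //     max: 100,
+  //     ttl: 60_000,
+  //     allowStale: true,
+  //     fetchMethod: async (key, staleValue, { signal }) => {
+  //       const res = await fetch(`https://example.com/${key}`, { signal })
+  //       return res.json()
+  //     },
+  //   })
+  //   await c.fetch('item') // miss: awaits fetchMethod
+  //   await c.fetch('item') // hit: resolves from the cache
+  //   // once stale: resolves immediately with the stale value while the
+  //   // background fetch refreshes the entry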
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+export default LRUCache
diff --git a/node_modules/npm-pick-manifest/node_modules/lru-cache/package.json b/node_modules/npm-pick-manifest/node_modules/lru-cache/package.json
new file mode 100644
index 0000000000000..9684991727e7a
--- /dev/null
+++ b/node_modules/npm-pick-manifest/node_modules/lru-cache/package.json
@@ -0,0 +1,96 @@
+{
+  "name": "lru-cache",
+  "description": "A cache object that deletes the least-recently-used items.",
+  "version": "7.18.3",
+  "author": "Isaac Z. Schlueter ",
+  "keywords": [
+    "mru",
+    "lru",
+    "cache"
+  ],
+  "sideEffects": false,
+  "scripts": {
+    "build": "npm run prepare",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "prepare": "node ./scripts/transpile-to-esm.js",
+    "size": "size-limit",
+    "test": "tap",
+    "snap": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write .",
+    "typedoc": "typedoc ./index.d.ts"
+  },
+  "type": "commonjs",
+  "main": "./index.js",
+  "module": "./index.mjs",
+  "types": "./index.d.ts",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./index.d.ts",
+        "default": "./index.mjs"
+      },
+      "require": {
+        "types": "./index.d.ts",
+        "default": "./index.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "repository": "git://github.com/isaacs/node-lru-cache.git",
+  "devDependencies": {
+    "@size-limit/preset-small-lib": "^7.0.8",
+    "@types/node": "^17.0.31",
+    "@types/tap": "^15.0.6",
+    "benchmark": "^2.1.4",
+    "c8": "^7.11.2",
+    "clock-mock": "^1.0.6",
+    "eslint-config-prettier": "^8.5.0",
+    "prettier": "^2.6.2",
+    "size-limit": "^7.0.8",
+    "tap": "^16.3.4",
+    "ts-node": "^10.7.0",
+    "tslib": "^2.4.0",
+    "typedoc": "^0.23.24",
+    "typescript": "^4.6.4"
+  },
+  "license": "ISC",
+  "files": [
+    "index.js",
+    "index.mjs",
+    "index.d.ts"
+  ],
+  "engines": {
+    "node": ">=12"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tap": {
+    "nyc-arg": [
+      "--include=index.js"
+    ],
+    "node-arg": [
+      "--expose-gc",
+      "--require",
+      "ts-node/register"
+    ],
+    "ts": false
+  },
+  "size-limit": [
+    {
+      "path": "./index.js"
+    }
+  ]
+}
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE b/node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE
new file mode 100644
index 0000000000000..f785757cd63f8
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/index.js b/node_modules/npm-registry-fetch/node_modules/lru-cache/index.js
new file mode 100644
index 0000000000000..48e99fe5e5a70
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/lru-cache/index.js
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
+
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
+
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.now + status.ttl - status.start
+      }
+    }
+
+    // debounce calls to perf.now(), caching the result for ttlResolution
+    // ms, so we're not hitting that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
+
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
+
+  dispose(_v, _k, _reason) {}
+
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
+
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+module.exports = LRUCache
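
The file above is the complete CommonJS build of lru-cache 7.x, and it is the piece the vendored make-fetch-happen leans on for stale-while-revalidate caching: a bounded entry count, per-entry TTLs, and an async fetch() that can serve a stale value while a background fetch replaces it. A minimal consumer sketch of that flow (lookupUpstream and the 5-second TTL are illustrative, not taken from this patch):

const LRUCache = require('lru-cache')

// stand-in for whatever slow lookup is being cached
const lookupUpstream = async (key, { signal }) => ({ key, fetchedAt: Date.now() })

const cache = new LRUCache({
  max: 100,         // at most 100 entries
  ttl: 5000,        // each entry valid for 5 seconds
  allowStale: true, // stale values may be served while refreshing
  // fetchMethod is what cache.fetch() dispatches on a miss or stale hit;
  // its signal aborts if the entry is evicted or replaced mid-flight
  fetchMethod: (key, staleValue, { signal }) => lookupUpstream(key, { signal }),
})

async function main () {
  cache.set('a', 1)
  console.log(cache.get('a')) // 1 -- the synchronous path

  // miss: dispatches fetchMethod and resolves with its result
  console.log(await cache.fetch('b'))

  // once the TTL lapses, fetch('b') returns the stale value immediately
  // (allowStale) while a background fetch refreshes the entry
}
main()
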
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/index.mjs b/node_modules/npm-registry-fetch/node_modules/lru-cache/index.mjs
new file mode 100644
index 0000000000000..4a0b4813ec515
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/lru-cache/index.mjs
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
+
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
+
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.now + status.ttl - status.start
+      }
+    }
+
+    // debounce calls to perf.now() to 1s so we're not hitting
+    // that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
+
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
+
+  dispose(_v, _k, _reason) {}
+
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
+
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+export default LRUCache
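
This ESM build is the same implementation as the CommonJS file above, differing only in the final export statement. One capability worth noting in both copies is the dump()/load() pair, which serializes entries oldest-first and converts each start time onto a portable Date.now() basis so a snapshot survives crossing process (and perf.now() epoch) boundaries. A small sketch, with the key and TTL assumed:

import LRUCache from 'lru-cache'

const a = new LRUCache({ max: 10, ttl: 60_000 })
a.set('pkg', { version: '1.0.0' })

// dump() emits [key, entry] pairs with ttl/start normalized against
// Date.now(), so the snapshot is safe to serialize
const snapshot = JSON.stringify(a.dump())

// load() replays the pairs into a fresh cache, mapping each portable
// start back onto the local perf.now() clock
const b = new LRUCache({ max: 10, ttl: 60_000 })
b.load(JSON.parse(snapshot))
b.get('pkg') // { version: '1.0.0' }, with its remaining TTL intact
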
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/package.json b/node_modules/npm-registry-fetch/node_modules/lru-cache/package.json
new file mode 100644
index 0000000000000..9684991727e7a
--- /dev/null
+++ b/node_modules/npm-registry-fetch/node_modules/lru-cache/package.json
@@ -0,0 +1,96 @@
+{
+  "name": "lru-cache",
+  "description": "A cache object that deletes the least-recently-used items.",
+  "version": "7.18.3",
+  "author": "Isaac Z. Schlueter <i@izs.me>",
+  "keywords": [
+    "mru",
+    "lru",
+    "cache"
+  ],
+  "sideEffects": false,
+  "scripts": {
+    "build": "npm run prepare",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "prepare": "node ./scripts/transpile-to-esm.js",
+    "size": "size-limit",
+    "test": "tap",
+    "snap": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write .",
+    "typedoc": "typedoc ./index.d.ts"
+  },
+  "type": "commonjs",
+  "main": "./index.js",
+  "module": "./index.mjs",
+  "types": "./index.d.ts",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./index.d.ts",
+        "default": "./index.mjs"
+      },
+      "require": {
+        "types": "./index.d.ts",
+        "default": "./index.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "repository": "git://github.com/isaacs/node-lru-cache.git",
+  "devDependencies": {
+    "@size-limit/preset-small-lib": "^7.0.8",
+    "@types/node": "^17.0.31",
+    "@types/tap": "^15.0.6",
+    "benchmark": "^2.1.4",
+    "c8": "^7.11.2",
+    "clock-mock": "^1.0.6",
+    "eslint-config-prettier": "^8.5.0",
+    "prettier": "^2.6.2",
+    "size-limit": "^7.0.8",
+    "tap": "^16.3.4",
+    "ts-node": "^10.7.0",
+    "tslib": "^2.4.0",
+    "typedoc": "^0.23.24",
+    "typescript": "^4.6.4"
+  },
+  "license": "ISC",
+  "files": [
+    "index.js",
+    "index.mjs",
+    "index.d.ts"
+  ],
+  "engines": {
+    "node": ">=12"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tap": {
+    "nyc-arg": [
+      "--include=index.js"
+    ],
+    "node-arg": [
+      "--expose-gc",
+      "--require",
+      "ts-node/register"
+    ],
+    "ts": false
+  },
+  "size-limit": [
+    {
+      "path": "./index.js"
+    }
+  ]
+}
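
The conditional exports map in this manifest is what lets the two builds above coexist: require() resolves the "require" condition to ./index.js, import resolves the "import" condition to ./index.mjs, and both share ./index.d.ts for types. From a consumer's point of view:

// CommonJS: resolves ./index.js via the "require" condition
const LRUCache = require('lru-cache')

// ESM: resolves ./index.mjs via the "import" condition
// import LRUCache from 'lru-cache'
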
diff --git a/node_modules/pacote/node_modules/lru-cache/LICENSE b/node_modules/pacote/node_modules/lru-cache/LICENSE
new file mode 100644
index 0000000000000..f785757cd63f8
--- /dev/null
+++ b/node_modules/pacote/node_modules/lru-cache/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/lru-cache/index.js b/node_modules/pacote/node_modules/lru-cache/index.js
new file mode 100644
index 0000000000000..48e99fe5e5a70
--- /dev/null
+++ b/node_modules/pacote/node_modules/lru-cache/index.js
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
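+
+// Note: the AC polyfill's constructor above reads AS before this const is
+// initialized. That is safe: the constructor body only runs after module
+// evaluation completes, by which point the AS binding exists.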
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
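+
+// For example, max = 1000 selects Uint16Array: every index fits in 16 bits,
+// so the next/prev link arrays cost 2 bytes per entry.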
+
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
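+
+// Stack doubles as a free list: delete() and evict() push vacated indexes
+// here, and newIndex() pops them for reuse, so entry slots are recycled
+// without shifting any arrays.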
+
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.start + status.ttl - status.now
+      }
+    }
+
+    // debounce calls to perf.now() to the ttlResolution window (1ms by
+    // default) so we're not hitting that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
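+  // The four no-op methods above are prototype defaults;
+  // initializeTTLTracking() shadows them with per-instance closures when a
+  // ttl is configured, so caches without TTLs skip expiration bookkeeping.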
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
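+  // Same pattern as the TTL no-ops: initializeSizeTracking() above installs
+  // the real implementations only when maxSize or maxEntrySize is in use.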
+
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
+
+  dispose(_v, _k, _reason) {}
+
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
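+  // Duck-typing check: only promises minted by backgroundFetch() carry a
+  // __staleWhileFetching property plus a __returned equal to null (pending)
+  // or the promise itself (returned to a caller), so ordinary thenables
+  // stored as cache values never match.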
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
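+  // Hypothetical usage sketch (consumer code, not part of this module;
+  // loadRemote is an assumed helper):
+  //
+  //   const cache = new LRUCache({
+  //     max: 100,
+  //     fetchMethod: async (key, staleValue, { signal }) =>
+  //       loadRemote(key, { signal }),
+  //   })
+  //   const value = await cache.fetch('some-key')
+  //
+  // Concurrent fetch() calls for the same key share one in-flight promise.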
+
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+module.exports = LRUCache
diff --git a/node_modules/pacote/node_modules/lru-cache/index.mjs b/node_modules/pacote/node_modules/lru-cache/index.mjs
new file mode 100644
index 0000000000000..4a0b4813ec515
--- /dev/null
+++ b/node_modules/pacote/node_modules/lru-cache/index.mjs
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
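+
+// Note: the AC polyfill's constructor above reads AS before this const is
+// initialized. That is safe: the constructor body only runs after module
+// evaluation completes, by which point the AS binding exists.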
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
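+
+// For example, max = 1000 selects Uint16Array: every index fits in 16 bits,
+// so the next/prev link arrays cost 2 bytes per entry.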
+
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
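+
+// Stack doubles as a free list: delete() and evict() push vacated indexes
+// here, and newIndex() pops them for reuse, so entry slots are recycled
+// without shifting any arrays.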
+
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.start + status.ttl - status.now
+      }
+    }
+
+    // debounce calls to perf.now() to the ttlResolution window (1ms by
+    // default) so we're not hitting that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
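+  // The four no-op methods above are prototype defaults;
+  // initializeTTLTracking() shadows them with per-instance closures when a
+  // ttl is configured, so caches without TTLs skip expiration bookkeeping.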
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
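+  // Same pattern as the TTL no-ops: initializeSizeTracking() above installs
+  // the real implementations only when maxSize or maxEntrySize is in use.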
+
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
+
+  dispose(_v, _k, _reason) {}
+
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
+
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet.
+        // it's not stale, which means this isn't a stale-while-refetching case.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+export default LRUCache
diff --git a/node_modules/pacote/node_modules/lru-cache/package.json b/node_modules/pacote/node_modules/lru-cache/package.json
new file mode 100644
index 0000000000000..9684991727e7a
--- /dev/null
+++ b/node_modules/pacote/node_modules/lru-cache/package.json
@@ -0,0 +1,96 @@
+{
+  "name": "lru-cache",
+  "description": "A cache object that deletes the least-recently-used items.",
+  "version": "7.18.3",
+  "author": "Isaac Z. Schlueter ",
+  "keywords": [
+    "mru",
+    "lru",
+    "cache"
+  ],
+  "sideEffects": false,
+  "scripts": {
+    "build": "npm run prepare",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "prepare": "node ./scripts/transpile-to-esm.js",
+    "size": "size-limit",
+    "test": "tap",
+    "snap": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write .",
+    "typedoc": "typedoc ./index.d.ts"
+  },
+  "type": "commonjs",
+  "main": "./index.js",
+  "module": "./index.mjs",
+  "types": "./index.d.ts",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./index.d.ts",
+        "default": "./index.mjs"
+      },
+      "require": {
+        "types": "./index.d.ts",
+        "default": "./index.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "repository": "git://github.com/isaacs/node-lru-cache.git",
+  "devDependencies": {
+    "@size-limit/preset-small-lib": "^7.0.8",
+    "@types/node": "^17.0.31",
+    "@types/tap": "^15.0.6",
+    "benchmark": "^2.1.4",
+    "c8": "^7.11.2",
+    "clock-mock": "^1.0.6",
+    "eslint-config-prettier": "^8.5.0",
+    "prettier": "^2.6.2",
+    "size-limit": "^7.0.8",
+    "tap": "^16.3.4",
+    "ts-node": "^10.7.0",
+    "tslib": "^2.4.0",
+    "typedoc": "^0.23.24",
+    "typescript": "^4.6.4"
+  },
+  "license": "ISC",
+  "files": [
+    "index.js",
+    "index.mjs",
+    "index.d.ts"
+  ],
+  "engines": {
+    "node": ">=12"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tap": {
+    "nyc-arg": [
+      "--include=index.js"
+    ],
+    "node-arg": [
+      "--expose-gc",
+      "--require",
+      "ts-node/register"
+    ],
+    "ts": false
+  },
+  "size-limit": [
+    {
+      "path": "./index.js"
+    }
+  ]
+}
diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js b/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js
deleted file mode 100644
index 02d76ec800a92..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js
+++ /dev/null
@@ -1,1404 +0,0 @@
-"use strict";
-/**
- * @module LRUCache
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.LRUCache = void 0;
-const perf = typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function'
-    ? performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * All properties from the options object (with the exception of
- * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
- * normal public members. (`max` and `maxSize` are read-only getters.)
- * Changing any of these will alter the defaults for subsequent method calls,
- * but is otherwise safe.
- */
-class LRUCache {
-    // properties coming in from the options of these, only max and maxSize
-    // really *need* to be protected. The rest can be modified, as they just
-    // set defaults for various methods.
-    #max;
-    #maxSize;
-    #dispose;
-    #disposeAfter;
-    #fetchMethod;
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0
-                ? ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the remaining TTL time for a given entry key
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.delete(this.#keyList[index]);
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (ttl === 0 || start === 0) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            return (ttls[index] !== 0 &&
-                starts[index] !== 0 &&
-                (cachedNow || getNow()) - starts[index] > ttls[index]);
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to Array.find().  fn is called as fn(value, key, cache).
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from
-     * most recently used to least recently used.  fn is called as
-     * fn(value, key, cache).  Does not update age or recency of use.
-     * Does not iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.delete(this.#keyList[i]);
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to cache.load()
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     * Note that the shape of the resulting cache may be different if the
-     * same options are not used in both caches.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.delete(k);
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0
-                ? this.#tail
-                : this.#free.length !== 0
-                    ? this.#free.pop()
-                    : this.#size === this.#max
-                        ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
-                        ? oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined &&
-            (allowStale || !this.#isStale(index))) {
-            const v = this.#valList[index];
-            // either stale and allowed, or forcing a refresh of non-stale value
-            return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        }
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.delete(k);
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.delete(k);
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
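-    // Usage sketch for fetch() (the fetchFromUpstream helper is hypothetical):
-    // with allowStale set and a stale entry present, the stale value is
-    // returned immediately while the refresh runs in the background:
-    //
-    //   const cache = new LRUCache({
-    //     max: 100,
-    //     ttl: 60_000,
-    //     allowStale: true,
-    //     fetchMethod: (key) => fetchFromUpstream(key),
-    //   })
-    //   const value = await cache.fetch('some-key')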
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.delete(k);
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet;
-                // it's not stale, which means this isn't a staleWhileFetching
-                // situation. If it's not stale, and fetching, AND has a
-                // __staleWhileFetching value, then that means the user fetched
-                // with {forceRefresh:true}, so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
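-    // Usage sketch for the status option on get() (illustrative):
-    //
-    //   const status = {}
-    //   const v = cache.get('some-key', { status })
-    //   // status.get is 'hit', 'stale', or 'miss';
-    //   // status.returnedStale is set when a stale value was served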
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
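-    // The recency list is a doubly-linked list stored as two parallel index
-    // arrays, so relinking is O(1) with no node allocation. A standalone
-    // sketch of the relink #moveToTail performs (hypothetical 3-slot list,
-    // order 0 -> 1 -> 2, moving the middle index 1 to the tail):
-    //
-    //   const next = new Uint8Array(3), prev = new Uint8Array(3)
-    //   let head = 0, tail = 2
-    //   next[0] = 1; next[1] = 2; prev[1] = 0; prev[2] = 1
-    //   next[prev[1]] = next[1]; prev[next[1]] = prev[1]  // unlink index 1
-    //   prev[1] = tail; next[tail] = 1; tail = 1          // append at tail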
-    /**
-     * Deletes a key out of the cache.
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.clear();
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, 'delete');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, 'delete']);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        this.#next[this.#prev[index]] = this.#next[index];
-                        this.#prev[this.#next[index]] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, 'delete');
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, 'delete']);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-exports.LRUCache = LRUCache;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js
deleted file mode 100644
index 8d34a03041d25..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: 
"+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else 
r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return 
l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C;
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/package.json b/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js b/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js
deleted file mode 100644
index 23b9754ad6c76..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js
+++ /dev/null
@@ -1,1400 +0,0 @@
-/**
- * @module LRUCache
- */
-const perf = typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function'
-    ? performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
-/* c8 ignore stop */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
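-// A sketch of how the free-index Stack is used (values are illustrative):
-//
-//   const free = Stack.create(10)  // Uint8Array-backed, since 10 <= 2**8
-//   free.push(3)                   // slot 3 was freed by a delete()
-//   free.pop()                     // 3: the next set() reuses that slot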
-/**
- * Default export, the thing you're using this module to get.
- *
- * All properties from the options object (with the exception of
- * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
- * normal public members. (`max` and `maxSize` are read-only getters.)
- * Changing any of these will alter the defaults for subsequent method calls,
- * but is otherwise safe.
- */
-export class LRUCache {
-    // Of the properties coming in from the options, only max and maxSize
-    // really *need* to be protected. The rest can be modified, as they just
-    // set defaults for various methods.
-    #max;
-    #maxSize;
-    #dispose;
-    #disposeAfter;
-    #fetchMethod;
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0
-                ? ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
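-    // The constructor demands at least one bound, as enforced above. Minimal
-    // valid configurations (a sketch; values are illustrative):
-    //
-    //   new LRUCache({ max: 500 })                          // entry-count bound
-    //   new LRUCache({ maxSize: 5000,
-    //     sizeCalculation: (v) => v.length })               // total-size bound
-    //   new LRUCache({ ttl: 60_000, ttlAutopurge: true })   // time bound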
-    /**
-     * Return the remaining TTL time for a given entry key
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.delete(this.#keyList[index]);
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to the ttlResolution interval,
-        // so we're not hitting that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (ttl === 0 || start === 0) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            return (ttls[index] !== 0 &&
-                starts[index] !== 0 &&
-                (cachedNow || getNow()) - starts[index] > ttls[index]);
-        };
-    }
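-    // Once TTL tracking is initialized, getRemainingTTL() is replaced with the
-    // real implementation above. A usage sketch (illustrative):
-    //
-    //   const c = new LRUCache({ max: 10, ttl: 1000 })
-    //   c.set('k', 'v')
-    //   c.getRemainingTTL('k')     // ~1000, counting down; Infinity if no ttl
-    //   c.getRemainingTTL('nope')  // 0 for keys not in the cache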
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to Array.find().  fn is called as fn(value, key, cache).
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from
-     * most recently used to least recently used.  fn is called as
-     * fn(value, key, cache).  Does not update age or recency of use.
-     * Does not iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.delete(this.#keyList[i]);
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to cache.load()
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     * Note that the shape of the resulting cache may be different if the
-     * same options are not used in both caches.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
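-    // dump() + load() round-trip sketch; start times survive serialization
-    // because dump() converts them to portable Date.now()-based timestamps:
-    //
-    //   const saved = JSON.stringify(cache.dump())
-    //   const revived = new LRUCache({ max: 100 })  // same options recommended
-    //   revived.load(JSON.parse(saved))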
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.delete(k);
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0
-                ? this.#tail
-                : this.#free.length !== 0
-                    ? this.#free.pop()
-                    : this.#size === this.#max
-                        ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
-                        ? oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined &&
-            (allowStale || !this.#isStale(index))) {
-            const v = this.#valList[index];
-            // either stale and allowed, or forcing a refresh of non-stale value
-            return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        }
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.delete(k);
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.delete(k);
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.delete(k);
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.clear();
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, 'delete');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, 'delete']);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        this.#next[this.#prev[index]] = this.#next[index];
-                        this.#prev[this.#next[index]] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, 'delete');
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, 'delete']);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js
deleted file mode 100644
index 5a16b3940d6df..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: 
"+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else 
r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return 
l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/package.json b/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/path-scurry/node_modules/lru-cache/package.json b/node_modules/path-scurry/node_modules/lru-cache/package.json
deleted file mode 100644
index bae4a04839d1f..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,108 +0,0 @@
-{
-  "name": "lru-cache",
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "10.0.1",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "preprepare": "rm -rf dist",
-    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-    "postprepare": "bash fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "c8 tap",
-    "snap": "c8 tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
-    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
-    "prebenchmark": "npm run prepare",
-    "benchmark": "make -C benchmark",
-    "preprofile": "npm run prepare",
-    "profile": "make -C benchmark profile"
-  },
-  "main": "./dist/cjs/index.js",
-  "module": "./dist/mjs/index.js",
-  "exports": {
-    "./min": {
-      "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.min.js"
-      },
-      "require": {
-        "types": "./dist/cjs/index.d.ts",
-        "default": "./dist/cjs/index.min.js"
-      }
-    },
-    ".": {
-      "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.js"
-      },
-      "require": {
-        "types": "./dist/cjs/index.d.ts",
-        "default": "./dist/cjs/index.js"
-      }
-    }
-  },
-  "repository": "git://github.com/isaacs/node-lru-cache.git",
-  "devDependencies": {
-    "@size-limit/preset-small-lib": "^7.0.8",
-    "@types/node": "^20.2.5",
-    "@types/tap": "^15.0.6",
-    "benchmark": "^2.1.4",
-    "c8": "^7.11.2",
-    "clock-mock": "^1.0.6",
-    "esbuild": "^0.17.11",
-    "eslint-config-prettier": "^8.5.0",
-    "marked": "^4.2.12",
-    "mkdirp": "^2.1.5",
-    "prettier": "^2.6.2",
-    "size-limit": "^7.0.8",
-    "tap": "^16.3.4",
-    "ts-node": "^10.9.1",
-    "tslib": "^2.4.0",
-    "typedoc": "^0.24.6",
-    "typescript": "^5.0.4"
-  },
-  "license": "ISC",
-  "files": [
-    "dist"
-  ],
-  "engines": {
-    "node": "14 || >=16.14"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "coverage": false,
-    "node-arg": [
-      "--expose-gc",
-      "-r",
-      "ts-node/register"
-    ],
-    "ts": false
-  },
-  "size-limit": [
-    {
-      "path": "./dist/mjs/index.js"
-    }
-  ]
-}
diff --git a/node_modules/sigstore/node_modules/lru-cache/LICENSE b/node_modules/sigstore/node_modules/lru-cache/LICENSE
new file mode 100644
index 0000000000000..f785757cd63f8
--- /dev/null
+++ b/node_modules/sigstore/node_modules/lru-cache/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/lru-cache/index.js b/node_modules/sigstore/node_modules/lru-cache/index.js
new file mode 100644
index 0000000000000..48e99fe5e5a70
--- /dev/null
+++ b/node_modules/sigstore/node_modules/lru-cache/index.js
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
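+// track the warning codes that have already been emitted, so that
+// each deprecation warning fires at most once per process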
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
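+// true only for finite positive integers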
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
+
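+// a plain Array subclass that zero-fills itself on construction, used
+// as the index store when max is too large for any Uint typed array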
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
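+// a fixed-capacity stack of free slot indexes, so that slots vacated
+// by delete/evict can be reused without reshuffling the backing arrays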
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
+
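+// A minimal usage sketch (illustrative only, not part of the vendored
+// file; it assumes the package's usual `module.exports = LRUCache`):
+//
+//   const LRUCache = require('lru-cache')
+//   const cache = new LRUCache({ max: 100, ttl: 1000 * 60 })
+//   cache.set('key', 'value')
+//   cache.get('key') // => 'value' until evicted or expired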
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
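+    // the core storage: keyMap maps a key to its slot index, and
+    // keyList/valList hold the key and value at that index.  next and
+    // prev encode a doubly-linked recency order over slot indexes, with
+    // head as the least-recently used slot and tail as the most-recent.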
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize falls back to maxSize, so this runs when either is set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
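+  // default: a present key never expires; initializeTTLTracking()
+  // replaces this with a real calculation when a ttl is in use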
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.start + status.ttl - status.now
+      }
+    }
+
+    // debounce calls to perf.now() to 1s so we're not hitting
+    // that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
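+  // no-op defaults for the TTL hooks; initializeTTLTracking() shadows
+  // these with per-instance implementations when a ttl is in use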
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
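+  // no-op defaults for the size hooks; initializeSizeTracking() shadows
+  // these when maxSize or maxEntrySize is in use.  note that the
+  // default requireSize rejects size options on a cache without size
+  // tracking.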
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
+
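+  // walk the linked list from tail (most-recently used) back to head,
+  // yielding each index that is valid and, unless allowStale, not stale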
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
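+  // the reverse walk: from head (least-recently used) forward to tail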
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
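+  // an index is valid only while it is still the slot that keyMap
+  // currently assigns to the key stored there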
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
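+
+  // Illustrative (annotation, not upstream code): iteration walks from the
+  // most-recently-used entry back to the least-recently-used one, and the
+  // r-prefixed generators reverse that order:
+  //
+  //   for (const [key, value] of cache) { /* MRU first */ }
+  //   const lruFirst = [...cache.rkeys()]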
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
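+
+  // Illustrative dump()/load() round trip (annotation, not upstream code),
+  // assuming the target cache is built with the same options as the source:
+  //
+  //   const snapshot = cache.dump()   // oldest entry first, so recency survives load()
+  //   const clone = new LRUCache({ max: cache.max })
+  //   clone.load(snapshot)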
+
+  dispose(_v, _k, _reason) {}
+
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
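+
+  // Annotation (not upstream code): the returned promise is decorated so the
+  // rest of the class can recognize it. __abortController lets eviction or
+  // deletion cancel the in-flight fetchMethod, __staleWhileFetching carries
+  // the previous value that may be served in the meantime, and __returned is
+  // set to the promise itself once a caller receives it, which is what makes
+  // a later rejection throw instead of being silently swallowed.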
+
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
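+
+  // Illustrative use of fetch() (annotation, not upstream code; loadUser is
+  // a hypothetical async loader):
+  //
+  //   const cache = new LRUCache({
+  //     max: 100,
+  //     ttl: 60_000,
+  //     allowStale: true,
+  //     fetchMethod: async (key, staleValue, { signal }) => loadUser(key, { signal }),
+  //   })
+  //   await cache.fetch('user:1')   // miss: dispatches fetchMethod
+  //   await cache.fetch('user:1')   // fresh hit: cached value, no fetch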
+
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+module.exports = LRUCache
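
A minimal sketch of how the CommonJS build above is consumed (illustrative
only; option values are arbitrary):

    const LRUCache = require('lru-cache')
    const cache = new LRUCache({ max: 500, ttl: 5 * 60 * 1000 })
    cache.set('key', 'value')
    cache.get('key')     // => 'value'
    cache.has('key')     // => true
    cache.delete('key')
    cache.purgeStale()   // deletes expired entries; returns true if any were removed
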
diff --git a/node_modules/sigstore/node_modules/lru-cache/index.mjs b/node_modules/sigstore/node_modules/lru-cache/index.mjs
new file mode 100644
index 0000000000000..4a0b4813ec515
--- /dev/null
+++ b/node_modules/sigstore/node_modules/lru-cache/index.mjs
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
+
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
+
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.now + status.ttl - status.start
+      }
+    }
+
+    // debounce calls to perf.now() to 1s so we're not hitting
+    // that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
+
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
+
+  dispose(_v, _k, _reason) {}
+
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
+
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+export default LRUCache
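
A matching sketch for the ESM build (illustrative only), showing the default
export and the default handling of stale entries:

    import LRUCache from 'lru-cache'
    const cache = new LRUCache({ max: 10, ttl: 1000, allowStale: true })
    cache.set('k', 'v')
    cache.get('k')   // => 'v' while fresh
    // once the TTL elapses, the stale value is served once and the entry deleted:
    cache.get('k')   // => 'v' (stale)
    cache.get('k')   // => undefined
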
diff --git a/node_modules/sigstore/node_modules/lru-cache/package.json b/node_modules/sigstore/node_modules/lru-cache/package.json
new file mode 100644
index 0000000000000..9684991727e7a
--- /dev/null
+++ b/node_modules/sigstore/node_modules/lru-cache/package.json
@@ -0,0 +1,96 @@
+{
+  "name": "lru-cache",
+  "description": "A cache object that deletes the least-recently-used items.",
+  "version": "7.18.3",
+  "author": "Isaac Z. Schlueter ",
+  "keywords": [
+    "mru",
+    "lru",
+    "cache"
+  ],
+  "sideEffects": false,
+  "scripts": {
+    "build": "npm run prepare",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "prepare": "node ./scripts/transpile-to-esm.js",
+    "size": "size-limit",
+    "test": "tap",
+    "snap": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write .",
+    "typedoc": "typedoc ./index.d.ts"
+  },
+  "type": "commonjs",
+  "main": "./index.js",
+  "module": "./index.mjs",
+  "types": "./index.d.ts",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./index.d.ts",
+        "default": "./index.mjs"
+      },
+      "require": {
+        "types": "./index.d.ts",
+        "default": "./index.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "repository": "git://github.com/isaacs/node-lru-cache.git",
+  "devDependencies": {
+    "@size-limit/preset-small-lib": "^7.0.8",
+    "@types/node": "^17.0.31",
+    "@types/tap": "^15.0.6",
+    "benchmark": "^2.1.4",
+    "c8": "^7.11.2",
+    "clock-mock": "^1.0.6",
+    "eslint-config-prettier": "^8.5.0",
+    "prettier": "^2.6.2",
+    "size-limit": "^7.0.8",
+    "tap": "^16.3.4",
+    "ts-node": "^10.7.0",
+    "tslib": "^2.4.0",
+    "typedoc": "^0.23.24",
+    "typescript": "^4.6.4"
+  },
+  "license": "ISC",
+  "files": [
+    "index.js",
+    "index.mjs",
+    "index.d.ts"
+  ],
+  "engines": {
+    "node": ">=12"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tap": {
+    "nyc-arg": [
+      "--include=index.js"
+    ],
+    "node-arg": [
+      "--expose-gc",
+      "--require",
+      "ts-node/register"
+    ],
+    "ts": false
+  },
+  "size-limit": [
+    {
+      "path": "./index.js"
+    }
+  ]
+}
diff --git a/node_modules/tuf-js/node_modules/lru-cache/LICENSE b/node_modules/tuf-js/node_modules/lru-cache/LICENSE
new file mode 100644
index 0000000000000..f785757cd63f8
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/lru-cache/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tuf-js/node_modules/lru-cache/index.js b/node_modules/tuf-js/node_modules/lru-cache/index.js
new file mode 100644
index 0000000000000..48e99fe5e5a70
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/lru-cache/index.js
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
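+
+// Polyfill sanity sketch (illustrative, not part of the library): the only
+// behavior the cache relies on is that abort() records a reason, flips
+// `aborted`, and fires 'abort' listeners once:
+//
+//   const ac = new AC()
+//   ac.signal.addEventListener('abort', () => console.log(ac.signal.reason))
+//   ac.abort(new Error('example'))   // logs: Error: example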
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
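+
+// Worked example (illustrative only): getUintArray(256) returns Uint8Array
+// (256 <= 2^8), getUintArray(70000) returns Uint32Array, and any max above
+// 2^32 falls back to ZeroArray, the plain zero-filled Array defined below.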
+
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
+
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
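+
+  // Construction sketch, assuming only the public options parsed above:
+  //
+  //   const byCount = new LRUCache({ max: 500 })
+  //   const bySize = new LRUCache({
+  //     maxSize: 5000,
+  //     sizeCalculation: v => v.length,   // e.g. string values
+  //   })
+  //
+  // Omitting max, maxSize, and ttl throws the TypeError above.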
+
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.now + status.ttl - status.start
+      }
+    }
+
+    // debounce calls to perf.now() to 1s so we're not hitting
+    // that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
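+
+  // TTL sketch: with { max: 10, ttl: 1000 }, a fresh entry reports
+  // getRemainingTTL(key) of roughly 1000, counting down as it ages. Because
+  // getNow() above caches perf.now() for ttlResolution ms (default 1), time
+  // is only re-sampled at that granularity; pass ttlResolution: 0 to sample
+  // on every check.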
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
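+
+  // Size-tracking sketch: with { maxSize: 100, sizeCalculation: s => s.length },
+  // set('a', 'x'.repeat(60)) records size 60; a second 60-char entry pushes
+  // the total past maxSize, so addItemSize() above evicts from the head until
+  // the new entry fits.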
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
+
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
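+
+  // Persistence sketch (illustrative), assuming both caches are constructed
+  // with the same options (say { max: 100 }): dump() and load() round-trip
+  // entries through plain serializable data, rebasing each start onto
+  // Date.now() so ages survive across processes:
+  //
+  //   const arr = cache.dump()
+  //   const copy = new LRUCache({ max: 100 })
+  //   copy.load(arr)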
+
+  dispose(_v, _k, _reason) {}
+
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
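+
+  // Observability sketch: pass a status object to see what set() decided,
+  // assuming a fresh cache:
+  //
+  //   const status = {}
+  //   cache.set('a', 1, { status })   // status.set === 'add'
+  //   cache.set('a', 2, { status })   // status.set === 'replace', oldValue 1
+  //   cache.set('a', 2, { status })   // status.set === 'update' (same value)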
+
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
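+
+  // Stale-while-revalidate sketch (fetchMethod is user-supplied; the URL is
+  // hypothetical):
+  //
+  //   const cache = new LRUCache({
+  //     max: 100,
+  //     ttl: 60_000,
+  //     allowStale: true,
+  //     fetchMethod: async (key, staleValue, { signal }) =>
+  //       (await fetch(`https://example.com/${key}`, { signal })).json(),
+  //   })
+  //   const data = await cache.fetch('resource')
+  //
+  // With allowStale set, fetch() of a stale key returns the old value
+  // immediately while the refresh above proceeds in the background.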
+
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+module.exports = LRUCache
diff --git a/node_modules/tuf-js/node_modules/lru-cache/index.mjs b/node_modules/tuf-js/node_modules/lru-cache/index.mjs
new file mode 100644
index 0000000000000..4a0b4813ec515
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/lru-cache/index.mjs
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values.  Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ? ZeroArray
+    : null
+
+class ZeroArray extends Array {
+  constructor(size) {
+    super(size)
+    this.fill(0)
+  }
+}
+
+class Stack {
+  constructor(max) {
+    if (max === 0) {
+      return []
+    }
+    const UintArray = getUintArray(max)
+    this.heap = new UintArray(max)
+    this.length = 0
+  }
+  push(n) {
+    this.heap[this.length++] = n
+  }
+  pop() {
+    return this.heap[--this.length]
+  }
+}
+
+class LRUCache {
+  constructor(options = {}) {
+    const {
+      max = 0,
+      ttl,
+      ttlResolution = 1,
+      ttlAutopurge,
+      updateAgeOnGet,
+      updateAgeOnHas,
+      allowStale,
+      dispose,
+      disposeAfter,
+      noDisposeOnSet,
+      noUpdateTTL,
+      maxSize = 0,
+      maxEntrySize = 0,
+      sizeCalculation,
+      fetchMethod,
+      fetchContext,
+      noDeleteOnFetchRejection,
+      noDeleteOnStaleGet,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+    } = options
+
+    // deprecated options, don't trigger a warning for getting them if
+    // the thing being passed in is another LRUCache we're copying.
+    const { length, maxAge, stale } =
+      options instanceof LRUCache ? {} : options
+
+    if (max !== 0 && !isPosInt(max)) {
+      throw new TypeError('max option must be a nonnegative integer')
+    }
+
+    const UintArray = max ? getUintArray(max) : Array
+    if (!UintArray) {
+      throw new Error('invalid max value: ' + max)
+    }
+
+    this.max = max
+    this.maxSize = maxSize
+    this.maxEntrySize = maxEntrySize || this.maxSize
+    this.sizeCalculation = sizeCalculation || length
+    if (this.sizeCalculation) {
+      if (!this.maxSize && !this.maxEntrySize) {
+        throw new TypeError(
+          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
+        )
+      }
+      if (typeof this.sizeCalculation !== 'function') {
+        throw new TypeError('sizeCalculation set to non-function')
+      }
+    }
+
+    this.fetchMethod = fetchMethod || null
+    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
+      throw new TypeError(
+        'fetchMethod must be a function if specified'
+      )
+    }
+
+    this.fetchContext = fetchContext
+    if (!this.fetchMethod && fetchContext !== undefined) {
+      throw new TypeError(
+        'cannot set fetchContext without fetchMethod'
+      )
+    }
+
+    this.keyMap = new Map()
+    this.keyList = new Array(max).fill(null)
+    this.valList = new Array(max).fill(null)
+    this.next = new UintArray(max)
+    this.prev = new UintArray(max)
+    this.head = 0
+    this.tail = 0
+    this.free = new Stack(max)
+    this.initialFill = 1
+    this.size = 0
+
+    if (typeof dispose === 'function') {
+      this.dispose = dispose
+    }
+    if (typeof disposeAfter === 'function') {
+      this.disposeAfter = disposeAfter
+      this.disposed = []
+    } else {
+      this.disposeAfter = null
+      this.disposed = null
+    }
+    this.noDisposeOnSet = !!noDisposeOnSet
+    this.noUpdateTTL = !!noUpdateTTL
+    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
+    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
+    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
+    this.ignoreFetchAbort = !!ignoreFetchAbort
+
+    // NB: maxEntrySize is set to maxSize if it's set
+    if (this.maxEntrySize !== 0) {
+      if (this.maxSize !== 0) {
+        if (!isPosInt(this.maxSize)) {
+          throw new TypeError(
+            'maxSize must be a positive integer if specified'
+          )
+        }
+      }
+      if (!isPosInt(this.maxEntrySize)) {
+        throw new TypeError(
+          'maxEntrySize must be a positive integer if specified'
+        )
+      }
+      this.initializeSizeTracking()
+    }
+
+    this.allowStale = !!allowStale || !!stale
+    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
+    this.updateAgeOnGet = !!updateAgeOnGet
+    this.updateAgeOnHas = !!updateAgeOnHas
+    this.ttlResolution =
+      isPosInt(ttlResolution) || ttlResolution === 0
+        ? ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.now + status.ttl - status.start
+      }
+    }
+
+    // debounce calls to perf.now() to 1s so we're not hitting
+    // that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+          )
+        }
+      }
+      return size
+    }
+    this.addItemSize = (index, size, status) => {
+      this.sizes[index] = size
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
+      }
+      this.calculatedSize += this.sizes[index]
+      if (status) {
+        status.entrySize = size
+        status.totalCalculatedSize = this.calculatedSize
+      }
+    }
+  }
+  removeItemSize(_index) {}
+  addItemSize(_index, _size) {}
+  requireSize(_k, _v, size, sizeCalculation) {
+    if (size || sizeCalculation) {
+      throw new TypeError(
+        'cannot set size without setting maxSize or maxEntrySize on cache'
+      )
+    }
+  }
+
+  *indexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.tail; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.head) {
+          break
+        } else {
+          i = this.prev[i]
+        }
+      }
+    }
+  }
+
+  *rindexes({ allowStale = this.allowStale } = {}) {
+    if (this.size) {
+      for (let i = this.head; true; ) {
+        if (!this.isValidIndex(i)) {
+          break
+        }
+        if (allowStale || !this.isStale(i)) {
+          yield i
+        }
+        if (i === this.tail) {
+          break
+        } else {
+          i = this.next[i]
+        }
+      }
+    }
+  }
+
+  isValidIndex(index) {
+    return (
+      index !== undefined &&
+      this.keyMap.get(this.keyList[index]) === index
+    )
+  }
+
+  *entries() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+  *rentries() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield [this.keyList[i], this.valList[i]]
+      }
+    }
+  }
+
+  *keys() {
+    for (const i of this.indexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+  *rkeys() {
+    for (const i of this.rindexes()) {
+      if (
+        this.keyList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.keyList[i]
+      }
+    }
+  }
+
+  *values() {
+    for (const i of this.indexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+  *rvalues() {
+    for (const i of this.rindexes()) {
+      if (
+        this.valList[i] !== undefined &&
+        !this.isBackgroundFetch(this.valList[i])
+      ) {
+        yield this.valList[i]
+      }
+    }
+  }
+
+  [Symbol.iterator]() {
+    return this.entries()
+  }
+
+  find(fn, getOptions) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      if (fn(value, this.keyList[i], this)) {
+        return this.get(this.keyList[i], getOptions)
+      }
+    }
+  }
+
+  forEach(fn, thisp = this) {
+    for (const i of this.indexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  rforEach(fn, thisp = this) {
+    for (const i of this.rindexes()) {
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      fn.call(thisp, value, this.keyList[i], this)
+    }
+  }
+
+  get prune() {
+    deprecatedMethod('prune', 'purgeStale')
+    return this.purgeStale
+  }
+
+  purgeStale() {
+    let deleted = false
+    for (const i of this.rindexes({ allowStale: true })) {
+      if (this.isStale(i)) {
+        this.delete(this.keyList[i])
+        deleted = true
+      }
+    }
+    return deleted
+  }
+
+  dump() {
+    const arr = []
+    for (const i of this.indexes({ allowStale: true })) {
+      const key = this.keyList[i]
+      const v = this.valList[i]
+      const value = this.isBackgroundFetch(v)
+        ? v.__staleWhileFetching
+        : v
+      if (value === undefined) continue
+      const entry = { value }
+      if (this.ttls) {
+        entry.ttl = this.ttls[i]
+        // always dump the start relative to a portable timestamp
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = perf.now() - this.starts[i]
+        entry.start = Math.floor(Date.now() - age)
+      }
+      if (this.sizes) {
+        entry.size = this.sizes[i]
+      }
+      arr.unshift([key, entry])
+    }
+    return arr
+  }
+
+  load(arr) {
+    this.clear()
+    for (const [key, entry] of arr) {
+      if (entry.start) {
+        // entry.start is a portable timestamp, but we may be using
+        // node's performance.now(), so calculate the offset.
+        // it's ok for this to be a bit slow, it's a rare operation.
+        const age = Date.now() - entry.start
+        entry.start = perf.now() - age
+      }
+      this.set(key, entry.value, entry)
+    }
+  }
+
+  dispose(_v, _k, _reason) {}
+
+  set(
+    k,
+    v,
+    {
+      ttl = this.ttl,
+      start,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      status,
+    } = {}
+  ) {
+    size = this.requireSize(k, v, size, sizeCalculation)
+    // if the item doesn't fit, don't do anything
+    // NB: maxEntrySize set to maxSize by default
+    if (this.maxEntrySize && size > this.maxEntrySize) {
+      if (status) {
+        status.set = 'miss'
+        status.maxEntrySizeExceeded = true
+      }
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
+      return this
+    }
+    let index = this.size === 0 ? undefined : this.keyMap.get(k)
+    if (index === undefined) {
+      // addition
+      index = this.newIndex()
+      this.keyList[index] = k
+      this.valList[index] = v
+      this.keyMap.set(k, index)
+      this.next[this.tail] = index
+      this.prev[index] = this.tail
+      this.tail = index
+      this.size++
+      this.addItemSize(index, size, status)
+      if (status) {
+        status.set = 'add'
+      }
+      noUpdateTTL = false
+    } else {
+      // update
+      this.moveToTail(index)
+      const oldVal = this.valList[index]
+      if (v !== oldVal) {
+        if (this.isBackgroundFetch(oldVal)) {
+          oldVal.__abortController.abort(new Error('replaced'))
+        } else {
+          if (!noDisposeOnSet) {
+            this.dispose(oldVal, k, 'set')
+            if (this.disposeAfter) {
+              this.disposed.push([oldVal, k, 'set'])
+            }
+          }
+        }
+        this.removeItemSize(index)
+        this.valList[index] = v
+        this.addItemSize(index, size, status)
+        if (status) {
+          status.set = 'replace'
+          const oldValue =
+            oldVal && this.isBackgroundFetch(oldVal)
+              ? oldVal.__staleWhileFetching
+              : oldVal
+          if (oldValue !== undefined) status.oldValue = oldValue
+        }
+      } else if (status) {
+        status.set = 'update'
+      }
+    }
+    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
+      this.initializeTTLTracking()
+    }
+    if (!noUpdateTTL) {
+      this.setItemTTL(index, ttl, start)
+    }
+    this.statusTTL(status, index)
+    if (this.disposeAfter) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return this
+  }
+
+  newIndex() {
+    if (this.size === 0) {
+      return this.tail
+    }
+    if (this.size === this.max && this.max !== 0) {
+      return this.evict(false)
+    }
+    if (this.free.length !== 0) {
+      return this.free.pop()
+    }
+    // initial fill, just keep writing down the list
+    return this.initialFill++
+  }
+
+  pop() {
+    if (this.size) {
+      const val = this.valList[this.head]
+      this.evict(true)
+      return val
+    }
+  }
+
+  evict(free) {
+    const head = this.head
+    const k = this.keyList[head]
+    const v = this.valList[head]
+    if (this.isBackgroundFetch(v)) {
+      v.__abortController.abort(new Error('evicted'))
+    } else {
+      this.dispose(v, k, 'evict')
+      if (this.disposeAfter) {
+        this.disposed.push([v, k, 'evict'])
+      }
+    }
+    this.removeItemSize(head)
+    // if we aren't about to use the index, then null these out
+    if (free) {
+      this.keyList[head] = null
+      this.valList[head] = null
+      this.free.push(head)
+    }
+    this.head = this.next[head]
+    this.keyMap.delete(k)
+    this.size--
+    return head
+  }
+
+  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      if (!this.isStale(index)) {
+        if (updateAgeOnHas) {
+          this.updateItemAge(index)
+        }
+        if (status) status.has = 'hit'
+        this.statusTTL(status, index)
+        return true
+      } else if (status) {
+        status.has = 'stale'
+        this.statusTTL(status, index)
+      }
+    } else if (status) {
+      status.has = 'miss'
+    }
+    return false
+  }
+
+  // like get(), but without any LRU updating or TTL expiration
+  peek(k, { allowStale = this.allowStale } = {}) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined && (allowStale || !this.isStale(index))) {
+      const v = this.valList[index]
+      // either stale and allowed, or forcing a refresh of non-stale value
+      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
+    }
+  }
+
+  backgroundFetch(k, index, options, context) {
+    const v = index === undefined ? undefined : this.valList[index]
+    if (this.isBackgroundFetch(v)) {
+      return v
+    }
+    const ac = new AC()
+    if (options.signal) {
+      options.signal.addEventListener('abort', () =>
+        ac.abort(options.signal.reason)
+      )
+    }
+    const fetchOpts = {
+      signal: ac.signal,
+      options,
+      context,
+    }
+    const cb = (v, updateCache = false) => {
+      const { aborted } = ac.signal
+      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
+      if (options.status) {
+        if (aborted && !updateCache) {
+          options.status.fetchAborted = true
+          options.status.fetchError = ac.signal.reason
+          if (ignoreAbort) options.status.fetchAbortIgnored = true
+        } else {
+          options.status.fetchResolved = true
+        }
+      }
+      if (aborted && !ignoreAbort && !updateCache) {
+        return fetchFail(ac.signal.reason)
+      }
+      // either we didn't abort, and are still here, or we did, and ignored
+      if (this.valList[index] === p) {
+        if (v === undefined) {
+          if (p.__staleWhileFetching) {
+            this.valList[index] = p.__staleWhileFetching
+          } else {
+            this.delete(k)
+          }
+        } else {
+          if (options.status) options.status.fetchUpdated = true
+          this.set(k, v, fetchOpts.options)
+        }
+      }
+      return v
+    }
+    const eb = er => {
+      if (options.status) {
+        options.status.fetchRejected = true
+        options.status.fetchError = er
+      }
+      return fetchFail(er)
+    }
+    const fetchFail = er => {
+      const { aborted } = ac.signal
+      const allowStaleAborted =
+        aborted && options.allowStaleOnFetchAbort
+      const allowStale =
+        allowStaleAborted || options.allowStaleOnFetchRejection
+      const noDelete = allowStale || options.noDeleteOnFetchRejection
+      if (this.valList[index] === p) {
+        // if we allow stale on fetch rejections, then we need to ensure that
+        // the stale value is not removed from the cache when the fetch fails.
+        const del = !noDelete || p.__staleWhileFetching === undefined
+        if (del) {
+          this.delete(k)
+        } else if (!allowStaleAborted) {
+          // still replace the *promise* with the stale value,
+          // since we are done with the promise at this point.
+          // leave it untouched if we're still waiting for an
+          // aborted background fetch that hasn't yet returned.
+          this.valList[index] = p.__staleWhileFetching
+        }
+      }
+      if (allowStale) {
+        if (options.status && p.__staleWhileFetching !== undefined) {
+          options.status.returnedStale = true
+        }
+        return p.__staleWhileFetching
+      } else if (p.__returned === p) {
+        throw er
+      }
+    }
+    const pcall = (res, rej) => {
+      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
+      // ignored, we go until we finish, regardless.
+      // defer check until we are actually aborting,
+      // so fetchMethod can override.
+      ac.signal.addEventListener('abort', () => {
+        if (
+          !options.ignoreFetchAbort ||
+          options.allowStaleOnFetchAbort
+        ) {
+          res()
+          // when it eventually resolves, update the cache.
+          if (options.allowStaleOnFetchAbort) {
+            res = v => cb(v, true)
+          }
+        }
+      })
+    }
+    if (options.status) options.status.fetchDispatched = true
+    const p = new Promise(pcall).then(cb, eb)
+    p.__abortController = ac
+    p.__staleWhileFetching = v
+    p.__returned = null
+    if (index === undefined) {
+      // internal, don't expose status.
+      this.set(k, p, { ...fetchOpts.options, status: undefined })
+      index = this.keyMap.get(k)
+    } else {
+      this.valList[index] = p
+    }
+    return p
+  }
+
+  isBackgroundFetch(p) {
+    return (
+      p &&
+      typeof p === 'object' &&
+      typeof p.then === 'function' &&
+      Object.prototype.hasOwnProperty.call(
+        p,
+        '__staleWhileFetching'
+      ) &&
+      Object.prototype.hasOwnProperty.call(p, '__returned') &&
+      (p.__returned === p || p.__returned === null)
+    )
+  }
+
+  // this takes the union of get() and set() opts, because it does both
+  async fetch(
+    k,
+    {
+      // get options
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      // set options
+      ttl = this.ttl,
+      noDisposeOnSet = this.noDisposeOnSet,
+      size = 0,
+      sizeCalculation = this.sizeCalculation,
+      noUpdateTTL = this.noUpdateTTL,
+      // fetch exclusive options
+      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
+      ignoreFetchAbort = this.ignoreFetchAbort,
+      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
+      fetchContext = this.fetchContext,
+      forceRefresh = false,
+      status,
+      signal,
+    } = {}
+  ) {
+    if (!this.fetchMethod) {
+      if (status) status.fetch = 'get'
+      return this.get(k, {
+        allowStale,
+        updateAgeOnGet,
+        noDeleteOnStaleGet,
+        status,
+      })
+    }
+
+    const options = {
+      allowStale,
+      updateAgeOnGet,
+      noDeleteOnStaleGet,
+      ttl,
+      noDisposeOnSet,
+      size,
+      sizeCalculation,
+      noUpdateTTL,
+      noDeleteOnFetchRejection,
+      allowStaleOnFetchRejection,
+      allowStaleOnFetchAbort,
+      ignoreFetchAbort,
+      status,
+      signal,
+    }
+
+    let index = this.keyMap.get(k)
+    if (index === undefined) {
+      if (status) status.fetch = 'miss'
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      return (p.__returned = p)
+    } else {
+      // in cache, maybe already fetching
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        const stale =
+          allowStale && v.__staleWhileFetching !== undefined
+        if (status) {
+          status.fetch = 'inflight'
+          if (stale) status.returnedStale = true
+        }
+        return stale ? v.__staleWhileFetching : (v.__returned = v)
+      }
+
+      // if we force a refresh, that means do NOT serve the cached value,
+      // unless we are already in the process of refreshing the cache.
+      const isStale = this.isStale(index)
+      if (!forceRefresh && !isStale) {
+        if (status) status.fetch = 'hit'
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        this.statusTTL(status, index)
+        return v
+      }
+
+      // ok, it is stale or a forced refresh, and not already fetching.
+      // refresh the cache.
+      const p = this.backgroundFetch(k, index, options, fetchContext)
+      const hasStale = p.__staleWhileFetching !== undefined
+      const staleVal = hasStale && allowStale
+      if (status) {
+        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
+        if (staleVal && isStale) status.returnedStale = true
+      }
+      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
+    }
+  }
+
+  get(
+    k,
+    {
+      allowStale = this.allowStale,
+      updateAgeOnGet = this.updateAgeOnGet,
+      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
+      status,
+    } = {}
+  ) {
+    const index = this.keyMap.get(k)
+    if (index !== undefined) {
+      const value = this.valList[index]
+      const fetching = this.isBackgroundFetch(value)
+      this.statusTTL(status, index)
+      if (this.isStale(index)) {
+        if (status) status.get = 'stale'
+        // delete only if not an in-flight background fetch
+        if (!fetching) {
+          if (!noDeleteOnStaleGet) {
+            this.delete(k)
+          }
+          if (status) status.returnedStale = allowStale
+          return allowStale ? value : undefined
+        } else {
+          if (status) {
+            status.returnedStale =
+              allowStale && value.__staleWhileFetching !== undefined
+          }
+          return allowStale ? value.__staleWhileFetching : undefined
+        }
+      } else {
+        if (status) status.get = 'hit'
+        // if we're currently fetching it, we don't actually have it yet
+        // it's not stale, which means this isn't a staleWhileRefetching.
+        // If it's not stale, and fetching, AND has a __staleWhileFetching
+        // value, then that means the user fetched with {forceRefresh:true},
+        // so it's safe to return that value.
+        if (fetching) {
+          return value.__staleWhileFetching
+        }
+        this.moveToTail(index)
+        if (updateAgeOnGet) {
+          this.updateItemAge(index)
+        }
+        return value
+      }
+    } else if (status) {
+      status.get = 'miss'
+    }
+  }
+
+  connect(p, n) {
+    this.prev[n] = p
+    this.next[p] = n
+  }
+
+  moveToTail(index) {
+    // if tail already, nothing to do
+    // if head, move head to next[index]
+    // else
+    //   move next[prev[index]] to next[index] (head has no prev)
+    //   move prev[next[index]] to prev[index]
+    // prev[index] = tail
+    // next[tail] = index
+    // tail = index
+    if (index !== this.tail) {
+      if (index === this.head) {
+        this.head = this.next[index]
+      } else {
+        this.connect(this.prev[index], this.next[index])
+      }
+      this.connect(this.tail, index)
+      this.tail = index
+    }
+  }
+
+  get del() {
+    deprecatedMethod('del', 'delete')
+    return this.delete
+  }
+
+  delete(k) {
+    let deleted = false
+    if (this.size !== 0) {
+      const index = this.keyMap.get(k)
+      if (index !== undefined) {
+        deleted = true
+        if (this.size === 1) {
+          this.clear()
+        } else {
+          this.removeItemSize(index)
+          const v = this.valList[index]
+          if (this.isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('deleted'))
+          } else {
+            this.dispose(v, k, 'delete')
+            if (this.disposeAfter) {
+              this.disposed.push([v, k, 'delete'])
+            }
+          }
+          this.keyMap.delete(k)
+          this.keyList[index] = null
+          this.valList[index] = null
+          if (index === this.tail) {
+            this.tail = this.prev[index]
+          } else if (index === this.head) {
+            this.head = this.next[index]
+          } else {
+            this.next[this.prev[index]] = this.next[index]
+            this.prev[this.next[index]] = this.prev[index]
+          }
+          this.size--
+          this.free.push(index)
+        }
+      }
+    }
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+    return deleted
+  }
+
+  clear() {
+    for (const index of this.rindexes({ allowStale: true })) {
+      const v = this.valList[index]
+      if (this.isBackgroundFetch(v)) {
+        v.__abortController.abort(new Error('deleted'))
+      } else {
+        const k = this.keyList[index]
+        this.dispose(v, k, 'delete')
+        if (this.disposeAfter) {
+          this.disposed.push([v, k, 'delete'])
+        }
+      }
+    }
+
+    this.keyMap.clear()
+    this.valList.fill(null)
+    this.keyList.fill(null)
+    if (this.ttls) {
+      this.ttls.fill(0)
+      this.starts.fill(0)
+    }
+    if (this.sizes) {
+      this.sizes.fill(0)
+    }
+    this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+export default LRUCache
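
A minimal usage sketch of the stale-while-revalidate flow that backgroundFetch() and fetch() implement above (not part of the vendored file; assumes Node 18+ for the global fetch, and https://example.test is a placeholder URL):

import LRUCache from 'lru-cache'

const cache = new LRUCache({
  max: 100,
  ttl: 60_000,        // entries go stale after one minute
  allowStale: true,   // fetch() may hand back the stale value...
  fetchMethod: async (key, staleValue, { signal }) => {
    // ...while this refresh runs in the background
    const res = await fetch(`https://example.test/${key}`, { signal })
    return res.json()
  },
})

const status = {}
const value = await cache.fetch('some-key', { status })
// status.fetch is one of 'miss', 'hit', 'stale', 'refresh', or 'inflight',
// matching the branches of the fetch() method shown above
console.log(status.fetch, value)
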
diff --git a/node_modules/tuf-js/node_modules/lru-cache/package.json b/node_modules/tuf-js/node_modules/lru-cache/package.json
new file mode 100644
index 0000000000000..9684991727e7a
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/lru-cache/package.json
@@ -0,0 +1,96 @@
+{
+  "name": "lru-cache",
+  "description": "A cache object that deletes the least-recently-used items.",
+  "version": "7.18.3",
+  "author": "Isaac Z. Schlueter ",
+  "keywords": [
+    "mru",
+    "lru",
+    "cache"
+  ],
+  "sideEffects": false,
+  "scripts": {
+    "build": "npm run prepare",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "prepare": "node ./scripts/transpile-to-esm.js",
+    "size": "size-limit",
+    "test": "tap",
+    "snap": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write .",
+    "typedoc": "typedoc ./index.d.ts"
+  },
+  "type": "commonjs",
+  "main": "./index.js",
+  "module": "./index.mjs",
+  "types": "./index.d.ts",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./index.d.ts",
+        "default": "./index.mjs"
+      },
+      "require": {
+        "types": "./index.d.ts",
+        "default": "./index.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "repository": "git://github.com/isaacs/node-lru-cache.git",
+  "devDependencies": {
+    "@size-limit/preset-small-lib": "^7.0.8",
+    "@types/node": "^17.0.31",
+    "@types/tap": "^15.0.6",
+    "benchmark": "^2.1.4",
+    "c8": "^7.11.2",
+    "clock-mock": "^1.0.6",
+    "eslint-config-prettier": "^8.5.0",
+    "prettier": "^2.6.2",
+    "size-limit": "^7.0.8",
+    "tap": "^16.3.4",
+    "ts-node": "^10.7.0",
+    "tslib": "^2.4.0",
+    "typedoc": "^0.23.24",
+    "typescript": "^4.6.4"
+  },
+  "license": "ISC",
+  "files": [
+    "index.js",
+    "index.mjs",
+    "index.d.ts"
+  ],
+  "engines": {
+    "node": ">=12"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tap": {
+    "nyc-arg": [
+      "--include=index.js"
+    ],
+    "node-arg": [
+      "--expose-gc",
+      "--require",
+      "ts-node/register"
+    ],
+    "ts": false
+  },
+  "size-limit": [
+    {
+      "path": "./index.js"
+    }
+  ]
+}
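
The "exports" map above serves a dual build from a single install, so both module systems load the same cache. A minimal sketch, assuming lru-cache@7.18.3 is installed:

// CommonJS consumers resolve "./index.js"
const LRUCache = require('lru-cache')
// ESM consumers resolve "./index.mjs" (the build shown in the diff above):
//   import LRUCache from 'lru-cache'
console.log(typeof LRUCache) // 'function'
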
diff --git a/package-lock.json b/package-lock.json
index f6716731f65bf..c9990ac4f6087 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -2353,15 +2353,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/agent/node_modules/lru-cache": {
-      "version": "10.0.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz",
-      "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==",
-      "inBundle": true,
-      "engines": {
-        "node": "14 || >=16.14"
-      }
-    },
     "node_modules/@npmcli/arborist": {
       "resolved": "workspaces/arborist",
       "link": true
@@ -2437,15 +2428,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/git/node_modules/lru-cache": {
-      "version": "10.0.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz",
-      "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==",
-      "inBundle": true,
-      "engines": {
-        "node": "14 || >=16.14"
-      }
-    },
     "node_modules/@npmcli/installed-package-contents": {
       "version": "2.0.2",
       "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-2.0.2.tgz",
@@ -2513,6 +2495,14 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache": {
+      "version": "7.18.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
+      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/@npmcli/mock-globals": {
       "resolved": "mock-globals",
       "link": true
@@ -2706,6 +2696,15 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/lru-cache": {
+      "version": "7.18.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
+      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "dev": true,
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/npm-package-arg": {
       "version": "10.1.0",
       "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
@@ -3635,15 +3634,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/cacache/node_modules/lru-cache": {
-      "version": "10.0.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz",
-      "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==",
-      "inBundle": true,
-      "engines": {
-        "node": "14 || >=16.14"
-      }
-    },
     "node_modules/caching-transform": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz",
@@ -6536,15 +6526,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/hosted-git-info/node_modules/lru-cache": {
-      "version": "10.0.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz",
-      "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==",
-      "inBundle": true,
-      "engines": {
-        "node": "14 || >=16.14"
-      }
-    },
     "node_modules/html-encoding-sniffer": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz",
@@ -6791,6 +6772,15 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/init-package-json/node_modules/lru-cache": {
+      "version": "7.18.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
+      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/init-package-json/node_modules/npm-package-arg": {
       "version": "10.1.0",
       "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
@@ -8004,12 +7994,12 @@
       }
     },
     "node_modules/lru-cache": {
-      "version": "7.18.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
-      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "version": "10.0.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz",
+      "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==",
       "inBundle": true,
       "engines": {
-        "node": ">=12"
+        "node": "14 || >=16.14"
       }
     },
     "node_modules/make-dir": {
@@ -9598,6 +9588,15 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
+    "node_modules/node-gyp/node_modules/lru-cache": {
+      "version": "7.18.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
+      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/node-gyp/node_modules/make-fetch-happen": {
       "version": "11.1.1",
       "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz",
@@ -9780,6 +9779,15 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/normalize-package-data/node_modules/lru-cache": {
+      "version": "7.18.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
+      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/normalize-path": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
@@ -9891,6 +9899,15 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/npm-pick-manifest/node_modules/lru-cache": {
+      "version": "7.18.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
+      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/npm-pick-manifest/node_modules/npm-package-arg": {
       "version": "10.1.0",
       "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
@@ -9985,6 +10002,15 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/npm-registry-fetch/node_modules/lru-cache": {
+      "version": "7.18.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
+      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/npm-registry-fetch/node_modules/make-fetch-happen": {
       "version": "12.0.0",
       "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-12.0.0.tgz",
@@ -10641,6 +10667,15 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/pacote/node_modules/lru-cache": {
+      "version": "7.18.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
+      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/pacote/node_modules/npm-package-arg": {
       "version": "10.1.0",
       "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
@@ -10791,15 +10826,6 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/path-scurry/node_modules/lru-cache": {
-      "version": "10.0.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz",
-      "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==",
-      "inBundle": true,
-      "engines": {
-        "node": "14 || >=16.14"
-      }
-    },
     "node_modules/path-type": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
@@ -12036,6 +12062,15 @@
         "node": ">=16 || 14 >=14.17"
       }
     },
+    "node_modules/sigstore/node_modules/lru-cache": {
+      "version": "7.18.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
+      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/sigstore/node_modules/make-fetch-happen": {
       "version": "11.1.1",
       "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz",
@@ -15229,6 +15264,15 @@
         "node": ">=16 || 14 >=14.17"
       }
     },
+    "node_modules/tuf-js/node_modules/lru-cache": {
+      "version": "7.18.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
+      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/tuf-js/node_modules/make-fetch-happen": {
       "version": "11.1.1",
       "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz",

From c993bfc79ad9b40caf0f028d7302939e27d2198b Mon Sep 17 00:00:00 2001
From: Luke Karrys <luke@lukekarrys.com>
Date: Tue, 15 Aug 2023 13:36:34 -0700
Subject: [PATCH 36/68] deps: npm-pick-manifest@9.0.0

---
 DEPENDENCIES.md                               |   1 -
 node_modules/.gitignore                       |  14 +-
 .../git}/node_modules/hosted-git-info/LICENSE |   0
 .../hosted-git-info/lib/from-url.js           |   0
 .../node_modules/hosted-git-info/lib/hosts.js |   0
 .../node_modules/hosted-git-info/lib/index.js |   0
 .../hosted-git-info/lib/parse-url.js          |   0
 .../node_modules/lru-cache/LICENSE            |   0
 .../node_modules/lru-cache/index.js           |   0
 .../node_modules/lru-cache/index.mjs          |   0
 .../node_modules/lru-cache/package.json       |   0
 .../node_modules/hosted-git-info/package.json |   0
 .../git}/node_modules/npm-package-arg/LICENSE |   0
 .../node_modules/npm-package-arg/lib/npa.js   |   0
 .../node_modules/npm-package-arg/package.json |   0
 .../node_modules/npm-pick-manifest/LICENSE.md |  16 ++
 .../npm-pick-manifest/lib/index.js            | 218 ++++++++++++++++++
 .../npm-pick-manifest/package.json            |  57 +++++
 node_modules/npm-pick-manifest/package.json   |  14 +-
 .../node_modules/npm-pick-manifest/LICENSE.md |  16 ++
 .../npm-pick-manifest/lib/index.js            | 218 ++++++++++++++++++
 .../npm-pick-manifest/package.json            |  57 +++++
 package-lock.json                             | 131 +++++++----
 package.json                                  |   2 +-
 workspaces/arborist/package.json              |   2 +-
 25 files changed, 691 insertions(+), 55 deletions(-)
 rename node_modules/{npm-pick-manifest => @npmcli/git}/node_modules/hosted-git-info/LICENSE (100%)
 rename node_modules/{npm-pick-manifest => @npmcli/git}/node_modules/hosted-git-info/lib/from-url.js (100%)
 rename node_modules/{npm-pick-manifest => @npmcli/git}/node_modules/hosted-git-info/lib/hosts.js (100%)
 rename node_modules/{npm-pick-manifest => @npmcli/git}/node_modules/hosted-git-info/lib/index.js (100%)
 rename node_modules/{npm-pick-manifest => @npmcli/git}/node_modules/hosted-git-info/lib/parse-url.js (100%)
 rename node_modules/{npm-pick-manifest => @npmcli/git/node_modules/hosted-git-info}/node_modules/lru-cache/LICENSE (100%)
 rename node_modules/{npm-pick-manifest => @npmcli/git/node_modules/hosted-git-info}/node_modules/lru-cache/index.js (100%)
 rename node_modules/{npm-pick-manifest => @npmcli/git/node_modules/hosted-git-info}/node_modules/lru-cache/index.mjs (100%)
 rename node_modules/{npm-pick-manifest => @npmcli/git/node_modules/hosted-git-info}/node_modules/lru-cache/package.json (100%)
 rename node_modules/{npm-pick-manifest => @npmcli/git}/node_modules/hosted-git-info/package.json (100%)
 rename node_modules/{npm-pick-manifest => @npmcli/git}/node_modules/npm-package-arg/LICENSE (100%)
 rename node_modules/{npm-pick-manifest => @npmcli/git}/node_modules/npm-package-arg/lib/npa.js (100%)
 rename node_modules/{npm-pick-manifest => @npmcli/git}/node_modules/npm-package-arg/package.json (100%)
 create mode 100644 node_modules/@npmcli/git/node_modules/npm-pick-manifest/LICENSE.md
 create mode 100644 node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js
 create mode 100644 node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/package.json

diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 4a2ff45175b42..324dbb190ca34 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -524,7 +524,6 @@ graph LR;
   npm-->libnpmteam;
   npm-->libnpmversion;
   npm-->licensee;
-  npm-->lru-cache;
   npm-->make-fetch-happen;
   npm-->minimatch;
   npm-->minipass-pipeline;
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index e39385838321d..c71880acbc340 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -22,6 +22,14 @@
 !/@npmcli/disparity-colors
 !/@npmcli/fs
 !/@npmcli/git
+!/@npmcli/git/node_modules/
+/@npmcli/git/node_modules/*
+!/@npmcli/git/node_modules/hosted-git-info
+!/@npmcli/git/node_modules/hosted-git-info/node_modules/
+/@npmcli/git/node_modules/hosted-git-info/node_modules/*
+!/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache
+!/@npmcli/git/node_modules/npm-package-arg
+!/@npmcli/git/node_modules/npm-pick-manifest
 !/@npmcli/installed-package-contents
 !/@npmcli/map-workspaces
 !/@npmcli/metavuln-calculator
@@ -218,11 +226,6 @@
 !/npm-package-arg
 !/npm-packlist
 !/npm-pick-manifest
-!/npm-pick-manifest/node_modules/
-/npm-pick-manifest/node_modules/*
-!/npm-pick-manifest/node_modules/hosted-git-info
-!/npm-pick-manifest/node_modules/lru-cache
-!/npm-pick-manifest/node_modules/npm-package-arg
 !/npm-profile
 !/npm-registry-fetch
 !/npm-registry-fetch/node_modules/
@@ -249,6 +252,7 @@
 !/pacote/node_modules/hosted-git-info
 !/pacote/node_modules/lru-cache
 !/pacote/node_modules/npm-package-arg
+!/pacote/node_modules/npm-pick-manifest
 !/parse-conflict-json
 !/path-is-absolute
 !/path-key
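
The grouped rules added above follow gitignore's re-include restriction: a file cannot be re-included while a parent directory of it is excluded, so each nested node_modules has to be opened up level by level. An annotated copy of the pattern (not part of the patch):

# 1) re-include the directory itself
!/@npmcli/git/node_modules/
# 2) exclude its contents by default
/@npmcli/git/node_modules/*
# 3) re-include only the vendored packages
!/@npmcli/git/node_modules/npm-pick-manifest
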
diff --git a/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE b/node_modules/@npmcli/git/node_modules/hosted-git-info/LICENSE
similarity index 100%
rename from node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/LICENSE
diff --git a/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/from-url.js
similarity index 100%
rename from node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/lib/from-url.js
diff --git a/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/hosts.js
similarity index 100%
rename from node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/lib/hosts.js
diff --git a/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/index.js
similarity index 100%
rename from node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/lib/index.js
diff --git a/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/parse-url.js
similarity index 100%
rename from node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/lib/parse-url.js
diff --git a/node_modules/npm-pick-manifest/node_modules/lru-cache/LICENSE b/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE
similarity index 100%
rename from node_modules/npm-pick-manifest/node_modules/lru-cache/LICENSE
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE
diff --git a/node_modules/npm-pick-manifest/node_modules/lru-cache/index.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/index.js
similarity index 100%
rename from node_modules/npm-pick-manifest/node_modules/lru-cache/index.js
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/index.js
diff --git a/node_modules/npm-pick-manifest/node_modules/lru-cache/index.mjs b/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/index.mjs
similarity index 100%
rename from node_modules/npm-pick-manifest/node_modules/lru-cache/index.mjs
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/index.mjs
diff --git a/node_modules/npm-pick-manifest/node_modules/lru-cache/package.json b/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/package.json
similarity index 100%
rename from node_modules/npm-pick-manifest/node_modules/lru-cache/package.json
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/package.json
diff --git a/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json b/node_modules/@npmcli/git/node_modules/hosted-git-info/package.json
similarity index 100%
rename from node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/package.json
diff --git a/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE b/node_modules/@npmcli/git/node_modules/npm-package-arg/LICENSE
similarity index 100%
rename from node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
rename to node_modules/@npmcli/git/node_modules/npm-package-arg/LICENSE
diff --git a/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js b/node_modules/@npmcli/git/node_modules/npm-package-arg/lib/npa.js
similarity index 100%
rename from node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
rename to node_modules/@npmcli/git/node_modules/npm-package-arg/lib/npa.js
diff --git a/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json b/node_modules/@npmcli/git/node_modules/npm-package-arg/package.json
similarity index 100%
rename from node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
rename to node_modules/@npmcli/git/node_modules/npm-package-arg/package.json
diff --git a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/LICENSE.md b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/LICENSE.md
new file mode 100644
index 0000000000000..8d28acf866d93
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/LICENSE.md
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js
new file mode 100644
index 0000000000000..8dbd2721c8996
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js
@@ -0,0 +1,218 @@
+'use strict'
+
+const npa = require('npm-package-arg')
+const semver = require('semver')
+const { checkEngine } = require('npm-install-checks')
+const normalizeBin = require('npm-normalize-package-bin')
+
+const engineOk = (manifest, npmVersion, nodeVersion) => {
+  try {
+    checkEngine(manifest, npmVersion, nodeVersion)
+    return true
+  } catch (_) {
+    return false
+  }
+}
+
+const isBefore = (verTimes, ver, time) =>
+  !verTimes || !verTimes[ver] || Date.parse(verTimes[ver]) <= time
+
+const avoidSemverOpt = { includePrerelease: true, loose: true }
+const shouldAvoid = (ver, avoid) =>
+  avoid && semver.satisfies(ver, avoid, avoidSemverOpt)
+
+const decorateAvoid = (result, avoid) =>
+  result && shouldAvoid(result.version, avoid)
+    ? { ...result, _shouldAvoid: true }
+    : result
+
+const pickManifest = (packument, wanted, opts) => {
+  const {
+    defaultTag = 'latest',
+    before = null,
+    nodeVersion = process.version,
+    npmVersion = null,
+    includeStaged = false,
+    avoid = null,
+    avoidStrict = false,
+  } = opts
+
+  const { name, time: verTimes } = packument
+  const versions = packument.versions || {}
+
+  if (avoidStrict) {
+    const looseOpts = {
+      ...opts,
+      avoidStrict: false,
+    }
+
+    const result = pickManifest(packument, wanted, looseOpts)
+    if (!result || !result._shouldAvoid) {
+      return result
+    }
+
+    const caret = pickManifest(packument, `^${result.version}`, looseOpts)
+    if (!caret || !caret._shouldAvoid) {
+      return {
+        ...caret,
+        _outsideDependencyRange: true,
+        _isSemVerMajor: false,
+      }
+    }
+
+    const star = pickManifest(packument, '*', looseOpts)
+    if (!star || !star._shouldAvoid) {
+      return {
+        ...star,
+        _outsideDependencyRange: true,
+        _isSemVerMajor: true,
+      }
+    }
+
+    throw Object.assign(new Error(`No avoidable versions for ${name}`), {
+      code: 'ETARGET',
+      name,
+      wanted,
+      avoid,
+      before,
+      versions: Object.keys(versions),
+    })
+  }
+
+  const staged = (includeStaged && packument.stagedVersions &&
+    packument.stagedVersions.versions) || {}
+  const restricted = (packument.policyRestrictions &&
+    packument.policyRestrictions.versions) || {}
+
+  const time = before && verTimes ? +(new Date(before)) : Infinity
+  const spec = npa.resolve(name, wanted || defaultTag)
+  const type = spec.type
+  const distTags = packument['dist-tags'] || {}
+
+  if (type !== 'tag' && type !== 'version' && type !== 'range') {
+    throw new Error('Only tag, version, and range are supported')
+  }
+
+  // if the type is 'tag', and not just the implicit default, then it must
+  // be that exactly, or nothing else will do.
+  if (wanted && type === 'tag') {
+    const ver = distTags[wanted]
+    // if the version in the dist-tags is before the before date, then
+    // we use that.  Otherwise, we get the highest precedence version
+    // prior to the dist-tag.
+    if (isBefore(verTimes, ver, time)) {
+      return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
+    } else {
+      return pickManifest(packument, `<=${ver}`, opts)
+    }
+  }
+
+  // similarly, if a specific version, then only that version will do
+  if (wanted && type === 'version') {
+    const ver = semver.clean(wanted, { loose: true })
+    const mani = versions[ver] || staged[ver] || restricted[ver]
+    return isBefore(verTimes, ver, time) ? decorateAvoid(mani, avoid) : null
+  }
+
+  // ok, sort based on our heuristics, and pick the best fit
+  const range = type === 'range' ? wanted : '*'
+
+  // if the range is *, then we prefer the 'latest' if available
+  // but skip this if it should be avoided, in that case we have
+  // to try a little harder.
+  const defaultVer = distTags[defaultTag]
+  if (defaultVer &&
+      (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
+      !shouldAvoid(defaultVer, avoid)) {
+    const mani = versions[defaultVer]
+    if (mani && isBefore(verTimes, defaultVer, time)) {
+      return mani
+    }
+  }
+
+  // ok, actually have to sort the list and take the winner
+  const allEntries = Object.entries(versions)
+    .concat(Object.entries(staged))
+    .concat(Object.entries(restricted))
+    .filter(([ver, mani]) => isBefore(verTimes, ver, time))
+
+  if (!allEntries.length) {
+    throw Object.assign(new Error(`No versions available for ${name}`), {
+      code: 'ENOVERSIONS',
+      name,
+      type,
+      wanted,
+      before,
+      versions: Object.keys(versions),
+    })
+  }
+
+  const sortSemverOpt = { loose: true }
+  const entries = allEntries.filter(([ver, mani]) =>
+    semver.satisfies(ver, range, { loose: true }))
+    .sort((a, b) => {
+      const [vera, mania] = a
+      const [verb, manib] = b
+      const notavoida = !shouldAvoid(vera, avoid)
+      const notavoidb = !shouldAvoid(verb, avoid)
+      const notrestra = !restricted[a]
+      const notrestrb = !restricted[b]
+      const notstagea = !staged[a]
+      const notstageb = !staged[b]
+      const notdepra = !mania.deprecated
+      const notdeprb = !manib.deprecated
+      const enginea = engineOk(mania, npmVersion, nodeVersion)
+      const engineb = engineOk(manib, npmVersion, nodeVersion)
+      // sort by:
+      // - not an avoided version
+      // - not restricted
+      // - not staged
+      // - not deprecated and engine ok
+      // - engine ok
+      // - not deprecated
+      // - semver
+      return (notavoidb - notavoida) ||
+        (notrestrb - notrestra) ||
+        (notstageb - notstagea) ||
+        ((notdeprb && engineb) - (notdepra && enginea)) ||
+        (engineb - enginea) ||
+        (notdeprb - notdepra) ||
+        semver.rcompare(vera, verb, sortSemverOpt)
+    })
+
+  return decorateAvoid(entries[0] && entries[0][1], avoid)
+}
+
+module.exports = (packument, wanted, opts = {}) => {
+  const mani = pickManifest(packument, wanted, opts)
+  const picked = mani && normalizeBin(mani)
+  const policyRestrictions = packument.policyRestrictions
+  const restricted = (policyRestrictions && policyRestrictions.versions) || {}
+
+  if (picked && !restricted[picked.version]) {
+    return picked
+  }
+
+  const { before = null, defaultTag = 'latest' } = opts
+  const bstr = before ? new Date(before).toLocaleString() : ''
+  const { name } = packument
+  const pckg = `${name}@${wanted}` +
+    (before ? ` with a date before ${bstr}` : '')
+
+  const isForbidden = picked && !!restricted[picked.version]
+  const polMsg = isForbidden ? policyRestrictions.message : ''
+
+  const msg = !isForbidden ? `No matching version found for ${pckg}.`
+    : `Could not download ${pckg} due to policy violations:\n${polMsg}`
+
+  const code = isForbidden ? 'E403' : 'ETARGET'
+  throw Object.assign(new Error(msg), {
+    code,
+    type: npa.resolve(packument.name, wanted).type,
+    wanted,
+    versions: Object.keys(packument.versions ?? {}),
+    name,
+    distTags: packument['dist-tags'],
+    defaultTag,
+  })
+}
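
A minimal sketch (not part of the vendored file) of how the resolver above behaves; the packument literal is invented for illustration:

const pickManifest = require('npm-pick-manifest')

const packument = {
  name: 'demo',
  'dist-tags': { latest: '2.0.0' },
  time: {
    '1.0.0': '2022-01-01T00:00:00.000Z',
    '2.0.0': '2023-06-01T00:00:00.000Z',
  },
  versions: {
    '1.0.0': { name: 'demo', version: '1.0.0' },
    '2.0.0': { name: 'demo', version: '2.0.0' },
  },
}

// the default tag wins when it satisfies the range
console.log(pickManifest(packument, '^1.0.0 || ^2.0.0').version) // 2.0.0
// `before` filters out anything published after the given date
console.log(pickManifest(packument, '*', { before: '2022-06-01' }).version) // 1.0.0
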
diff --git a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json
new file mode 100644
index 0000000000000..feff81f5b2fee
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json
@@ -0,0 +1,57 @@
+{
+  "name": "npm-pick-manifest",
+  "version": "8.0.2",
+  "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
+  "main": "./lib",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "coverage": "tap",
+    "lint": "eslint \"**/*.js\"",
+    "test": "tap",
+    "posttest": "npm run lint",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run lint -- --fix",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/npm-pick-manifest.git"
+  },
+  "keywords": [
+    "npm",
+    "semver",
+    "package manager"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "npm-install-checks": "^6.0.0",
+    "npm-normalize-package-bin": "^3.0.0",
+    "npm-package-arg": "^10.0.0",
+    "semver": "^7.3.5"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.1"
+  },
+  "tap": {
+    "check-coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.18.0",
+    "publish": true
+  }
+}
diff --git a/node_modules/npm-pick-manifest/package.json b/node_modules/npm-pick-manifest/package.json
index feff81f5b2fee..e30c2cfe341fc 100644
--- a/node_modules/npm-pick-manifest/package.json
+++ b/node_modules/npm-pick-manifest/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-pick-manifest",
-  "version": "8.0.2",
+  "version": "9.0.0",
   "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
   "main": "./lib",
   "files": [
@@ -31,7 +31,7 @@
   "dependencies": {
     "npm-install-checks": "^6.0.0",
     "npm-normalize-package-bin": "^3.0.0",
-    "npm-package-arg": "^10.0.0",
+    "npm-package-arg": "^11.0.0",
     "semver": "^7.3.5"
   },
   "devDependencies": {
@@ -47,11 +47,17 @@
     ]
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "version": "4.18.0",
-    "publish": true
+    "publish": true,
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
   }
 }
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md b/node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md
new file mode 100644
index 0000000000000..8d28acf866d93
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js b/node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js
new file mode 100644
index 0000000000000..8dbd2721c8996
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js
@@ -0,0 +1,218 @@
+'use strict'
+
+const npa = require('npm-package-arg')
+const semver = require('semver')
+const { checkEngine } = require('npm-install-checks')
+const normalizeBin = require('npm-normalize-package-bin')
+
+const engineOk = (manifest, npmVersion, nodeVersion) => {
+  try {
+    checkEngine(manifest, npmVersion, nodeVersion)
+    return true
+  } catch (_) {
+    return false
+  }
+}
+
+const isBefore = (verTimes, ver, time) =>
+  !verTimes || !verTimes[ver] || Date.parse(verTimes[ver]) <= time
+
+const avoidSemverOpt = { includePrerelease: true, loose: true }
+const shouldAvoid = (ver, avoid) =>
+  avoid && semver.satisfies(ver, avoid, avoidSemverOpt)
+
+const decorateAvoid = (result, avoid) =>
+  result && shouldAvoid(result.version, avoid)
+    ? { ...result, _shouldAvoid: true }
+    : result
+
+const pickManifest = (packument, wanted, opts) => {
+  const {
+    defaultTag = 'latest',
+    before = null,
+    nodeVersion = process.version,
+    npmVersion = null,
+    includeStaged = false,
+    avoid = null,
+    avoidStrict = false,
+  } = opts
+
+  const { name, time: verTimes } = packument
+  const versions = packument.versions || {}
+
+  if (avoidStrict) {
+    const looseOpts = {
+      ...opts,
+      avoidStrict: false,
+    }
+
+    const result = pickManifest(packument, wanted, looseOpts)
+    if (!result || !result._shouldAvoid) {
+      return result
+    }
+
+    const caret = pickManifest(packument, `^${result.version}`, looseOpts)
+    if (!caret || !caret._shouldAvoid) {
+      return {
+        ...caret,
+        _outsideDependencyRange: true,
+        _isSemVerMajor: false,
+      }
+    }
+
+    const star = pickManifest(packument, '*', looseOpts)
+    if (!star || !star._shouldAvoid) {
+      return {
+        ...star,
+        _outsideDependencyRange: true,
+        _isSemVerMajor: true,
+      }
+    }
+
+    throw Object.assign(new Error(`No avoidable versions for ${name}`), {
+      code: 'ETARGET',
+      name,
+      wanted,
+      avoid,
+      before,
+      versions: Object.keys(versions),
+    })
+  }
+
+  const staged = (includeStaged && packument.stagedVersions &&
+    packument.stagedVersions.versions) || {}
+  const restricted = (packument.policyRestrictions &&
+    packument.policyRestrictions.versions) || {}
+
+  const time = before && verTimes ? +(new Date(before)) : Infinity
+  const spec = npa.resolve(name, wanted || defaultTag)
+  const type = spec.type
+  const distTags = packument['dist-tags'] || {}
+
+  if (type !== 'tag' && type !== 'version' && type !== 'range') {
+    throw new Error('Only tag, version, and range are supported')
+  }
+
+  // if the type is 'tag', and not just the implicit default, then it must
+  // be that exactly, or nothing else will do.
+  if (wanted && type === 'tag') {
+    const ver = distTags[wanted]
+    // if the version in the dist-tags is before the before date, then
+    // we use that.  Otherwise, we get the highest precedence version
+    // prior to the dist-tag.
+    if (isBefore(verTimes, ver, time)) {
+      return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
+    } else {
+      return pickManifest(packument, `<=${ver}`, opts)
+    }
+  }
+
+  // similarly, if a specific version, then only that version will do
+  if (wanted && type === 'version') {
+    const ver = semver.clean(wanted, { loose: true })
+    const mani = versions[ver] || staged[ver] || restricted[ver]
+    return isBefore(verTimes, ver, time) ? decorateAvoid(mani, avoid) : null
+  }
+
+  // ok, sort based on our heuristics, and pick the best fit
+  const range = type === 'range' ? wanted : '*'
+
+  // if the range is *, then we prefer the 'latest' if available
+  // but skip this if it should be avoided, in that case we have
+  // to try a little harder.
+  const defaultVer = distTags[defaultTag]
+  if (defaultVer &&
+      (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
+      !shouldAvoid(defaultVer, avoid)) {
+    const mani = versions[defaultVer]
+    if (mani && isBefore(verTimes, defaultVer, time)) {
+      return mani
+    }
+  }
+
+  // ok, actually have to sort the list and take the winner
+  const allEntries = Object.entries(versions)
+    .concat(Object.entries(staged))
+    .concat(Object.entries(restricted))
+    .filter(([ver, mani]) => isBefore(verTimes, ver, time))
+
+  if (!allEntries.length) {
+    throw Object.assign(new Error(`No versions available for ${name}`), {
+      code: 'ENOVERSIONS',
+      name,
+      type,
+      wanted,
+      before,
+      versions: Object.keys(versions),
+    })
+  }
+
+  const sortSemverOpt = { loose: true }
+  const entries = allEntries.filter(([ver, mani]) =>
+    semver.satisfies(ver, range, { loose: true }))
+    .sort((a, b) => {
+      const [vera, mania] = a
+      const [verb, manib] = b
+      const notavoida = !shouldAvoid(vera, avoid)
+      const notavoidb = !shouldAvoid(verb, avoid)
+      const notrestra = !restricted[a]
+      const notrestrb = !restricted[b]
+      const notstagea = !staged[a]
+      const notstageb = !staged[b]
+      const notdepra = !mania.deprecated
+      const notdeprb = !manib.deprecated
+      const enginea = engineOk(mania, npmVersion, nodeVersion)
+      const engineb = engineOk(manib, npmVersion, nodeVersion)
+      // sort by:
+      // - not an avoided version
+      // - not restricted
+      // - not staged
+      // - not deprecated and engine ok
+      // - engine ok
+      // - not deprecated
+      // - semver
+      return (notavoidb - notavoida) ||
+        (notrestrb - notrestra) ||
+        (notstageb - notstagea) ||
+        ((notdeprb && engineb) - (notdepra && enginea)) ||
+        (engineb - enginea) ||
+        (notdeprb - notdepra) ||
+        semver.rcompare(vera, verb, sortSemverOpt)
+    })
+
+  return decorateAvoid(entries[0] && entries[0][1], avoid)
+}
+
+module.exports = (packument, wanted, opts = {}) => {
+  const mani = pickManifest(packument, wanted, opts)
+  const picked = mani && normalizeBin(mani)
+  const policyRestrictions = packument.policyRestrictions
+  const restricted = (policyRestrictions && policyRestrictions.versions) || {}
+
+  if (picked && !restricted[picked.version]) {
+    return picked
+  }
+
+  const { before = null, defaultTag = 'latest' } = opts
+  const bstr = before ? new Date(before).toLocaleString() : ''
+  const { name } = packument
+  const pckg = `${name}@${wanted}` +
+    (before ? ` with a date before ${bstr}` : '')
+
+  const isForbidden = picked && !!restricted[picked.version]
+  const polMsg = isForbidden ? policyRestrictions.message : ''
+
+  const msg = !isForbidden ? `No matching version found for ${pckg}.`
+    : `Could not download ${pckg} due to policy violations:\n${polMsg}`
+
+  const code = isForbidden ? 'E403' : 'ETARGET'
+  throw Object.assign(new Error(msg), {
+    code,
+    type: npa.resolve(packument.name, wanted).type,
+    wanted,
+    versions: Object.keys(packument.versions ?? {}),
+    name,
+    distTags: packument['dist-tags'],
+    defaultTag,
+  })
+}
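
As a quick illustration of the avoid/dist-tag interplay implemented above (the package name and versions below are invented for the example, not part of this changeset):

const pickManifest = require('npm-pick-manifest')

const packument = {
  name: 'example-pkg',
  'dist-tags': { latest: '2.0.0' },
  versions: {
    '1.9.0': { name: 'example-pkg', version: '1.9.0' },
    '2.0.0': { name: 'example-pkg', version: '2.0.0' },
  },
}

// the dist-tag fast path: 'latest' satisfies '*', so no sorting is needed
pickManifest(packument, '*').version // '2.0.0'

// an avoid range disables the fast path; the comparator then prefers the
// best candidate outside the avoided range
pickManifest(packument, '*', { avoid: '2.x' }).version // '1.9.0'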
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/package.json b/node_modules/pacote/node_modules/npm-pick-manifest/package.json
new file mode 100644
index 0000000000000..feff81f5b2fee
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/package.json
@@ -0,0 +1,57 @@
+{
+  "name": "npm-pick-manifest",
+  "version": "8.0.2",
+  "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
+  "main": "./lib",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "coverage": "tap",
+    "lint": "eslint \"**/*.js\"",
+    "test": "tap",
+    "posttest": "npm run lint",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run lint -- --fix",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/npm-pick-manifest.git"
+  },
+  "keywords": [
+    "npm",
+    "semver",
+    "package manager"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "npm-install-checks": "^6.0.0",
+    "npm-normalize-package-bin": "^3.0.0",
+    "npm-package-arg": "^10.0.0",
+    "semver": "^7.3.5"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.1"
+  },
+  "tap": {
+    "check-coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.18.0",
+    "publish": true
+  }
+}
diff --git a/package-lock.json b/package-lock.json
index c9990ac4f6087..693e245a4e45b 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -132,7 +132,7 @@
         "npm-audit-report": "^5.0.0",
         "npm-install-checks": "^6.2.0",
         "npm-package-arg": "^11.0.0",
-        "npm-pick-manifest": "^8.0.2",
+        "npm-pick-manifest": "^9.0.0",
         "npm-profile": "^8.0.0",
         "npm-registry-fetch": "^15.0.0",
         "npm-user-validate": "^2.0.0",
@@ -2428,6 +2428,57 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/git/node_modules/hosted-git-info": {
+      "version": "6.1.1",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
+      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
+      "inBundle": true,
+      "dependencies": {
+        "lru-cache": "^7.5.1"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache": {
+      "version": "7.18.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
+      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=12"
+      }
+    },
+    "node_modules/@npmcli/git/node_modules/npm-package-arg": {
+      "version": "10.1.0",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
+      "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
+      "inBundle": true,
+      "dependencies": {
+        "hosted-git-info": "^6.0.0",
+        "proc-log": "^3.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^5.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/git/node_modules/npm-pick-manifest": {
+      "version": "8.0.2",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz",
+      "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==",
+      "inBundle": true,
+      "dependencies": {
+        "npm-install-checks": "^6.0.0",
+        "npm-normalize-package-bin": "^3.0.0",
+        "npm-package-arg": "^10.0.0",
+        "semver": "^7.3.5"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/installed-package-contents": {
       "version": "2.0.2",
       "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-2.0.2.tgz",
@@ -2720,6 +2771,21 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest": {
+      "version": "8.0.2",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz",
+      "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==",
+      "dev": true,
+      "dependencies": {
+        "npm-install-checks": "^6.0.0",
+        "npm-normalize-package-bin": "^3.0.0",
+        "npm-package-arg": "^10.0.0",
+        "semver": "^7.3.5"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@octokit/auth-token": {
       "version": "3.0.3",
       "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.3.tgz",
@@ -9873,54 +9939,18 @@
       }
     },
     "node_modules/npm-pick-manifest": {
-      "version": "8.0.2",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz",
-      "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==",
+      "version": "9.0.0",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-9.0.0.tgz",
+      "integrity": "sha512-VfvRSs/b6n9ol4Qb+bDwNGUXutpy76x6MARw/XssevE0TnctIKcmklJZM5Z7nqs5z5aW+0S63pgCNbpkUNNXBg==",
       "inBundle": true,
       "dependencies": {
         "npm-install-checks": "^6.0.0",
         "npm-normalize-package-bin": "^3.0.0",
-        "npm-package-arg": "^10.0.0",
+        "npm-package-arg": "^11.0.0",
         "semver": "^7.3.5"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/npm-pick-manifest/node_modules/hosted-git-info": {
-      "version": "6.1.1",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
-      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
-      "inBundle": true,
-      "dependencies": {
-        "lru-cache": "^7.5.1"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/npm-pick-manifest/node_modules/lru-cache": {
-      "version": "7.18.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
-      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
-      "inBundle": true,
-      "engines": {
-        "node": ">=12"
-      }
-    },
-    "node_modules/npm-pick-manifest/node_modules/npm-package-arg": {
-      "version": "10.1.0",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
-      "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
-      "inBundle": true,
-      "dependencies": {
-        "hosted-git-info": "^6.0.0",
-        "proc-log": "^3.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^5.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/npm-profile": {
@@ -10691,6 +10721,21 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/pacote/node_modules/npm-pick-manifest": {
+      "version": "8.0.2",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz",
+      "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==",
+      "inBundle": true,
+      "dependencies": {
+        "npm-install-checks": "^6.0.0",
+        "npm-normalize-package-bin": "^3.0.0",
+        "npm-package-arg": "^10.0.0",
+        "semver": "^7.3.5"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/parent-module": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -16284,7 +16329,7 @@
         "nopt": "^7.0.0",
         "npm-install-checks": "^6.2.0",
         "npm-package-arg": "^11.0.0",
-        "npm-pick-manifest": "^8.0.2",
+        "npm-pick-manifest": "^9.0.0",
         "npm-registry-fetch": "^15.0.0",
         "npmlog": "^7.0.1",
         "pacote": "^16.0.0",
diff --git a/package.json b/package.json
index 6c5504b8f3520..2cc6dcb9e33f1 100644
--- a/package.json
+++ b/package.json
@@ -97,7 +97,7 @@
     "npm-audit-report": "^5.0.0",
     "npm-install-checks": "^6.2.0",
     "npm-package-arg": "^11.0.0",
-    "npm-pick-manifest": "^8.0.2",
+    "npm-pick-manifest": "^9.0.0",
     "npm-profile": "^8.0.0",
     "npm-registry-fetch": "^15.0.0",
     "npm-user-validate": "^2.0.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index c05be0d03659d..7a134ec5d4c91 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -23,7 +23,7 @@
     "nopt": "^7.0.0",
     "npm-install-checks": "^6.2.0",
     "npm-package-arg": "^11.0.0",
-    "npm-pick-manifest": "^8.0.2",
+    "npm-pick-manifest": "^9.0.0",
     "npm-registry-fetch": "^15.0.0",
     "npmlog": "^7.0.1",
     "pacote": "^16.0.0",

From 4b70fc05a6e07871d05153821366424eee53b7dc Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 13:39:16 -0700
Subject: [PATCH 37/68] deps: pacote@17.0.0

---
 mock-registry/package.json                    |   2 +-
 node_modules/.gitignore                       |  19 +-
 .../node_modules/@npmcli/git/LICENSE          |   0
 .../node_modules/@npmcli/git/lib/clone.js     |   0
 .../node_modules/@npmcli/git/lib/errors.js    |   0
 .../node_modules/@npmcli/git/lib/find.js      |   0
 .../node_modules/@npmcli/git/lib/index.js     |   0
 .../node_modules/@npmcli/git/lib/is-clean.js  |   0
 .../node_modules/@npmcli/git/lib/is.js        |   0
 .../@npmcli/git/lib/lines-to-revs.js          |   0
 .../@npmcli/git/lib/make-error.js             |   0
 .../node_modules/@npmcli/git/lib/opts.js      |   0
 .../node_modules/@npmcli/git/lib/revs.js      |   0
 .../node_modules/@npmcli/git/lib/spawn.js     |   0
 .../node_modules/@npmcli/git/lib/utils.js     |   0
 .../node_modules/@npmcli/git/lib/which.js     |   0
 .../node_modules/@npmcli/git/package.json     |   0
 .../node_modules/hosted-git-info/LICENSE      |   0
 .../hosted-git-info/lib/from-url.js           |   0
 .../node_modules/hosted-git-info/lib/hosts.js |   0
 .../node_modules/hosted-git-info/lib/index.js |   0
 .../hosted-git-info/lib/parse-url.js          |   0
 .../node_modules/hosted-git-info/package.json |   0
 .../node_modules/npm-package-arg/LICENSE      |   0
 .../node_modules/npm-package-arg/lib/npa.js   |   0
 .../node_modules/npm-package-arg/package.json |   0
 .../npm-pick-manifest}/LICENSE.md             |   0
 .../npm-pick-manifest/lib/index.js            | 218 +++++++
 .../npm-pick-manifest/package.json            |  57 ++
 .../node_modules/pacote/LICENSE               |  15 +
 .../node_modules/pacote/lib/bin.js            | 158 +++++
 .../node_modules/pacote/lib/dir.js            | 108 ++++
 .../node_modules/pacote/lib/fetcher.js        | 505 +++++++++++++++
 .../node_modules/pacote/lib/file.js           |  96 +++
 .../node_modules/pacote/lib/git.js            | 327 ++++++++++
 .../node_modules/pacote/lib/index.js          |  23 +
 .../node_modules/pacote/lib/registry.js       | 344 ++++++++++
 .../node_modules/pacote/lib/remote.js         |  91 +++
 .../pacote/lib/util/add-git-sha.js            |  15 +
 .../node_modules/pacote/lib/util/cache-dir.js |  15 +
 .../pacote/lib/util/is-package-bin.js         |  25 +
 .../node_modules/pacote/lib/util/npm.js       |  14 +
 .../pacote/lib/util/tar-create-options.js     |  31 +
 .../pacote/lib/util/trailing-slashes.js       |  10 +
 .../node_modules/pacote/package.json          |  85 +++
 .../node_modules/cacache/lib/content/path.js  |  29 -
 .../node_modules/cacache/lib/content/read.js  | 166 -----
 .../node_modules/cacache/lib/content/rm.js    |  18 -
 .../node_modules/cacache/lib/content/write.js | 205 ------
 .../node_modules/cacache/lib/entry-index.js   | 330 ----------
 .../pacote/node_modules/cacache/lib/get.js    | 170 -----
 .../pacote/node_modules/cacache/lib/index.js  |  42 --
 .../node_modules/cacache/lib/memoization.js   |  72 ---
 .../pacote/node_modules/cacache/lib/put.js    |  80 ---
 .../pacote/node_modules/cacache/lib/rm.js     |  31 -
 .../node_modules/cacache/lib/util/glob.js     |   7 -
 .../cacache/lib/util/hash-to-segments.js      |   7 -
 .../node_modules/cacache/lib/util/tmp.js      |  26 -
 .../pacote/node_modules/cacache/lib/verify.js | 257 --------
 .../pacote/node_modules/cacache/package.json  |  82 ---
 .../normalize-package-data/LICENSE            |  15 +
 .../lib/extract_description.js                |  24 +
 .../normalize-package-data/lib/fixer.js       | 475 ++++++++++++++
 .../lib/make_warning.js                       |  22 +
 .../normalize-package-data/lib/normalize.js   |  48 ++
 .../normalize-package-data/lib/safe_format.js |  11 +
 .../normalize-package-data/lib/typos.json     |  25 +
 .../lib/warning_messages.json                 |  30 +
 .../normalize-package-data/package.json       |  62 ++
 .../node_modules/hosted-git-info/LICENSE      |  13 +
 .../hosted-git-info/lib/from-url.js           | 122 ++++
 .../node_modules/hosted-git-info/lib/hosts.js | 228 +++++++
 .../node_modules/hosted-git-info/lib/index.js | 179 ++++++
 .../hosted-git-info/lib/parse-url.js          |  78 +++
 .../node_modules/hosted-git-info/package.json |  59 ++
 .../node_modules/npm-package-arg/LICENSE      |  15 +
 .../node_modules/npm-package-arg/lib/npa.js   | 431 +++++++++++++
 .../node_modules/npm-package-arg/package.json |  59 ++
 .../node_modules/read-package-json/LICENSE    |  15 +
 .../read-package-json/lib/read-json.js        | 589 ++++++++++++++++++
 .../read-package-json/package.json            |  65 ++
 node_modules/pacote/package.json              |  16 +-
 package-lock.json                             | 200 ++++--
 package.json                                  |   2 +-
 workspaces/arborist/package.json              |   2 +-
 workspaces/libnpmdiff/package.json            |   2 +-
 workspaces/libnpmexec/package.json            |   2 +-
 workspaces/libnpmpack/package.json            |   2 +-
 88 files changed, 4867 insertions(+), 1604 deletions(-)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/LICENSE (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/lib/clone.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/lib/errors.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/lib/find.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/lib/index.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/lib/is-clean.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/lib/is.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/lib/lines-to-revs.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/lib/make-error.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/lib/opts.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/lib/revs.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/lib/spawn.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/lib/utils.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/lib/which.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/@npmcli/git/package.json (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/hosted-git-info/LICENSE (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/hosted-git-info/lib/from-url.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/hosted-git-info/lib/hosts.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/hosted-git-info/lib/index.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/hosted-git-info/lib/parse-url.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/hosted-git-info/package.json (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-package-arg/LICENSE (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-package-arg/lib/npa.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/npm-package-arg/package.json (100%)
 rename node_modules/{pacote/node_modules/cacache => @npmcli/metavuln-calculator/node_modules/npm-pick-manifest}/LICENSE.md (100%)
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/lib/index.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/package.json
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE
 create mode 100755 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js
 create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json
 delete mode 100644 node_modules/pacote/node_modules/cacache/lib/content/path.js
 delete mode 100644 node_modules/pacote/node_modules/cacache/lib/content/read.js
 delete mode 100644 node_modules/pacote/node_modules/cacache/lib/content/rm.js
 delete mode 100644 node_modules/pacote/node_modules/cacache/lib/content/write.js
 delete mode 100644 node_modules/pacote/node_modules/cacache/lib/entry-index.js
 delete mode 100644 node_modules/pacote/node_modules/cacache/lib/get.js
 delete mode 100644 node_modules/pacote/node_modules/cacache/lib/index.js
 delete mode 100644 node_modules/pacote/node_modules/cacache/lib/memoization.js
 delete mode 100644 node_modules/pacote/node_modules/cacache/lib/put.js
 delete mode 100644 node_modules/pacote/node_modules/cacache/lib/rm.js
 delete mode 100644 node_modules/pacote/node_modules/cacache/lib/util/glob.js
 delete mode 100644 node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js
 delete mode 100644 node_modules/pacote/node_modules/cacache/lib/util/tmp.js
 delete mode 100644 node_modules/pacote/node_modules/cacache/lib/verify.js
 delete mode 100644 node_modules/pacote/node_modules/cacache/package.json
 create mode 100644 node_modules/pacote/node_modules/normalize-package-data/LICENSE
 create mode 100644 node_modules/pacote/node_modules/normalize-package-data/lib/extract_description.js
 create mode 100644 node_modules/pacote/node_modules/normalize-package-data/lib/fixer.js
 create mode 100644 node_modules/pacote/node_modules/normalize-package-data/lib/make_warning.js
 create mode 100644 node_modules/pacote/node_modules/normalize-package-data/lib/normalize.js
 create mode 100644 node_modules/pacote/node_modules/normalize-package-data/lib/safe_format.js
 create mode 100644 node_modules/pacote/node_modules/normalize-package-data/lib/typos.json
 create mode 100644 node_modules/pacote/node_modules/normalize-package-data/lib/warning_messages.json
 create mode 100644 node_modules/pacote/node_modules/normalize-package-data/package.json
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
 create mode 100644 node_modules/pacote/node_modules/read-package-json/LICENSE
 create mode 100644 node_modules/pacote/node_modules/read-package-json/lib/read-json.js
 create mode 100644 node_modules/pacote/node_modules/read-package-json/package.json

diff --git a/mock-registry/package.json b/mock-registry/package.json
index fa885b2cdb548..5f9598b08e4fb 100644
--- a/mock-registry/package.json
+++ b/mock-registry/package.json
@@ -56,7 +56,7 @@
     "json-stringify-safe": "^5.0.1",
     "nock": "^13.3.0",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^16.0.0",
+    "pacote": "^17.0.0",
     "tap": "^16.3.4"
   }
 }
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index c71880acbc340..7bc0a96e9f6ef 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -35,8 +35,15 @@
 !/@npmcli/metavuln-calculator
 !/@npmcli/metavuln-calculator/node_modules/
 /@npmcli/metavuln-calculator/node_modules/*
+!/@npmcli/metavuln-calculator/node_modules/@npmcli/
+/@npmcli/metavuln-calculator/node_modules/@npmcli/*
+!/@npmcli/metavuln-calculator/node_modules/@npmcli/git
 !/@npmcli/metavuln-calculator/node_modules/cacache
+!/@npmcli/metavuln-calculator/node_modules/hosted-git-info
 !/@npmcli/metavuln-calculator/node_modules/lru-cache
+!/@npmcli/metavuln-calculator/node_modules/npm-package-arg
+!/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest
+!/@npmcli/metavuln-calculator/node_modules/pacote
 !/@npmcli/name-from-folder
 !/@npmcli/node-gyp
 !/@npmcli/package-json
@@ -245,14 +252,14 @@
 !/pacote
 !/pacote/node_modules/
 /pacote/node_modules/*
-!/pacote/node_modules/@npmcli/
-/pacote/node_modules/@npmcli/*
-!/pacote/node_modules/@npmcli/git
-!/pacote/node_modules/cacache
-!/pacote/node_modules/hosted-git-info
 !/pacote/node_modules/lru-cache
-!/pacote/node_modules/npm-package-arg
+!/pacote/node_modules/normalize-package-data
 !/pacote/node_modules/npm-pick-manifest
+!/pacote/node_modules/npm-pick-manifest/node_modules/
+/pacote/node_modules/npm-pick-manifest/node_modules/*
+!/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info
+!/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg
+!/pacote/node_modules/read-package-json
 !/parse-conflict-json
 !/path-is-absolute
 !/path-key
diff --git a/node_modules/pacote/node_modules/@npmcli/git/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/LICENSE
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/LICENSE
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/clone.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/clone.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/clone.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/clone.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/errors.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/errors.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/errors.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/errors.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/find.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/find.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/find.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/find.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/index.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is-clean.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is-clean.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/is.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/is.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/lines-to-revs.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/lines-to-revs.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/make-error.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/make-error.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/opts.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/opts.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/opts.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/opts.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/revs.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/revs.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/revs.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/revs.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/spawn.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/spawn.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/utils.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/utils.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/utils.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/utils.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/which.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/which.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/which.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/which.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/package.json
diff --git a/node_modules/pacote/node_modules/hosted-git-info/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/hosted-git-info/LICENSE
rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/LICENSE
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/from-url.js
similarity index 100%
rename from node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/from-url.js
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/hosts.js
similarity index 100%
rename from node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/hosts.js
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/hosted-git-info/lib/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/index.js
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/parse-url.js
similarity index 100%
rename from node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/parse-url.js
diff --git a/node_modules/pacote/node_modules/hosted-git-info/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/hosted-git-info/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/package.json
diff --git a/node_modules/pacote/node_modules/npm-package-arg/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/npm-package-arg/LICENSE
rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/LICENSE
diff --git a/node_modules/pacote/node_modules/npm-package-arg/lib/npa.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/lib/npa.js
similarity index 100%
rename from node_modules/pacote/node_modules/npm-package-arg/lib/npa.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/lib/npa.js
diff --git a/node_modules/pacote/node_modules/npm-package-arg/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/npm-package-arg/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/package.json
diff --git a/node_modules/pacote/node_modules/cacache/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/LICENSE.md
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/LICENSE.md
rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/LICENSE.md
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/lib/index.js
new file mode 100644
index 0000000000000..8dbd2721c8996
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/lib/index.js
@@ -0,0 +1,218 @@
+'use strict'
+
+const npa = require('npm-package-arg')
+const semver = require('semver')
+const { checkEngine } = require('npm-install-checks')
+const normalizeBin = require('npm-normalize-package-bin')
+
+const engineOk = (manifest, npmVersion, nodeVersion) => {
+  try {
+    checkEngine(manifest, npmVersion, nodeVersion)
+    return true
+  } catch (_) {
+    return false
+  }
+}
+
+const isBefore = (verTimes, ver, time) =>
+  !verTimes || !verTimes[ver] || Date.parse(verTimes[ver]) <= time
+
+const avoidSemverOpt = { includePrerelease: true, loose: true }
+const shouldAvoid = (ver, avoid) =>
+  avoid && semver.satisfies(ver, avoid, avoidSemverOpt)
+
+const decorateAvoid = (result, avoid) =>
+  result && shouldAvoid(result.version, avoid)
+    ? { ...result, _shouldAvoid: true }
+    : result
+
+const pickManifest = (packument, wanted, opts) => {
+  const {
+    defaultTag = 'latest',
+    before = null,
+    nodeVersion = process.version,
+    npmVersion = null,
+    includeStaged = false,
+    avoid = null,
+    avoidStrict = false,
+  } = opts
+
+  const { name, time: verTimes } = packument
+  const versions = packument.versions || {}
+
+  if (avoidStrict) {
+    const looseOpts = {
+      ...opts,
+      avoidStrict: false,
+    }
+
+    const result = pickManifest(packument, wanted, looseOpts)
+    if (!result || !result._shouldAvoid) {
+      return result
+    }
+
+    const caret = pickManifest(packument, `^${result.version}`, looseOpts)
+    if (!caret || !caret._shouldAvoid) {
+      return {
+        ...caret,
+        _outsideDependencyRange: true,
+        _isSemVerMajor: false,
+      }
+    }
+
+    const star = pickManifest(packument, '*', looseOpts)
+    if (!star || !star._shouldAvoid) {
+      return {
+        ...star,
+        _outsideDependencyRange: true,
+        _isSemVerMajor: true,
+      }
+    }
+
+    throw Object.assign(new Error(`No avoidable versions for ${name}`), {
+      code: 'ETARGET',
+      name,
+      wanted,
+      avoid,
+      before,
+      versions: Object.keys(versions),
+    })
+  }
+
+  const staged = (includeStaged && packument.stagedVersions &&
+    packument.stagedVersions.versions) || {}
+  const restricted = (packument.policyRestrictions &&
+    packument.policyRestrictions.versions) || {}
+
+  const time = before && verTimes ? +(new Date(before)) : Infinity
+  const spec = npa.resolve(name, wanted || defaultTag)
+  const type = spec.type
+  const distTags = packument['dist-tags'] || {}
+
+  if (type !== 'tag' && type !== 'version' && type !== 'range') {
+    throw new Error('Only tag, version, and range are supported')
+  }
+
+  // if the type is 'tag', and not just the implicit default, then it must
+  // be that exactly, or nothing else will do.
+  if (wanted && type === 'tag') {
+    const ver = distTags[wanted]
+    // if the version in the dist-tags is before the before date, then
+    // we use that.  Otherwise, we get the highest precedence version
+    // prior to the dist-tag.
+    if (isBefore(verTimes, ver, time)) {
+      return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
+    } else {
+      return pickManifest(packument, `<=${ver}`, opts)
+    }
+  }
+
+  // similarly, if a specific version, then only that version will do
+  if (wanted && type === 'version') {
+    const ver = semver.clean(wanted, { loose: true })
+    const mani = versions[ver] || staged[ver] || restricted[ver]
+    return isBefore(verTimes, ver, time) ? decorateAvoid(mani, avoid) : null
+  }
+
+  // ok, sort based on our heuristics, and pick the best fit
+  const range = type === 'range' ? wanted : '*'
+
+  // if the range is *, then we prefer the 'latest' if available
+  // but skip this if it should be avoided, in that case we have
+  // to try a little harder.
+  const defaultVer = distTags[defaultTag]
+  if (defaultVer &&
+      (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
+      !shouldAvoid(defaultVer, avoid)) {
+    const mani = versions[defaultVer]
+    if (mani && isBefore(verTimes, defaultVer, time)) {
+      return mani
+    }
+  }
+
+  // ok, actually have to sort the list and take the winner
+  const allEntries = Object.entries(versions)
+    .concat(Object.entries(staged))
+    .concat(Object.entries(restricted))
+    .filter(([ver, mani]) => isBefore(verTimes, ver, time))
+
+  if (!allEntries.length) {
+    throw Object.assign(new Error(`No versions available for ${name}`), {
+      code: 'ENOVERSIONS',
+      name,
+      type,
+      wanted,
+      before,
+      versions: Object.keys(versions),
+    })
+  }
+
+  const sortSemverOpt = { loose: true }
+  const entries = allEntries.filter(([ver, mani]) =>
+    semver.satisfies(ver, range, { loose: true }))
+    .sort((a, b) => {
+      const [vera, mania] = a
+      const [verb, manib] = b
+      const notavoida = !shouldAvoid(vera, avoid)
+      const notavoidb = !shouldAvoid(verb, avoid)
+      const notrestra = !restricted[vera]
+      const notrestrb = !restricted[verb]
+      const notstagea = !staged[vera]
+      const notstageb = !staged[verb]
+      const notdepra = !mania.deprecated
+      const notdeprb = !manib.deprecated
+      const enginea = engineOk(mania, npmVersion, nodeVersion)
+      const engineb = engineOk(manib, npmVersion, nodeVersion)
+      // sort by:
+      // - not an avoided version
+      // - not restricted
+      // - not staged
+      // - not deprecated and engine ok
+      // - engine ok
+      // - not deprecated
+      // - semver
+      return (notavoidb - notavoida) ||
+        (notrestrb - notrestra) ||
+        (notstageb - notstagea) ||
+        ((notdeprb && engineb) - (notdepra && enginea)) ||
+        (engineb - enginea) ||
+        (notdeprb - notdepra) ||
+        semver.rcompare(vera, verb, sortSemverOpt)
+    })
+
+  return decorateAvoid(entries[0] && entries[0][1], avoid)
+}
+
+module.exports = (packument, wanted, opts = {}) => {
+  const mani = pickManifest(packument, wanted, opts)
+  const picked = mani && normalizeBin(mani)
+  const policyRestrictions = packument.policyRestrictions
+  const restricted = (policyRestrictions && policyRestrictions.versions) || {}
+
+  if (picked && !restricted[picked.version]) {
+    return picked
+  }
+
+  const { before = null, defaultTag = 'latest' } = opts
+  const bstr = before ? new Date(before).toLocaleString() : ''
+  const { name } = packument
+  const pckg = `${name}@${wanted}` +
+    (before ? ` with a date before ${bstr}` : '')
+
+  const isForbidden = picked && !!restricted[picked.version]
+  const polMsg = isForbidden ? policyRestrictions.message : ''
+
+  const msg = !isForbidden ? `No matching version found for ${pckg}.`
+    : `Could not download ${pckg} due to policy violations:\n${polMsg}`
+
+  const code = isForbidden ? 'E403' : 'ETARGET'
+  throw Object.assign(new Error(msg), {
+    code,
+    type: npa.resolve(packument.name, wanted).type,
+    wanted,
+    versions: Object.keys(packument.versions ?? {}),
+    name,
+    distTags: packument['dist-tags'],
+    defaultTag,
+  })
+}
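
The before/time gating in this copy behaves the same way; a minimal sketch (timestamps and versions invented for illustration):

const pickManifest = require('npm-pick-manifest')

const packument = {
  name: 'example-pkg',
  'dist-tags': { latest: '2.0.0' },
  time: {
    '1.0.0': '2022-01-01T00:00:00.000Z',
    '2.0.0': '2023-06-01T00:00:00.000Z',
  },
  versions: {
    '1.0.0': { name: 'example-pkg', version: '1.0.0' },
    '2.0.0': { name: 'example-pkg', version: '2.0.0' },
  },
}

// 2.0.0 was published after the cutoff, so isBefore() rules it out and the
// dist-tag branch falls back to '<=2.0.0' resolution among older versions
pickManifest(packument, 'latest', { before: '2023-01-01' }).version // '1.0.0'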
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/package.json
new file mode 100644
index 0000000000000..feff81f5b2fee
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/package.json
@@ -0,0 +1,57 @@
+{
+  "name": "npm-pick-manifest",
+  "version": "8.0.2",
+  "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
+  "main": "./lib",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "coverage": "tap",
+    "lint": "eslint \"**/*.js\"",
+    "test": "tap",
+    "posttest": "npm run lint",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run lint -- --fix",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/npm-pick-manifest.git"
+  },
+  "keywords": [
+    "npm",
+    "semver",
+    "package manager"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "npm-install-checks": "^6.0.0",
+    "npm-normalize-package-bin": "^3.0.0",
+    "npm-package-arg": "^10.0.0",
+    "semver": "^7.3.5"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.1"
+  },
+  "tap": {
+    "check-coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.18.0",
+    "publish": true
+  }
+}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE
new file mode 100644
index 0000000000000..a03cd0ed0b338
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter, Kat Marchán, npm, Inc., and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js
new file mode 100755
index 0000000000000..f35b62ca71a53
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js
@@ -0,0 +1,158 @@
+#!/usr/bin/env node
+
+const run = conf => {
+  const pacote = require('../')
+  switch (conf._[0]) {
+    case 'resolve':
+    case 'manifest':
+    case 'packument':
+      if (conf._[0] === 'resolve' && conf.long) {
+        return pacote.manifest(conf._[1], conf).then(mani => ({
+          resolved: mani._resolved,
+          integrity: mani._integrity,
+          from: mani._from,
+        }))
+      }
+      return pacote[conf._[0]](conf._[1], conf)
+
+    case 'tarball':
+      if (!conf._[2] || conf._[2] === '-') {
+        return pacote.tarball.stream(conf._[1], stream => {
+          stream.pipe(
+            conf.testStdout ||
+            /* istanbul ignore next */
+            process.stdout
+          )
+          // make sure it resolves something falsey
+          return stream.promise().then(() => {
+            return false
+          })
+        }, conf)
+      } else {
+        return pacote.tarball.file(conf._[1], conf._[2], conf)
+      }
+
+    case 'extract':
+      return pacote.extract(conf._[1], conf._[2], conf)
+
+    default: /* istanbul ignore next */ {
+      throw new Error(`bad command: ${conf._[0]}`)
+    }
+  }
+}
+
+const version = require('../package.json').version
+const usage = () =>
+`Pacote - The JavaScript Package Handler, v${version}
+
+Usage:
+
+  pacote resolve <spec>
+    Resolve a specifier and output the fully resolved target
+    Returns integrity and from if '--long' flag is set.
+
+  pacote manifest <spec>
+    Fetch a manifest and print to stdout
+
+  pacote packument <spec>
+    Fetch a full packument and print to stdout
+
+  pacote tarball <spec> [<filename>]
+    Fetch a package tarball and save to <filename>
+    If <filename> is missing or '-', the tarball will be streamed to stdout.
+
+  pacote extract <spec> <folder>
+    Extract a package to the destination folder.
+
+Configuration values all match the names of configs passed to npm, or
+options passed to Pacote.  Additional flags for this executable:
+
+  --long     Print an object from 'resolve', including integrity and spec.
+  --json     Print result objects as JSON rather than node's default.
+             (This is the default if stdout is not a TTY.)
+  --help -h  Print this helpful text.
+
+For example '--cache=/path/to/folder' will use that folder as the cache.
+`
+
+const shouldJSON = (conf, result) =>
+  conf.json ||
+  !process.stdout.isTTY &&
+  conf.json === undefined &&
+  result &&
+  typeof result === 'object'
+
+const pretty = (conf, result) =>
+  shouldJSON(conf, result) ? JSON.stringify(result, 0, 2) : result
+
+let addedLogListener = false
+const main = args => {
+  const conf = parse(args)
+  if (conf.help || conf.h) {
+    return console.log(usage())
+  }
+
+  if (!addedLogListener) {
+    process.on('log', console.error)
+    addedLogListener = true
+  }
+
+  try {
+    return run(conf)
+      .then(result => result && console.log(pretty(conf, result)))
+      .catch(er => {
+        console.error(er)
+        process.exit(1)
+      })
+  } catch (er) {
+    console.error(er.message)
+    console.error(usage())
+  }
+}
+
+const parseArg = arg => {
+  const split = arg.slice(2).split('=')
+  const k = split.shift()
+  const v = split.join('=')
+  const no = /^no-/.test(k) && !v
+  const key = (no ? k.slice(3) : k)
+    .replace(/^tag$/, 'defaultTag')
+    .replace(/-([a-z])/g, (_, c) => c.toUpperCase())
+  const value = v ? v.replace(/^~/, process.env.HOME) : !no
+  return { key, value }
+}
+
+const parse = args => {
+  const conf = {
+    _: [],
+    cache: process.env.HOME + '/.npm/_cacache',
+  }
+  let dashdash = false
+  args.forEach(arg => {
+    if (dashdash) {
+      conf._.push(arg)
+    } else if (arg === '--') {
+      dashdash = true
+    } else if (arg === '-h') {
+      conf.help = true
+    } else if (/^--/.test(arg)) {
+      const { key, value } = parseArg(arg)
+      conf[key] = value
+    } else {
+      conf._.push(arg)
+    }
+  })
+  return conf
+}
+
+if (module === require.main) {
+  main(process.argv.slice(2))
+} else {
+  module.exports = {
+    main,
+    run,
+    usage,
+    parseArg,
+    parse,
+  }
+}
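
The parse()/parseArg() pair above normalizes flags the same way npm does: '--no-' prefixes become false, kebab-case becomes camelCase, and '--tag' is aliased to 'defaultTag'. A sketch of the resulting config object (the argv and HOME values are invented for illustration):

const { parse } = require('pacote/lib/bin.js') // exported when not run as main

// with HOME=/home/user
parse(['manifest', 'abbrev@1.1.1', '--no-progress', '--fetch-retries=3', '--tag=beta'])
// -> {
//   _: ['manifest', 'abbrev@1.1.1'],
//   cache: '/home/user/.npm/_cacache',
//   progress: false,     // --no-x becomes x: false
//   fetchRetries: '3',   // kebab-case flag, camelCase key
//   defaultTag: 'beta',  // --tag is aliased to defaultTag
// }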
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js
new file mode 100644
index 0000000000000..420afc5802cb2
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js
@@ -0,0 +1,108 @@
+const Fetcher = require('./fetcher.js')
+const FileFetcher = require('./file.js')
+const { Minipass } = require('minipass')
+const tarCreateOptions = require('./util/tar-create-options.js')
+const packlist = require('npm-packlist')
+const tar = require('tar')
+const _prepareDir = Symbol('_prepareDir')
+const { resolve } = require('path')
+const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson')
+
+const runScript = require('@npmcli/run-script')
+
+const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
+class DirFetcher extends Fetcher {
+  constructor (spec, opts) {
+    super(spec, opts)
+    // just the fully resolved filename
+    this.resolved = this.spec.fetchSpec
+
+    this.tree = opts.tree || null
+    this.Arborist = opts.Arborist || null
+  }
+
+  // exposes tarCreateOptions as public API
+  static tarCreateOptions (manifest) {
+    return tarCreateOptions(manifest)
+  }
+
+  get types () {
+    return ['directory']
+  }
+
+  [_prepareDir] () {
+    return this.manifest().then(mani => {
+      if (!mani.scripts || !mani.scripts.prepare) {
+        return
+      }
+
+      // we *only* run prepare.
+      // pre/post-pack is run by the npm CLI for publish and pack,
+      // but this function is *also* run when installing git deps
+      const stdio = this.opts.foregroundScripts ? 'inherit' : 'pipe'
+
+      // hide the banner if silent opt is passed in, or if prepare running
+      // in the background.
+      const banner = this.opts.silent ? false : stdio === 'inherit'
+
+      return runScript({
+        pkg: mani,
+        event: 'prepare',
+        path: this.resolved,
+        stdio,
+        banner,
+        env: {
+          npm_package_resolved: this.resolved,
+          npm_package_integrity: this.integrity,
+          npm_package_json: resolve(this.resolved, 'package.json'),
+        },
+      })
+    })
+  }
+
+  [_tarballFromResolved] () {
+    if (!this.tree && !this.Arborist) {
+      throw new Error('DirFetcher requires either a tree or an Arborist constructor to pack')
+    }
+
+    const stream = new Minipass()
+    stream.resolved = this.resolved
+    stream.integrity = this.integrity
+
+    const { prefix, workspaces } = this.opts
+
+    // run the prepare script, get the list of files, and tar it up
+    // pipe to the stream, and proxy errors up the chain.
+    this[_prepareDir]()
+      .then(async () => {
+        if (!this.tree) {
+          const arb = new this.Arborist({ path: this.resolved })
+          this.tree = await arb.loadActual()
+        }
+        return packlist(this.tree, { path: this.resolved, prefix, workspaces })
+      })
+      .then(files => tar.c(tarCreateOptions(this.package), files)
+        .on('error', er => stream.emit('error', er)).pipe(stream))
+      .catch(er => stream.emit('error', er))
+    return stream
+  }
+
+  manifest () {
+    if (this.package) {
+      return Promise.resolve(this.package)
+    }
+
+    return this[_readPackageJson](this.resolved + '/package.json')
+      .then(mani => this.package = {
+        ...mani,
+        _integrity: this.integrity && String(this.integrity),
+        _resolved: this.resolved,
+        _from: this.from,
+      })
+  }
+
+  packument () {
+    return FileFetcher.prototype.packument.apply(this)
+  }
+}
+module.exports = DirFetcher
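
Since [_tarballFromResolved] refuses to pack without a tree, callers hitting directory specs must pass an Arborist constructor (or a pre-loaded tree) through the options. A typical use, assuming a hypothetical local package path:

const pacote = require('pacote')
const Arborist = require('@npmcli/arborist')

// inside an async context: runs the package's `prepare` script if present,
// builds the file list with npm-packlist, and writes the tarball
await pacote.tarball.file('file:/path/to/my-pkg', 'my-pkg.tgz', { Arborist })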
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js
new file mode 100644
index 0000000000000..f961a45c7d346
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js
@@ -0,0 +1,505 @@
+// This is the base class that the other fetcher types in lib
+// all descend from.
+// It handles the unpacking and retry logic that is shared among
+// all of the other Fetcher types.
+
+const npa = require('npm-package-arg')
+const ssri = require('ssri')
+const { promisify } = require('util')
+const { basename, dirname } = require('path')
+const tar = require('tar')
+const log = require('proc-log')
+const retry = require('promise-retry')
+const fs = require('fs/promises')
+const fsm = require('fs-minipass')
+const cacache = require('cacache')
+const isPackageBin = require('./util/is-package-bin.js')
+const removeTrailingSlashes = require('./util/trailing-slashes.js')
+const getContents = require('@npmcli/installed-package-contents')
+const readPackageJsonFast = require('read-package-json-fast')
+const readPackageJson = promisify(require('read-package-json'))
+const { Minipass } = require('minipass')
+
+const cacheDir = require('./util/cache-dir.js')
+
+// Private methods.
+// Child classes should not have to override these.
+// Users should never call them.
+const _extract = Symbol('_extract')
+const _mkdir = Symbol('_mkdir')
+const _empty = Symbol('_empty')
+const _toFile = Symbol('_toFile')
+const _tarxOptions = Symbol('_tarxOptions')
+const _entryMode = Symbol('_entryMode')
+const _istream = Symbol('_istream')
+const _assertType = Symbol('_assertType')
+const _tarballFromCache = Symbol('_tarballFromCache')
+const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
+const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches')
+const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson')
+
+class FetcherBase {
+  constructor (spec, opts) {
+    if (!opts || typeof opts !== 'object') {
+      throw new TypeError('options object is required')
+    }
+    this.spec = npa(spec, opts.where)
+
+    this.allowGitIgnore = !!opts.allowGitIgnore
+
+    // a bit redundant because presumably the caller already knows this,
+    // but it makes it easier to not have to keep track of the requested
+    // spec when we're dispatching thousands of these at once, and normalizing
+    // is nice.  saveSpec is preferred if set, because it turns stuff like
+    // x/y#committish into github:x/y#committish.  use name@rawSpec for
+    // registry deps so that we turn xyz and xyz@ -> xyz@
+    this.from = this.spec.registry
+      ? `${this.spec.name}@${this.spec.rawSpec}` : this.spec.saveSpec
+
+    this[_assertType]()
+    // clone the opts object so that others aren't upset when we mutate it
+    // by adding/modifying the integrity value.
+    this.opts = { ...opts }
+
+    this.cache = opts.cache || cacheDir().cacache
+    this.tufCache = opts.tufCache || cacheDir().tufcache
+    this.resolved = opts.resolved || null
+
+    // default to caching/verifying with sha512, since that's what we
+    // usually have.  we'll need to change this default, or start overriding
+    // it, when sha512 is no longer strong enough.
+    this.defaultIntegrityAlgorithm = opts.defaultIntegrityAlgorithm || 'sha512'
+
+    if (typeof opts.integrity === 'string') {
+      this.opts.integrity = ssri.parse(opts.integrity)
+    }
+
+    this.package = null
+    this.type = this.constructor.name
+    this.fmode = opts.fmode || 0o666
+    this.dmode = opts.dmode || 0o777
+    // we don't need a default umask, because we don't chmod files coming
+    // out of package tarballs.  they're forced to have a mode that is
+    // valid, regardless of what's in the tarball entry, and then we let
+    // the process's umask setting do its job.  but if configured, we do
+    // respect it.
+    this.umask = opts.umask || 0
+
+    this.preferOnline = !!opts.preferOnline
+    this.preferOffline = !!opts.preferOffline
+    this.offline = !!opts.offline
+
+    this.before = opts.before
+    this.fullMetadata = this.before ? true : !!opts.fullMetadata
+    this.fullReadJson = !!opts.fullReadJson
+    if (this.fullReadJson) {
+      this[_readPackageJson] = readPackageJson
+    } else {
+      this[_readPackageJson] = readPackageJsonFast
+    }
+
+    // replaceRegistryHost is a registry hostname, or 'never' or 'always'
+    // defaults to registry.npmjs.org
+    this.replaceRegistryHost = (!opts.replaceRegistryHost || opts.replaceRegistryHost === 'npmjs') ?
+      'registry.npmjs.org' : opts.replaceRegistryHost
+
+    this.defaultTag = opts.defaultTag || 'latest'
+    this.registry = removeTrailingSlashes(opts.registry || 'https://registry.npmjs.org')
+
+    // command to run 'prepare' scripts on directories and git dirs
+    // To use pacote with yarn, for example, set npmBin to 'yarn'
+    // and npmCliConfig with yarn's equivalents.
+    this.npmBin = opts.npmBin || 'npm'
+
+    // command to install deps for preparing
+    this.npmInstallCmd = opts.npmInstallCmd || ['install', '--force']
+
+    // XXX fill more of this in based on what we know from this.opts
+    // we explicitly DO NOT fill in --tag, though, since we are often
+    // going to be packing in the context of a publish, which may set
+    // a dist-tag, but certainly wants to keep defaulting to latest.
+    this.npmCliConfig = opts.npmCliConfig || [
+      `--cache=${dirname(this.cache)}`,
+      `--prefer-offline=${!!this.preferOffline}`,
+      `--prefer-online=${!!this.preferOnline}`,
+      `--offline=${!!this.offline}`,
+      ...(this.before ? [`--before=${this.before.toISOString()}`] : []),
+      '--no-progress',
+      '--no-save',
+      '--no-audit',
+      // override any omit settings from the environment
+      '--include=dev',
+      '--include=peer',
+      '--include=optional',
+      // we need the actual things, not just the lockfile
+      '--no-package-lock-only',
+      '--no-dry-run',
+    ]
+  }
+
+  get integrity () {
+    return this.opts.integrity || null
+  }
+
+  set integrity (i) {
+    if (!i) {
+      return
+    }
+
+    i = ssri.parse(i)
+    const current = this.opts.integrity
+
+    // do not ever update an existing hash value, but do
+    // merge in NEW algos and hashes that we don't already have.
+    if (current) {
+      current.merge(i)
+    } else {
+      this.opts.integrity = i
+    }
+  }
+
+  get notImplementedError () {
+    return new Error('not implemented in this fetcher type: ' + this.type)
+  }
+
+  // override in child classes
+  // Returns a Promise that resolves to this.resolved string value
+  resolve () {
+    return this.resolved ? Promise.resolve(this.resolved)
+      : Promise.reject(this.notImplementedError)
+  }
+
+  packument () {
+    return Promise.reject(this.notImplementedError)
+  }
+
+  // override in child class
+  // returns a manifest containing:
+  // - name
+  // - version
+  // - _resolved
+  // - _integrity
+  // - plus whatever else was in there (corgi, full metadata, or pj file)
+  manifest () {
+    return Promise.reject(this.notImplementedError)
+  }
+
+  // private, should be overridden.
+  // Note that overrides should *not* calculate or check integrity or cache,
+  // but *just* return the raw tarball data stream.
+  [_tarballFromResolved] () {
+    throw this.notImplementedError
+  }
+
+  // public, should not be overridden
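+  // concatenates the whole tarball into a single buffer, annotated with
+  // integrity, resolved, and from properties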
+  tarball () {
+    return this.tarballStream(stream => stream.concat().then(data => {
+      data.integrity = this.integrity && String(this.integrity)
+      data.resolved = this.resolved
+      data.from = this.from
+      return data
+    }))
+  }
+
+  // private
+  // Note: cacache will raise an EINTEGRITY error if the integrity doesn't match
+  [_tarballFromCache] () {
+    return cacache.get.stream.byDigest(this.cache, this.integrity, this.opts)
+  }
+
+  get [_cacheFetches] () {
+    return true
+  }
+
+  [_istream] (stream) {
+    // if not caching this, just return it
+    if (!this.opts.cache || !this[_cacheFetches]) {
+      // instead of creating a new integrity stream, we only piggyback on the
+      // provided stream's events
+      if (stream.hasIntegrityEmitter) {
+        stream.on('integrity', i => this.integrity = i)
+        return stream
+      }
+
+      const istream = ssri.integrityStream(this.opts)
+      istream.on('integrity', i => this.integrity = i)
+      stream.on('error', err => istream.emit('error', err))
+      return stream.pipe(istream)
+    }
+
+    // we have to return a stream that gets ALL the data, and proxies errors,
+    // but then pipe from the original tarball stream into the cache as well.
+    // To do this without losing any data, and since the cacache put stream
+    // is not a passthrough, we have to pipe from the original stream into
+    // the cache AFTER we pipe into the middleStream.  Since the cache stream
+    // has an asynchronous flush to write its contents to disk, we need to
+    // defer the middleStream end until the cache stream ends.
+    const middleStream = new Minipass()
+    stream.on('error', err => middleStream.emit('error', err))
+    stream.pipe(middleStream, { end: false })
+    const cstream = cacache.put.stream(
+      this.opts.cache,
+      `pacote:tarball:${this.from}`,
+      this.opts
+    )
+    cstream.on('integrity', i => this.integrity = i)
+    cstream.on('error', err => stream.emit('error', err))
+    stream.pipe(cstream)
+
+    // eslint-disable-next-line promise/catch-or-return
+    cstream.promise().catch(() => {}).then(() => middleStream.end())
+    return middleStream
+  }
+
+  pickIntegrityAlgorithm () {
+    return this.integrity ? this.integrity.pickAlgorithm(this.opts)
+      : this.defaultIntegrityAlgorithm
+  }
+
+  // TODO: check error class, once those are rolled out to our deps
+  isDataCorruptionError (er) {
+    return er.code === 'EINTEGRITY' || er.code === 'Z_DATA_ERROR'
+  }
+
+  // child classes override this getter with their supported spec types
+  get types () {
+    return false
+  }
+
+  [_assertType] () {
+    if (this.types && !this.types.includes(this.spec.type)) {
+      throw new TypeError(`Wrong spec type (${
+        this.spec.type
+      }) for ${
+        this.constructor.name
+      }. Supported types: ${this.types.join(', ')}`)
+    }
+  }
+
+  // We allow ENOENTs from cacache, but not anywhere else.
+  // An ENOENT trying to read a tgz file, for example, is Right Out.
+  isRetriableError (er) {
+    // TODO: check error class, once those are rolled out to our deps
+    return this.isDataCorruptionError(er) ||
+      er.code === 'ENOENT' ||
+      er.code === 'EISDIR'
+  }
+
+  // Mostly internal, but has some uses.
+  // Pass in a function which returns a promise.  The function will be
+  // called one or more times with streams that may fail.
+  // Retries: the function MUST handle errors on the stream by rejecting
+  // the promise, so that the retry logic can pick it up and either retry,
+  // or fail whatever promise it was making (ie, failing extraction, etc.)
+  //
+  // The return value of this method is a Promise that resolves the same
+  // as whatever the streamHandler resolves to.
+  //
+  // This should never be overridden by child classes, but it is public.
+  tarballStream (streamHandler) {
+    // Only short-circuit via cache if we have everything else we'll need,
+    // and the user has not expressed a preference for checking online.
+
+    const fromCache = (
+      !this.preferOnline &&
+      this.integrity &&
+      this.resolved
+    ) ? streamHandler(this[_tarballFromCache]()).catch(er => {
+        if (this.isDataCorruptionError(er)) {
+          log.warn('tarball', `cached data for ${
+          this.spec
+        } (${this.integrity}) seems to be corrupted. Refreshing cache.`)
+          return this.cleanupCached().then(() => {
+            throw er
+          })
+        } else {
+          throw er
+        }
+      }) : null
+
+    const fromResolved = er => {
+      if (er) {
+        if (!this.isRetriableError(er)) {
+          throw er
+        }
+        log.silly('tarball', `no local data for ${
+          this.spec
+        }. Extracting by manifest.`)
+      }
+      return this.resolve().then(() => retry(tryAgain =>
+        streamHandler(this[_istream](this[_tarballFromResolved]()))
+          .catch(streamErr => {
+          // Most likely data integrity.  A cache ENOENT error is unlikely
+          // here, since we're definitely not reading from the cache, but it
+          // IS possible that the fetch subsystem accessed the cache, and the
+          // entry got blown away or something.  Try one more time to be sure.
+            if (this.isRetriableError(streamErr)) {
+              log.warn('tarball', `tarball data for ${
+              this.spec
+            } (${this.integrity}) seems to be corrupted. Trying again.`)
+              return this.cleanupCached().then(() => tryAgain(streamErr))
+            }
+            throw streamErr
+          }), { retries: 1, minTimeout: 0, maxTimeout: 0 }))
+    }
+
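+    // prefer the cache when possible; a retriable failure there falls
+    // through to a fresh fetch, anything else rethrows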
+    return fromCache ? fromCache.catch(fromResolved) : fromResolved()
+  }
+
+  cleanupCached () {
+    return cacache.rm.content(this.cache, this.integrity, this.opts)
+  }
+
+  [_empty] (path) {
+    return getContents({ path, depth: 1 }).then(contents => Promise.all(
+      contents.map(entry => fs.rm(entry, { recursive: true, force: true }))))
+  }
+
+  async [_mkdir] (dest) {
+    await this[_empty](dest)
+    return await fs.mkdir(dest, { recursive: true })
+  }
+
+  // extraction is always the same.  the only difference is where
+  // the tarball comes from.
+  async extract (dest) {
+    await this[_mkdir](dest)
+    return this.tarballStream((tarball) => this[_extract](dest, tarball))
+  }
+
+  [_toFile] (dest) {
+    return this.tarballStream(str => new Promise((res, rej) => {
+      const writer = new fsm.WriteStream(dest)
+      str.on('error', er => writer.emit('error', er))
+      writer.on('error', er => rej(er))
+      writer.on('close', () => res({
+        integrity: this.integrity && String(this.integrity),
+        resolved: this.resolved,
+        from: this.from,
+      }))
+      str.pipe(writer)
+    }))
+  }
+
+  // don't use this[_mkdir] because we don't want to rimraf anything
+  async tarballFile (dest) {
+    const dir = dirname(dest)
+    await fs.mkdir(dir, { recursive: true })
+    return this[_toFile](dest)
+  }
+
+  [_extract] (dest, tarball) {
+    const extractor = tar.x(this[_tarxOptions]({ cwd: dest }))
+    const p = new Promise((resolve, reject) => {
+      extractor.on('end', () => {
+        resolve({
+          resolved: this.resolved,
+          integrity: this.integrity && String(this.integrity),
+          from: this.from,
+        })
+      })
+
+      extractor.on('error', er => {
+        log.warn('tar', er.message)
+        log.silly('tar', er)
+        reject(er)
+      })
+
+      tarball.on('error', er => reject(er))
+    })
+
+    tarball.pipe(extractor)
+    return p
+  }
+
+  // always ensure that entries are at least as permissive as our configured
+  // dmode/fmode, but never more permissive than the umask allows.
+  [_entryMode] (path, mode, type) {
+    const m = /Directory|GNUDumpDir/.test(type) ? this.dmode
+      : /File$/.test(type) ? this.fmode
+      : /* istanbul ignore next - should never happen in a pkg */ 0
+
+    // make sure package bins are executable
+    const exe = isPackageBin(this.package, path) ? 0o111 : 0
+    // always ensure that files are read/writable by the owner
+    return ((mode | m) & ~this.umask) | exe | 0o600
+  }
+
+  [_tarxOptions] ({ cwd }) {
+    const sawIgnores = new Set()
+    return {
+      cwd,
+      noChmod: true,
+      noMtime: true,
+      filter: (name, entry) => {
+        if (/Link$/.test(entry.type)) {
+          return false
+        }
+        entry.mode = this[_entryMode](entry.path, entry.mode, entry.type)
+        // this replicates the npm pack behavior where .gitignore files
+        // are treated like .npmignore files, but only if a .npmignore
+        // file is not present.
+        if (/File$/.test(entry.type)) {
+          const base = basename(entry.path)
+          if (base === '.npmignore') {
+            sawIgnores.add(entry.path)
+          } else if (base === '.gitignore' && !this.allowGitIgnore) {
+            // rename, but only if there's not already a .npmignore
+            const ni = entry.path.replace(/\.gitignore$/, '.npmignore')
+            if (sawIgnores.has(ni)) {
+              return false
+            }
+            entry.path = ni
+          }
+          return true
+        }
+      },
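+      // npm tarballs put their contents under a single top-level directory
+      // (usually "package/"), which strip: 1 removes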
+      strip: 1,
+      onwarn: /* istanbul ignore next - we can trust that tar logs */
+      (code, msg, data) => {
+        log.warn('tar', code, msg)
+        log.silly('tar', code, msg, data)
+      },
+      umask: this.umask,
+      // always ignore ownership info from tarball metadata
+      preserveOwner: false,
+    }
+  }
+}
+
+module.exports = FetcherBase
+
+// Child classes
+const GitFetcher = require('./git.js')
+const RegistryFetcher = require('./registry.js')
+const FileFetcher = require('./file.js')
+const DirFetcher = require('./dir.js')
+const RemoteFetcher = require('./remote.js')
+
+// Get an appropriate fetcher object from a spec and options
+FetcherBase.get = (rawSpec, opts = {}) => {
+  const spec = npa(rawSpec, opts.where)
+  switch (spec.type) {
+    case 'git':
+      return new GitFetcher(spec, opts)
+
+    case 'remote':
+      return new RemoteFetcher(spec, opts)
+
+    case 'version':
+    case 'range':
+    case 'tag':
+    case 'alias':
+      return new RegistryFetcher(spec.subSpec || spec, opts)
+
+    case 'file':
+      return new FileFetcher(spec, opts)
+
+    case 'directory':
+      return new DirFetcher(spec, opts)
+
+    default:
+      throw new TypeError('Unknown spec type: ' + spec.type)
+  }
+}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js
new file mode 100644
index 0000000000000..bf99bb86e359e
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js
@@ -0,0 +1,96 @@
+const Fetcher = require('./fetcher.js')
+const fsm = require('fs-minipass')
+const cacache = require('cacache')
+const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
+const _exeBins = Symbol('_exeBins')
+const { resolve } = require('path')
+const fs = require('fs')
+const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson')
+
+class FileFetcher extends Fetcher {
+  constructor (spec, opts) {
+    super(spec, opts)
+    // just the fully resolved filename
+    this.resolved = this.spec.fetchSpec
+  }
+
+  get types () {
+    return ['file']
+  }
+
+  manifest () {
+    if (this.package) {
+      return Promise.resolve(this.package)
+    }
+
+    // have to unpack the tarball for this.
+    return cacache.tmp.withTmp(this.cache, this.opts, dir =>
+      this.extract(dir)
+        .then(() => this[_readPackageJson](dir + '/package.json'))
+        .then(mani => this.package = {
+          ...mani,
+          _integrity: this.integrity && String(this.integrity),
+          _resolved: this.resolved,
+          _from: this.from,
+        }))
+  }
+
+  [_exeBins] (pkg, dest) {
+    if (!pkg.bin) {
+      return Promise.resolve()
+    }
+
+    return Promise.all(Object.keys(pkg.bin).map(k => new Promise(res => {
+      const script = resolve(dest, pkg.bin[k])
+      // Best effort.  Ignore errors here, the only result is that
+      // a bin script is not executable.  But if it's missing or
+      // something, we just leave it for a later stage to trip over
+      // when we can provide a more useful contextual error.
+      fs.stat(script, (er, st) => {
+        if (er) {
+          return res()
+        }
+        const mode = st.mode | 0o111
+        if (mode === st.mode) {
+          return res()
+        }
+        fs.chmod(script, mode, res)
+      })
+    })))
+  }
+
+  extract (dest) {
+    // if we've already loaded the manifest, then the super got it.
+    // but if not, read the unpacked manifest and chmod properly.
+    return super.extract(dest)
+      .then(result => this.package ? result
+      : this[_readPackageJson](dest + '/package.json').then(pkg =>
+        this[_exeBins](pkg, dest)).then(() => result))
+  }
+
+  [_tarballFromResolved] () {
+    // create a read stream and return it
+    return new fsm.ReadStream(this.resolved)
+  }
+
+  packument () {
+    // simulate based on manifest
+    return this.manifest().then(mani => ({
+      name: mani.name,
+      'dist-tags': {
+        [this.defaultTag]: mani.version,
+      },
+      versions: {
+        [mani.version]: {
+          ...mani,
+          dist: {
+            tarball: `file:${this.resolved}`,
+            integrity: this.integrity && String(this.integrity),
+          },
+        },
+      },
+    }))
+  }
+}
+
+module.exports = FileFetcher
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js
new file mode 100644
index 0000000000000..5d24f72497ec9
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js
@@ -0,0 +1,327 @@
+const Fetcher = require('./fetcher.js')
+const FileFetcher = require('./file.js')
+const RemoteFetcher = require('./remote.js')
+const DirFetcher = require('./dir.js')
+const hashre = /^[a-f0-9]{40}$/
+const git = require('@npmcli/git')
+const pickManifest = require('npm-pick-manifest')
+const npa = require('npm-package-arg')
+const { Minipass } = require('minipass')
+const cacache = require('cacache')
+const log = require('proc-log')
+const npm = require('./util/npm.js')
+
+const _resolvedFromRepo = Symbol('_resolvedFromRepo')
+const _resolvedFromHosted = Symbol('_resolvedFromHosted')
+const _resolvedFromClone = Symbol('_resolvedFromClone')
+const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
+const _addGitSha = Symbol('_addGitSha')
+const addGitSha = require('./util/add-git-sha.js')
+const _clone = Symbol('_clone')
+const _cloneHosted = Symbol('_cloneHosted')
+const _cloneRepo = Symbol('_cloneRepo')
+const _setResolvedWithSha = Symbol('_setResolvedWithSha')
+const _prepareDir = Symbol('_prepareDir')
+const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson')
+
+// get the repository url.
+// prefer https if there's auth, since ssh will drop that.
+// otherwise, prefer ssh if available (more secure).
+// We have to add the git+ back because npa suppresses it.
+const repoUrl = (h, opts) =>
+  h.sshurl && !(h.https && h.auth) && addGitPlus(h.sshurl(opts)) ||
+  h.https && addGitPlus(h.https(opts))
+
+// add git+ to the url, but only one time.
+const addGitPlus = url => url && `git+${url}`.replace(/^(git\+)+/, 'git+')
+
+class GitFetcher extends Fetcher {
+  constructor (spec, opts) {
+    super(spec, opts)
+
+    // we never want to compare integrity for git dependencies: npm/rfcs#525
+    if (this.opts.integrity) {
+      delete this.opts.integrity
+      log.warn(`skipping integrity check for git dependency ${this.spec.fetchSpec}`)
+    }
+
+    this.resolvedRef = null
+    if (this.spec.hosted) {
+      this.from = this.spec.hosted.shortcut({ noCommittish: false })
+    }
+
+    // shortcut: avoid full clone when we can go straight to the tgz
+    // if we have the full sha and it's a hosted git platform
+    if (this.spec.gitCommittish && hashre.test(this.spec.gitCommittish)) {
+      this.resolvedSha = this.spec.gitCommittish
+      // use hosted.tarball() when we shell out to RemoteFetcher later
+      this.resolved = this.spec.hosted
+        ? repoUrl(this.spec.hosted, { noCommittish: false })
+        : this.spec.rawSpec
+    } else {
+      this.resolvedSha = ''
+    }
+
+    this.Arborist = opts.Arborist || null
+  }
+
+  // just exposed to make it easier to test all the combinations
+  static repoUrl (hosted, opts) {
+    return repoUrl(hosted, opts)
+  }
+
+  get types () {
+    return ['git']
+  }
+
+  resolve () {
+    // likely a hosted git repo with a sha, so get the tarball url
+    // but in general, no reason to resolve() more than necessary!
+    if (this.resolved) {
+      return super.resolve()
+    }
+
+    // fetch the git repo and then look at the current hash
+    const h = this.spec.hosted
+    // if it's hosted, try the https/ssh urls; otherwise use the raw git url
+    return h ? this[_resolvedFromHosted](h)
+      : this[_resolvedFromRepo](this.spec.fetchSpec)
+  }
+
+  // first try https, since that's faster and passphrase-less for
+  // public repos, and supports private repos when auth is provided.
+  // Fall back to SSH to support private repos
+  // NB: we always store the https url in resolved field if auth
+  // is present, otherwise ssh if the hosted type provides it
+  [_resolvedFromHosted] (hosted) {
+    return this[_resolvedFromRepo](hosted.https && hosted.https())
+      .catch(er => {
+        // Throw early since we know pathspec errors will fail again if retried
+        if (er instanceof git.errors.GitPathspecError) {
+          throw er
+        }
+        const ssh = hosted.sshurl && hosted.sshurl()
+        // no fallback if there's no ssh url to try, or if we had https auth
+        if (!ssh || hosted.auth) {
+          throw er
+        }
+        return this[_resolvedFromRepo](ssh)
+      })
+  }
+
+  [_resolvedFromRepo] (gitRemote) {
+    // XXX make this a custom error class
+    if (!gitRemote) {
+      return Promise.reject(new Error(`No git url for ${this.spec}`))
+    }
+    const gitRange = this.spec.gitRange
+    const name = this.spec.name
+    return git.revs(gitRemote, this.opts).then(remoteRefs => {
+      return gitRange ? pickManifest({
+        versions: remoteRefs.versions,
+        'dist-tags': remoteRefs['dist-tags'],
+        name,
+      }, gitRange, this.opts)
+        : this.spec.gitCommittish ?
+          remoteRefs.refs[this.spec.gitCommittish] ||
+          remoteRefs.refs[remoteRefs.shas[this.spec.gitCommittish]]
+          : remoteRefs.refs.HEAD // no git committish, get default head
+    }).then(revDoc => {
+      // the committish provided isn't in the rev list
+      // things like HEAD~3 or @yesterday can land here.
+      if (!revDoc || !revDoc.sha) {
+        return this[_resolvedFromClone]()
+      }
+
+      this.resolvedRef = revDoc
+      this.resolvedSha = revDoc.sha
+      this[_addGitSha](revDoc.sha)
+      return this.resolved
+    })
+  }
+
+  [_setResolvedWithSha] (withSha) {
+    // we haven't cloned, so a tgz download is still faster
+    // of course, if it's not a known host, we can't do that.
+    this.resolved = !this.spec.hosted ? withSha
+      : repoUrl(npa(withSha).hosted, { noCommittish: false })
+  }
+
+  // when we get the git sha, we affix it to our spec to build up
+  // either a git url with a hash, or a tarball download URL
+  [_addGitSha] (sha) {
+    this[_setResolvedWithSha](addGitSha(this.spec, sha))
+  }
+
+  [_resolvedFromClone] () {
+    // do a full or shallow clone, then look at the HEAD
+    // kind of wasteful, but no other option, really
+    return this[_clone](dir => this.resolved)
+  }
+
+  [_prepareDir] (dir) {
+    return this[_readPackageJson](dir + '/package.json').then(mani => {
+      // no need if we aren't going to do any preparation.
+      const scripts = mani.scripts
+      if (!mani.workspaces && (!scripts || !(
+        scripts.postinstall ||
+          scripts.build ||
+          scripts.preinstall ||
+          scripts.install ||
+          scripts.prepack ||
+          scripts.prepare))) {
+        return
+      }
+
+      // to avoid cases where we have a cycle of git deps that depend
+      // on one another, we only ever do preparation for one instance
+      // of a given git dep along the chain of installations.
+      // Note that this does mean that a dependency MAY in theory end up
+      // trying to run its prepare script using a dependency that has not
+      // been properly prepared itself, but that edge case is smaller
+      // and less hazardous than a fork bomb of npm and git commands.
+      const noPrepare = !process.env._PACOTE_NO_PREPARE_ ? []
+        : process.env._PACOTE_NO_PREPARE_.split('\n')
+      if (noPrepare.includes(this.resolved)) {
+        log.info('prepare', 'skip prepare, already seen', this.resolved)
+        return
+      }
+      noPrepare.push(this.resolved)
+
+      // the DirFetcher will do its own preparation to run the prepare scripts
+      // All we have to do is put the deps in place so that it can succeed.
+      return npm(
+        this.npmBin,
+        [].concat(this.npmInstallCmd).concat(this.npmCliConfig),
+        dir,
+        { ...process.env, _PACOTE_NO_PREPARE_: noPrepare.join('\n') },
+        { message: 'git dep preparation failed' }
+      )
+    })
+  }
+
+  [_tarballFromResolved] () {
+    const stream = new Minipass()
+    stream.resolved = this.resolved
+    stream.from = this.from
+
+    // check it out and then shell out to the DirFetcher tarball packer
+    this[_clone](dir => this[_prepareDir](dir)
+      .then(() => new Promise((res, rej) => {
+        if (!this.Arborist) {
+          throw new Error('GitFetcher requires an Arborist constructor to pack a tarball')
+        }
+        const df = new DirFetcher(`file:${dir}`, {
+          ...this.opts,
+          Arborist: this.Arborist,
+          resolved: null,
+          integrity: null,
+        })
+        const dirStream = df[_tarballFromResolved]()
+        dirStream.on('error', rej)
+        dirStream.on('end', res)
+        dirStream.pipe(stream)
+      }))).catch(
+      /* istanbul ignore next: very unlikely and hard to test */
+      er => stream.emit('error', er)
+    )
+    return stream
+  }
+
+  // clone a git repo into a temp folder (or fetch and unpack if possible)
+  // handler accepts a directory, and returns a promise that resolves
+  // when we're done with it, at which point, cacache deletes it
+  //
+  // TODO: after cloning, create a tarball of the folder, and add to the cache
+  // with cacache.put.stream(), using a key that's deterministic based on the
+  // spec and repo, so that we don't ever clone the same thing multiple times.
+  [_clone] (handler, tarballOk = true) {
+    const o = { tmpPrefix: 'git-clone' }
+    const ref = this.resolvedSha || this.spec.gitCommittish
+    const h = this.spec.hosted
+    const resolved = this.resolved
+
+    // can be set manually to false to fall back to actual git clone
+    tarballOk = tarballOk &&
+      h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball
+
+    return cacache.tmp.withTmp(this.cache, o, async tmp => {
+      // if we're resolved, and have a tarball url, shell out to RemoteFetcher
+      if (tarballOk) {
+        const nameat = this.spec.name ? `${this.spec.name}@` : ''
+        return new RemoteFetcher(h.tarball({ noCommittish: false }), {
+          ...this.opts,
+          allowGitIgnore: true,
+          pkgid: `git:${nameat}${this.resolved}`,
+          resolved: this.resolved,
+          integrity: null, // it'll always be different, if we have one
+        }).extract(tmp).then(() => handler(tmp), er => {
+          // fall back to ssh download if tarball fails
+          if (er.constructor.name.match(/^Http/)) {
+            return this[_clone](handler, false)
+          } else {
+            throw er
+          }
+        })
+      }
+
+      const sha = await (
+        h ? this[_cloneHosted](ref, tmp)
+        : this[_cloneRepo](this.spec.fetchSpec, ref, tmp)
+      )
+      this.resolvedSha = sha
+      if (!this.resolved) {
+        await this[_addGitSha](sha)
+      }
+      return handler(tmp)
+    })
+  }
+
+  // first try https, since that's faster and passphrase-less for
+  // public repos, and supports private repos when auth is provided.
+  // Fall back to SSH to support private repos
+  // NB: we always store the https url in resolved field if auth
+  // is present, otherwise ssh if the hosted type provides it
+  [_cloneHosted] (ref, tmp) {
+    const hosted = this.spec.hosted
+    return this[_cloneRepo](hosted.https({ noCommittish: true }), ref, tmp)
+      .catch(er => {
+        // Throw early since we know pathspec errors will fail again if retried
+        if (er instanceof git.errors.GitPathspecError) {
+          throw er
+        }
+        const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true })
+        // no fallback if there's no ssh url to try, or if we had https auth
+        if (!ssh || hosted.auth) {
+          throw er
+        }
+        return this[_cloneRepo](ssh, ref, tmp)
+      })
+  }
+
+  [_cloneRepo] (repo, ref, tmp) {
+    const { opts, spec } = this
+    return git.clone(repo, ref, tmp, { ...opts, spec })
+  }
+
+  manifest () {
+    if (this.package) {
+      return Promise.resolve(this.package)
+    }
+
+    return this.spec.hosted && this.resolved
+      ? FileFetcher.prototype.manifest.apply(this)
+      : this[_clone](dir =>
+        this[_readPackageJson](dir + '/package.json')
+          .then(mani => this.package = {
+            ...mani,
+            _resolved: this.resolved,
+            _from: this.from,
+          }))
+  }
+
+  packument () {
+    return FileFetcher.prototype.packument.apply(this)
+  }
+}
+module.exports = GitFetcher
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js
new file mode 100644
index 0000000000000..cbcbd7c92d15f
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js
@@ -0,0 +1,23 @@
+const { get } = require('./fetcher.js')
+const GitFetcher = require('./git.js')
+const RegistryFetcher = require('./registry.js')
+const FileFetcher = require('./file.js')
+const DirFetcher = require('./dir.js')
+const RemoteFetcher = require('./remote.js')
+
+module.exports = {
+  GitFetcher,
+  RegistryFetcher,
+  FileFetcher,
+  DirFetcher,
+  RemoteFetcher,
+  resolve: (spec, opts) => get(spec, opts).resolve(),
+  extract: (spec, dest, opts) => get(spec, opts).extract(dest),
+  manifest: (spec, opts) => get(spec, opts).manifest(),
+  tarball: (spec, opts) => get(spec, opts).tarball(),
+  packument: (spec, opts) => get(spec, opts).packument(),
+}
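+// the streaming and to-file variants hang off of the tarball function itself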
+module.exports.tarball.stream = (spec, handler, opts) =>
+  get(spec, opts).tarballStream(handler)
+module.exports.tarball.file = (spec, dest, opts) =>
+  get(spec, opts).tarballFile(dest)
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js
new file mode 100644
index 0000000000000..34d9b2b87f3f3
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js
@@ -0,0 +1,344 @@
+const Fetcher = require('./fetcher.js')
+const RemoteFetcher = require('./remote.js')
+const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
+const pacoteVersion = require('../package.json').version
+const removeTrailingSlashes = require('./util/trailing-slashes.js')
+const rpj = require('read-package-json-fast')
+const pickManifest = require('npm-pick-manifest')
+const ssri = require('ssri')
+const crypto = require('crypto')
+const npa = require('npm-package-arg')
+const { sigstore } = require('sigstore')
+
+// Corgis are cute. 🐕🐶
+const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
+const fullDoc = 'application/json'
+
+const fetch = require('npm-registry-fetch')
+
+const _headers = Symbol('_headers')
+class RegistryFetcher extends Fetcher {
+  constructor (spec, opts) {
+    super(spec, opts)
+
+    // you usually don't want to fetch the same packument multiple times in
+    // the span of a given script or command, no matter how many pacote calls
+    // are made, so this lets us avoid doing that.  It's only relevant for
+    // registry fetchers, because other types simulate their packument from
+    // the manifest, which they memoize on this.package, so it's very cheap
+    // already.
+    this.packumentCache = this.opts.packumentCache || null
+
+    this.registry = fetch.pickRegistry(spec, opts)
+    this.packumentUrl = removeTrailingSlashes(this.registry) + '/' +
+      this.spec.escapedName
+
+    const parsed = new URL(this.registry)
+    const regKey = `//${parsed.host}${parsed.pathname}`
+    // unlike the nerf-darted auth keys, this one does *not* allow a mismatch
+    // of trailing slashes.  It must match exactly.
+    if (this.opts[`${regKey}:_keys`]) {
+      this.registryKeys = this.opts[`${regKey}:_keys`]
+    }
+
+    // XXX pacote <=9 has some logic to ignore opts.resolved if
+    // the resolved URL doesn't go to the same registry.
+    // Consider reproducing that here, to throw away this.resolved
+    // in that case.
+  }
+
+  async resolve () {
+    // fetching the manifest sets resolved and (if present) integrity
+    await this.manifest()
+    if (!this.resolved) {
+      throw Object.assign(
+        new Error('Invalid package manifest: no `dist.tarball` field'),
+        { package: this.spec.toString() }
+      )
+    }
+    return this.resolved
+  }
+
+  [_headers] () {
+    return {
+      // npm will override UA, but ensure that we always send *something*
+      'user-agent': this.opts.userAgent ||
+        `pacote/${pacoteVersion} node/${process.version}`,
+      ...(this.opts.headers || {}),
+      'pacote-version': pacoteVersion,
+      'pacote-req-type': 'packument',
+      'pacote-pkg-id': `registry:${this.spec.name}`,
+      accept: this.fullMetadata ? fullDoc : corgiDoc,
+    }
+  }
+
+  async packument () {
+    // note this might be either an in-flight promise for a request,
+    // or the actual packument, but we never want to make more than
+    // one request at a time for the same thing regardless.
+    if (this.packumentCache && this.packumentCache.has(this.packumentUrl)) {
+      return this.packumentCache.get(this.packumentUrl)
+    }
+
+    // npm-registry-fetch the packument
+    // set the appropriate header for corgis if fullMetadata isn't set
+    // return the res.json() promise
+    try {
+      const res = await fetch(this.packumentUrl, {
+        ...this.opts,
+        headers: this[_headers](),
+        spec: this.spec,
+        // never check integrity for packuments themselves
+        integrity: null,
+      })
+      const packument = await res.json()
+      packument._contentLength = +res.headers.get('content-length')
+      if (this.packumentCache) {
+        this.packumentCache.set(this.packumentUrl, packument)
+      }
+      return packument
+    } catch (err) {
+      if (this.packumentCache) {
+        this.packumentCache.delete(this.packumentUrl)
+      }
+      if (err.code !== 'E404' || this.fullMetadata) {
+        throw err
+      }
+      // possible that corgis are not supported by this registry
+      this.fullMetadata = true
+      return this.packument()
+    }
+  }
+
+  async manifest () {
+    if (this.package) {
+      return this.package
+    }
+
+    const packument = await this.packument()
+    let mani = await pickManifest(packument, this.spec.fetchSpec, {
+      ...this.opts,
+      defaultTag: this.defaultTag,
+      before: this.before,
+    })
+    mani = rpj.normalize(mani)
+    /* XXX add ETARGET and E403 revalidation of cached packuments here */
+
+    // add _resolved and _integrity from dist object
+    const { dist } = mani
+    if (dist) {
+      this.resolved = mani._resolved = dist.tarball
+      mani._from = this.from
+      const distIntegrity = dist.integrity ? ssri.parse(dist.integrity)
+        : dist.shasum ? ssri.fromHex(dist.shasum, 'sha1', { ...this.opts })
+        : null
+      if (distIntegrity) {
+        if (this.integrity && !this.integrity.match(distIntegrity)) {
+          // only bork if they have algos in common.
+          // otherwise we end up breaking if we have saved a sha512
+          // previously for the tarball, but the manifest only
+          // provides a sha1, which is possible for older publishes.
+          // Otherwise, this is almost certainly a case of holding it
+          // wrong, and will result in weird or insecure behavior
+          // later on when building package tree.
+          for (const algo of Object.keys(this.integrity)) {
+            if (distIntegrity[algo]) {
+              throw Object.assign(new Error(
+                `Integrity checksum failed when using ${algo}: ` +
+                `wanted ${this.integrity} but got ${distIntegrity}.`
+              ), { code: 'EINTEGRITY' })
+            }
+          }
+        }
+        // made it this far, the integrity is worthwhile.  accept it.
+        // the setter here will take care of merging it into what we already
+        // had.
+        this.integrity = distIntegrity
+      }
+    }
+    if (this.integrity) {
+      mani._integrity = String(this.integrity)
+      if (dist.signatures) {
+        if (this.opts.verifySignatures) {
+          // validate and throw on error, then set _signatures
+          const message = `${mani._id}:${mani._integrity}`
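+          // registry signatures sign the string "<name>@<version>:<integrity>"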
+          for (const signature of dist.signatures) {
+            const publicKey = this.registryKeys &&
+              this.registryKeys.filter(key => (key.keyid === signature.keyid))[0]
+            if (!publicKey) {
+              throw Object.assign(new Error(
+                  `${mani._id} has a registry signature with keyid: ${signature.keyid} ` +
+                  'but no corresponding public key can be found'
+              ), { code: 'EMISSINGSIGNATUREKEY' })
+            }
+            const validPublicKey =
+              !publicKey.expires || (Date.parse(publicKey.expires) > Date.now())
+            if (!validPublicKey) {
+              throw Object.assign(new Error(
+                  `${mani._id} has a registry signature with keyid: ${signature.keyid} ` +
+                  `but the corresponding public key has expired ${publicKey.expires}`
+              ), { code: 'EEXPIREDSIGNATUREKEY' })
+            }
+            const verifier = crypto.createVerify('SHA256')
+            verifier.write(message)
+            verifier.end()
+            const valid = verifier.verify(
+              publicKey.pemkey,
+              signature.sig,
+              'base64'
+            )
+            if (!valid) {
+              throw Object.assign(new Error(
+                  `${mani._id} has an invalid registry signature with ` +
+                  `keyid: ${publicKey.keyid} and signature: ${signature.sig}`
+              ), {
+                code: 'EINTEGRITYSIGNATURE',
+                keyid: publicKey.keyid,
+                signature: signature.sig,
+                resolved: mani._resolved,
+                integrity: mani._integrity,
+              })
+            }
+          }
+          mani._signatures = dist.signatures
+        } else {
+          mani._signatures = dist.signatures
+        }
+      }
+
+      if (dist.attestations) {
+        if (this.opts.verifyAttestations) {
+          // Always fetch attestations from the current registry host
+          const attestationsPath = new URL(dist.attestations.url).pathname
+          const attestationsUrl = removeTrailingSlashes(this.registry) + attestationsPath
+          const res = await fetch(attestationsUrl, {
+            ...this.opts,
+            // disable the integrity check for the attestations json payload;
+            // we check integrity in the verification steps below
+            integrity: null,
+          })
+          const { attestations } = await res.json()
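+          // each attestation wraps a DSSE envelope; its base64 payload is the
+          // in-toto statement that was signed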
+          const bundles = attestations.map(({ predicateType, bundle }) => {
+            const statement = JSON.parse(
+              Buffer.from(bundle.dsseEnvelope.payload, 'base64').toString('utf8')
+            )
+            const keyid = bundle.dsseEnvelope.signatures[0].keyid
+            const signature = bundle.dsseEnvelope.signatures[0].sig
+
+            return {
+              predicateType,
+              bundle,
+              statement,
+              keyid,
+              signature,
+            }
+          })
+
+          const attestationKeyIds = bundles.map((b) => b.keyid).filter((k) => !!k)
+          const attestationRegistryKeys = (this.registryKeys || [])
+            .filter(key => attestationKeyIds.includes(key.keyid))
+          if (!attestationRegistryKeys.length) {
+            throw Object.assign(new Error(
+              `${mani._id} has attestations but no corresponding public key(s) can be found`
+            ), { code: 'EMISSINGSIGNATUREKEY' })
+          }
+
+          for (const { predicateType, bundle, keyid, signature, statement } of bundles) {
+            const publicKey = attestationRegistryKeys.find(key => key.keyid === keyid)
+            // Publish attestations have a keyid set and a valid public key must be found
+            if (keyid) {
+              if (!publicKey) {
+                throw Object.assign(new Error(
+                  `${mani._id} has attestations with keyid: ${keyid} ` +
+                  'but no corresponding public key can be found'
+                ), { code: 'EMISSINGSIGNATUREKEY' })
+              }
+
+              const validPublicKey =
+                !publicKey.expires || (Date.parse(publicKey.expires) > Date.now())
+              if (!validPublicKey) {
+                throw Object.assign(new Error(
+                  `${mani._id} has attestations with keyid: ${keyid} ` +
+                  `but the corresponding public key has expired ${publicKey.expires}`
+                ), { code: 'EEXPIREDSIGNATUREKEY' })
+              }
+            }
+
+            const subject = {
+              name: statement.subject[0].name,
+              sha512: statement.subject[0].digest.sha512,
+            }
+
+            // Only type 'version' can be turned into a PURL
+            const purl = this.spec.type === 'version' ? npa.toPurl(this.spec) : this.spec
+            // Verify the statement subject matches the package name and version
+            if (subject.name !== purl) {
+              throw Object.assign(new Error(
+                `${mani._id} package name and version (PURL): ${purl} ` +
+                `doesn't match what was signed: ${subject.name}`
+              ), { code: 'EATTESTATIONSUBJECT' })
+            }
+
+            // Verify the statement subject matches the tarball integrity
+            const integrityHexDigest = ssri.parse(this.integrity).hexDigest()
+            if (subject.sha512 !== integrityHexDigest) {
+              throw Object.assign(new Error(
+                `${mani._id} package integrity (hex digest): ` +
+                `${integrityHexDigest} ` +
+                `doesn't match what was signed: ${subject.sha512}`
+              ), { code: 'EATTESTATIONSUBJECT' })
+            }
+
+            try {
+              // Provenance attestations are signed with a signing certificate
+              // (including the key) so we don't need to return a public key.
+              //
+              // Publish attestations are signed with a keyid so we need to
+              // specify a public key from the keys endpoint: `registry-host.tld/-/npm/v1/keys`
+              const options = {
+                tufCachePath: this.tufCache,
+                keySelector: publicKey ? () => publicKey.pemkey : undefined,
+              }
+              await sigstore.verify(bundle, null, options)
+            } catch (e) {
+              throw Object.assign(new Error(
+                `${mani._id} failed to verify attestation: ${e.message}`
+              ), {
+                code: 'EATTESTATIONVERIFY',
+                predicateType,
+                keyid,
+                signature,
+                resolved: mani._resolved,
+                integrity: mani._integrity,
+              })
+            }
+          }
+          mani._attestations = dist.attestations
+        } else {
+          mani._attestations = dist.attestations
+        }
+      }
+    }
+
+    this.package = mani
+    return this.package
+  }
+
+  [_tarballFromResolved] () {
+    // we use a RemoteFetcher to get the actual tarball stream
+    return new RemoteFetcher(this.resolved, {
+      ...this.opts,
+      resolved: this.resolved,
+      pkgid: `registry:${this.spec.name}@${this.resolved}`,
+    })[_tarballFromResolved]()
+  }
+
+  get types () {
+    return [
+      'tag',
+      'version',
+      'range',
+    ]
+  }
+}
+module.exports = RegistryFetcher
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js
new file mode 100644
index 0000000000000..fd617459fb031
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js
@@ -0,0 +1,91 @@
+const Fetcher = require('./fetcher.js')
+const FileFetcher = require('./file.js')
+const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
+const pacoteVersion = require('../package.json').version
+const fetch = require('npm-registry-fetch')
+const { Minipass } = require('minipass')
+
+const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches')
+const _headers = Symbol('_headers')
+class RemoteFetcher extends Fetcher {
+  constructor (spec, opts) {
+    super(spec, opts)
+    this.resolved = this.spec.fetchSpec
+    const resolvedURL = new URL(this.resolved)
+    if (this.replaceRegistryHost !== 'never'
+      && (this.replaceRegistryHost === 'always'
+      || this.replaceRegistryHost === resolvedURL.host)) {
+      this.resolved = new URL(resolvedURL.pathname, this.registry).href
+    }
+
+    // nam is a fermented pork sausage that is good to eat
+    const nameat = this.spec.name ? `${this.spec.name}@` : ''
+    this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}`
+  }
+
+  // Don't need to cache tarball fetches in pacote, because make-fetch-happen
+  // will write into cacache anyway.
+  get [_cacheFetches] () {
+    return false
+  }
+
+  [_tarballFromResolved] () {
+    const stream = new Minipass()
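+    // signals to the base class [_istream] that this stream will emit its
+    // own 'integrity' events, so it need not be wrapped in another one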
+    stream.hasIntegrityEmitter = true
+
+    const fetchOpts = {
+      ...this.opts,
+      headers: this[_headers](),
+      spec: this.spec,
+      integrity: this.integrity,
+      algorithms: [this.pickIntegrityAlgorithm()],
+    }
+
+    // eslint-disable-next-line promise/always-return
+    fetch(this.resolved, fetchOpts).then(res => {
+      res.body.on('error',
+        /* istanbul ignore next - exceedingly rare and hard to simulate */
+        er => stream.emit('error', er)
+      )
+
+      res.body.on('integrity', i => {
+        this.integrity = i
+        stream.emit('integrity', i)
+      })
+
+      res.body.pipe(stream)
+    }).catch(er => stream.emit('error', er))
+
+    return stream
+  }
+
+  [_headers] () {
+    return {
+      // npm will override this, but ensure that we always send *something*
+      'user-agent': this.opts.userAgent ||
+        `pacote/${pacoteVersion} node/${process.version}`,
+      'pacote-version': pacoteVersion,
+      'pacote-req-type': 'tarball',
+      'pacote-pkg-id': this.pkgid,
+      ...(this.integrity ? { 'pacote-integrity': String(this.integrity) }
+      : {}),
+      ...(this.opts.headers || {}),
+    }
+  }
+
+  get types () {
+    return ['remote']
+  }
+
+  // getting a packument and/or manifest is the same as with a file: spec.
+  // unpack the tarball stream, and then read from the package.json file.
+  packument () {
+    return FileFetcher.prototype.packument.apply(this)
+  }
+
+  manifest () {
+    return FileFetcher.prototype.manifest.apply(this)
+  }
+}
+module.exports = RemoteFetcher
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js
new file mode 100644
index 0000000000000..843fe5b600caf
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js
@@ -0,0 +1,15 @@
+// add a sha to a git remote url spec
+const addGitSha = (spec, sha) => {
+  if (spec.hosted) {
+    const h = spec.hosted
+    const opt = { noCommittish: true }
+    const base = h.https && h.auth ? h.https(opt) : h.shortcut(opt)
+
+    return `${base}#${sha}`
+  } else {
+    // don't use new URL for this, because it doesn't handle scp urls
+    return spec.rawSpec.replace(/#.*$/, '') + `#${sha}`
+  }
+}
+
+module.exports = addGitSha
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js
new file mode 100644
index 0000000000000..ac83b1793f199
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js
@@ -0,0 +1,15 @@
+const os = require('os')
+const { resolve } = require('path')
+
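+// compute the default cacache and TUF cache locations, following npm's
+// platform conventions: %LOCALAPPDATA%\npm-cache on Windows, ~/.npm elsewhere
+// (falling back to a tmpdir-based home when no homedir is available)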
+module.exports = (fakePlatform = false) => {
+  const temp = os.tmpdir()
+  const uidOrPid = process.getuid ? process.getuid() : process.pid
+  const home = os.homedir() || resolve(temp, 'npm-' + uidOrPid)
+  const platform = fakePlatform || process.platform
+  const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm'
+  const cacheRoot = (platform === 'win32' && process.env.LOCALAPPDATA) || home
+  return {
+    cacache: resolve(cacheRoot, cacheExtra, '_cacache'),
+    tufcache: resolve(cacheRoot, cacheExtra, '_tuf'),
+  }
+}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js
new file mode 100644
index 0000000000000..49a3f73f537ce
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js
@@ -0,0 +1,25 @@
+// Function to determine whether a path is in the package.bin set.
+// Used to prevent issues when people publish a package from a
+// windows machine, and then install with --no-bin-links.
+//
+// Note: this is not possible in remote or file fetchers, since
+// we don't have the manifest until AFTER we've unpacked.  But the
+// main use case is registry fetching with git a distant second,
+// so that's an acceptable edge case to not handle.
+
+const binObj = (name, bin) =>
+  typeof bin === 'string' ? { [name]: bin } : bin
+
+const hasBin = (pkg, path) => {
+  const bin = binObj(pkg.name, pkg.bin)
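+  // tar entry paths include the top-level directory (usually "package/"),
+  // which the bin paths in package.json do not, so strip the first segment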
+  const p = path.replace(/^[^\\/]*\//, '')
+  for (const kv of Object.entries(bin)) {
+    if (kv[1] === p) {
+      return true
+    }
+  }
+  return false
+}
+
+module.exports = (pkg, path) =>
+  pkg && pkg.bin ? hasBin(pkg, path) : false
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js
new file mode 100644
index 0000000000000..a3005c255565f
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js
@@ -0,0 +1,14 @@
+// run an npm command
+const spawn = require('@npmcli/promise-spawn')
+
+module.exports = (npmBin, npmCommand, cwd, env, extra) => {
+  const isJS = npmBin.endsWith('.js')
+  const cmd = isJS ? process.execPath : npmBin
+  const args = (isJS ? [npmBin] : []).concat(npmCommand)
+  // when installing to run the `prepare` script for a git dep, we need
+  // to ensure that we don't run into a cycle of checking out packages
+  // in temp directories.  this lets us link previously-seen repos that
+  // are also being prepared.
+
+  return spawn(cmd, args, { cwd, env }, extra)
+}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js
new file mode 100644
index 0000000000000..d070f0f7ba2d4
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js
@@ -0,0 +1,31 @@
+const isPackageBin = require('./is-package-bin.js')
+
+const tarCreateOptions = manifest => ({
+  cwd: manifest._resolved,
+  prefix: 'package/',
+  portable: true,
+  gzip: {
+    // forcing the level to 9 seems to avoid some
+    // platform specific optimizations that cause
+    // integrity mismatch errors due to differing
+    // end results after compression
+    level: 9,
+  },
+
+  // ensure that package bins are always executable
+  // Note that npm-packlist is already filtering out
+  // anything that is not a regular file, ignored by
+  // .npmignore or package.json "files", etc.
+  filter: (path, stat) => {
+    if (isPackageBin(manifest, path)) {
+      stat.mode |= 0o111
+    }
+    return true
+  },
+
+  // Provide a specific date in the 1980s for the benefit of zip,
+  // which is confounded by files dated at the Unix epoch 0.
+  mtime: new Date('1985-10-26T08:15:00.000Z'),
+})
+
+module.exports = tarCreateOptions
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js
new file mode 100644
index 0000000000000..c50cb6173b92e
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js
@@ -0,0 +1,10 @@
+const removeTrailingSlashes = (input) => {
+  // strip in a loop rather than with a regexp, to avoid ReDoS detection
+  let output = input
+  while (output.endsWith('/')) {
+    output = output.slice(0, -1)
+  }
+  return output
+}
+
+module.exports = removeTrailingSlashes
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json
new file mode 100644
index 0000000000000..d9119065bfc3d
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json
@@ -0,0 +1,85 @@
+{
+  "name": "pacote",
+  "version": "16.0.0",
+  "description": "JavaScript package downloader",
+  "author": "GitHub Inc.",
+  "bin": {
+    "pacote": "lib/bin.js"
+  },
+  "license": "ISC",
+  "main": "lib/index.js",
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "lint": "eslint \"**/*.js\"",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run lint -- --fix",
+    "posttest": "npm run lint",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "tap": {
+    "timeout": 300,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "devDependencies": {
+    "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0",
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "hosted-git-info": "^6.0.0",
+    "mutate-fs": "^2.1.1",
+    "nock": "^13.2.4",
+    "npm-registry-mock": "^1.3.2",
+    "tap": "^16.0.1"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "keywords": [
+    "packages",
+    "npm",
+    "git"
+  ],
+  "dependencies": {
+    "@npmcli/git": "^4.0.0",
+    "@npmcli/installed-package-contents": "^2.0.1",
+    "@npmcli/promise-spawn": "^6.0.1",
+    "@npmcli/run-script": "^6.0.0",
+    "cacache": "^17.0.0",
+    "fs-minipass": "^3.0.0",
+    "minipass": "^7.0.2",
+    "npm-package-arg": "^10.0.0",
+    "npm-packlist": "^7.0.0",
+    "npm-pick-manifest": "^8.0.0",
+    "npm-registry-fetch": "^15.0.0",
+    "proc-log": "^3.0.0",
+    "promise-retry": "^2.0.1",
+    "read-package-json": "^6.0.0",
+    "read-package-json-fast": "^3.0.0",
+    "sigstore": "^1.3.0",
+    "ssri": "^10.0.0",
+    "tar": "^6.1.11"
+  },
+  "engines": {
+    "node": "^16.13.0 || >=18.0.0"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/pacote.git"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "ciVersions": [
+      "16.13.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ],
+    "version": "4.18.0",
+    "windowsCI": false,
+    "publish": "true"
+  }
+}
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/path.js b/node_modules/pacote/node_modules/cacache/lib/content/path.js
deleted file mode 100644
index ad5a76a4f73f2..0000000000000
--- a/node_modules/pacote/node_modules/cacache/lib/content/path.js
+++ /dev/null
@@ -1,29 +0,0 @@
-'use strict'
-
-const contentVer = require('../../package.json')['cache-version'].content
-const hashToSegments = require('../util/hash-to-segments')
-const path = require('path')
-const ssri = require('ssri')
-
-// Current format of content file path:
-//
-// sha512-BaSE64Hex= ->
-// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
-//
-module.exports = contentPath
-
-function contentPath (cache, integrity) {
-  const sri = ssri.parse(integrity, { single: true })
-  // contentPath is the *strongest* algo given
-  return path.join(
-    contentDir(cache),
-    sri.algorithm,
-    ...hashToSegments(sri.hexDigest())
-  )
-}
-
-module.exports.contentDir = contentDir
-
-function contentDir (cache) {
-  return path.join(cache, `content-v${contentVer}`)
-}
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/read.js b/node_modules/pacote/node_modules/cacache/lib/content/read.js
deleted file mode 100644
index f41b539df65dc..0000000000000
--- a/node_modules/pacote/node_modules/cacache/lib/content/read.js
+++ /dev/null
@@ -1,166 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const fsm = require('fs-minipass')
-const ssri = require('ssri')
-const contentPath = require('./path')
-const Pipeline = require('minipass-pipeline')
-
-module.exports = read
-
-const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
-async function read (cache, integrity, opts = {}) {
-  const { size } = opts
-  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
-    // get size
-    const stat = await fs.stat(cpath)
-    return { stat, cpath, sri }
-  })
-  if (typeof size === 'number' && stat.size !== size) {
-    throw sizeError(size, stat.size)
-  }
-
-  if (stat.size > MAX_SINGLE_READ_SIZE) {
-    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
-  }
-
-  const data = await fs.readFile(cpath, { encoding: null })
-  if (!ssri.checkData(data, sri)) {
-    throw integrityError(sri, cpath)
-  }
-
-  return data
-}
-
-const readPipeline = (cpath, size, sri, stream) => {
-  stream.push(
-    new fsm.ReadStream(cpath, {
-      size,
-      readSize: MAX_SINGLE_READ_SIZE,
-    }),
-    ssri.integrityStream({
-      integrity: sri,
-      size,
-    })
-  )
-  return stream
-}
-
-module.exports.stream = readStream
-module.exports.readStream = readStream
-
-function readStream (cache, integrity, opts = {}) {
-  const { size } = opts
-  const stream = new Pipeline()
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
-      // just stat to ensure it exists
-      const stat = await fs.stat(cpath)
-      return { stat, cpath, sri }
-    })
-    if (typeof size === 'number' && size !== stat.size) {
-      return stream.emit('error', sizeError(size, stat.size))
-    }
-
-    return readPipeline(cpath, stat.size, sri, stream)
-  }).catch(err => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.copy = copy
-
-function copy (cache, integrity, dest) {
-  return withContentSri(cache, integrity, (cpath, sri) => {
-    return fs.copyFile(cpath, dest)
-  })
-}
-
-module.exports.hasContent = hasContent
-
-async function hasContent (cache, integrity) {
-  if (!integrity) {
-    return false
-  }
-
-  try {
-    return await withContentSri(cache, integrity, async (cpath, sri) => {
-      const stat = await fs.stat(cpath)
-      return { size: stat.size, sri, stat }
-    })
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return false
-    }
-
-    if (err.code === 'EPERM') {
-      /* istanbul ignore else */
-      if (process.platform !== 'win32') {
-        throw err
-      } else {
-        return false
-      }
-    }
-  }
-}
-
-async function withContentSri (cache, integrity, fn) {
-  const sri = ssri.parse(integrity)
-  // If `integrity` has multiple entries, pick the first digest
-  // with available local data.
-  const algo = sri.pickAlgorithm()
-  const digests = sri[algo]
-
-  if (digests.length <= 1) {
-    const cpath = contentPath(cache, digests[0])
-    return fn(cpath, digests[0])
-  } else {
-    // Can't use race here because a generic error can happen before
-    // a ENOENT error, and can happen before a valid result
-    const results = await Promise.all(digests.map(async (meta) => {
-      try {
-        return await withContentSri(cache, meta, fn)
-      } catch (err) {
-        if (err.code === 'ENOENT') {
-          return Object.assign(
-            new Error('No matching content found for ' + sri.toString()),
-            { code: 'ENOENT' }
-          )
-        }
-        return err
-      }
-    }))
-    // Return the first non error if it is found
-    const result = results.find((r) => !(r instanceof Error))
-    if (result) {
-      return result
-    }
-
-    // Throw the No matching content found error
-    const enoentError = results.find((r) => r.code === 'ENOENT')
-    if (enoentError) {
-      throw enoentError
-    }
-
-    // Throw generic error
-    throw results.find((r) => r instanceof Error)
-  }
-}
-
-function sizeError (expected, found) {
-  /* eslint-disable-next-line max-len */
-  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
-  err.expected = expected
-  err.found = found
-  err.code = 'EBADSIZE'
-  return err
-}
-
-function integrityError (sri, path) {
-  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
-  err.code = 'EINTEGRITY'
-  err.sri = sri
-  err.path = path
-  return err
-}
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/rm.js b/node_modules/pacote/node_modules/cacache/lib/content/rm.js
deleted file mode 100644
index ce58d679e4cb2..0000000000000
--- a/node_modules/pacote/node_modules/cacache/lib/content/rm.js
+++ /dev/null
@@ -1,18 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const contentPath = require('./path')
-const { hasContent } = require('./read')
-
-module.exports = rm
-
-async function rm (cache, integrity) {
-  const content = await hasContent(cache, integrity)
-  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
-  if (content && content.sri) {
-    await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
-    return true
-  } else {
-    return false
-  }
-}
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/write.js b/node_modules/pacote/node_modules/cacache/lib/content/write.js
deleted file mode 100644
index 7146146581287..0000000000000
--- a/node_modules/pacote/node_modules/cacache/lib/content/write.js
+++ /dev/null
@@ -1,205 +0,0 @@
-'use strict'
-
-const events = require('events')
-
-const contentPath = require('./path')
-const fs = require('fs/promises')
-const { moveFile } = require('@npmcli/fs')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-const Flush = require('minipass-flush')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-const fsm = require('fs-minipass')
-
-module.exports = write
-
-// Cache of move operations in process so we don't duplicate
-const moveOperations = new Map()
-
-async function write (cache, data, opts = {}) {
-  const { algorithms, size, integrity } = opts
-
-  if (typeof size === 'number' && data.length !== size) {
-    throw sizeError(size, data.length)
-  }
-
-  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
-  if (integrity && !ssri.checkData(data, integrity, opts)) {
-    throw checksumError(integrity, sri)
-  }
-
-  for (const algo in sri) {
-    const tmp = await makeTmp(cache, opts)
-    const hash = sri[algo].toString()
-    try {
-      await fs.writeFile(tmp.target, data, { flag: 'wx' })
-      await moveToDestination(tmp, cache, hash, opts)
-    } finally {
-      if (!tmp.moved) {
-        await fs.rm(tmp.target, { recursive: true, force: true })
-      }
-    }
-  }
-  return { integrity: sri, size: data.length }
-}
-
-module.exports.stream = writeStream
-
-// writes proxied to the 'inputStream' that is passed to the Promise
-// 'end' is deferred until content is handled.
-class CacacheWriteStream extends Flush {
-  constructor (cache, opts) {
-    super()
-    this.opts = opts
-    this.cache = cache
-    this.inputStream = new Minipass()
-    this.inputStream.on('error', er => this.emit('error', er))
-    this.inputStream.on('drain', () => this.emit('drain'))
-    this.handleContentP = null
-  }
-
-  write (chunk, encoding, cb) {
-    if (!this.handleContentP) {
-      this.handleContentP = handleContent(
-        this.inputStream,
-        this.cache,
-        this.opts
-      )
-    }
-    return this.inputStream.write(chunk, encoding, cb)
-  }
-
-  flush (cb) {
-    this.inputStream.end(() => {
-      if (!this.handleContentP) {
-        const e = new Error('Cache input stream was empty')
-        e.code = 'ENODATA'
-        // empty streams are probably emitting end right away.
-        // defer this one tick by rejecting a promise on it.
-        return Promise.reject(e).catch(cb)
-      }
-      // eslint-disable-next-line promise/catch-or-return
-      this.handleContentP.then(
-        (res) => {
-          res.integrity && this.emit('integrity', res.integrity)
-          // eslint-disable-next-line promise/always-return
-          res.size !== null && this.emit('size', res.size)
-          cb()
-        },
-        (er) => cb(er)
-      )
-    })
-  }
-}
-
-function writeStream (cache, opts = {}) {
-  return new CacacheWriteStream(cache, opts)
-}
-
-async function handleContent (inputStream, cache, opts) {
-  const tmp = await makeTmp(cache, opts)
-  try {
-    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
-    await moveToDestination(
-      tmp,
-      cache,
-      res.integrity,
-      opts
-    )
-    return res
-  } finally {
-    if (!tmp.moved) {
-      await fs.rm(tmp.target, { recursive: true, force: true })
-    }
-  }
-}
-
-async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
-  const outStream = new fsm.WriteStream(tmpTarget, {
-    flags: 'wx',
-  })
-
-  if (opts.integrityEmitter) {
-    // we need to create these all simultaneously since they can fire in any order
-    const [integrity, size] = await Promise.all([
-      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
-      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
-      new Pipeline(inputStream, outStream).promise(),
-    ])
-    return { integrity, size }
-  }
-
-  let integrity
-  let size
-  const hashStream = ssri.integrityStream({
-    integrity: opts.integrity,
-    algorithms: opts.algorithms,
-    size: opts.size,
-  })
-  hashStream.on('integrity', i => {
-    integrity = i
-  })
-  hashStream.on('size', s => {
-    size = s
-  })
-
-  const pipeline = new Pipeline(inputStream, hashStream, outStream)
-  await pipeline.promise()
-  return { integrity, size }
-}
-
-async function makeTmp (cache, opts) {
-  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
-  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
-  return {
-    target: tmpTarget,
-    moved: false,
-  }
-}
-
-async function moveToDestination (tmp, cache, sri, opts) {
-  const destination = contentPath(cache, sri)
-  const destDir = path.dirname(destination)
-  if (moveOperations.has(destination)) {
-    return moveOperations.get(destination)
-  }
-  moveOperations.set(
-    destination,
-    fs.mkdir(destDir, { recursive: true })
-      .then(async () => {
-        await moveFile(tmp.target, destination, { overwrite: false })
-        tmp.moved = true
-        return tmp.moved
-      })
-      .catch(err => {
-        if (!err.message.startsWith('The destination file exists')) {
-          throw Object.assign(err, { code: 'EEXIST' })
-        }
-      }).finally(() => {
-        moveOperations.delete(destination)
-      })
-
-  )
-  return moveOperations.get(destination)
-}
-
-function sizeError (expected, found) {
-  /* eslint-disable-next-line max-len */
-  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
-  err.expected = expected
-  err.found = found
-  err.code = 'EBADSIZE'
-  return err
-}
-
-function checksumError (expected, found) {
-  const err = new Error(`Integrity check failed:
-  Wanted: ${expected}
-   Found: ${found}`)
-  err.code = 'EINTEGRITY'
-  err.expected = expected
-  err.found = found
-  return err
-}
diff --git a/node_modules/pacote/node_modules/cacache/lib/entry-index.js b/node_modules/pacote/node_modules/cacache/lib/entry-index.js
deleted file mode 100644
index 722a37af5ce15..0000000000000
--- a/node_modules/pacote/node_modules/cacache/lib/entry-index.js
+++ /dev/null
@@ -1,330 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const {
-  appendFile,
-  mkdir,
-  readFile,
-  readdir,
-  rm,
-  writeFile,
-} = require('fs/promises')
-const { Minipass } = require('minipass')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-
-const contentPath = require('./content/path')
-const hashToSegments = require('./util/hash-to-segments')
-const indexV = require('../package.json')['cache-version'].index
-const { moveFile } = require('@npmcli/fs')
-
-module.exports.NotFoundError = class NotFoundError extends Error {
-  constructor (cache, key) {
-    super(`No cache entry for ${key} found in ${cache}`)
-    this.code = 'ENOENT'
-    this.cache = cache
-    this.key = key
-  }
-}
-
-module.exports.compact = compact
-
-async function compact (cache, key, matchFn, opts = {}) {
-  const bucket = bucketPath(cache, key)
-  const entries = await bucketEntries(bucket)
-  const newEntries = []
-  // we loop backwards because the bottom-most result is the newest
-  // since we add new entries with appendFile
-  for (let i = entries.length - 1; i >= 0; --i) {
-    const entry = entries[i]
-    // a null integrity could mean either a delete was appended
-    // or the user has simply stored an index that does not map
-    // to any content. we determine if the user wants to keep the
-    // null integrity based on the validateEntry function passed in options.
-    // if the integrity is null and no validateEntry is provided, we break
-    // as we consider the null integrity to be a deletion of everything
-    // that came before it.
-    if (entry.integrity === null && !opts.validateEntry) {
-      break
-    }
-
-    // if this entry is valid, and it is either the first entry or
-    // the newEntries array doesn't already include an entry that
-    // matches this one based on the provided matchFn, then we add
-    // it to the beginning of our list
-    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
-      (newEntries.length === 0 ||
-        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
-      newEntries.unshift(entry)
-    }
-  }
-
-  const newIndex = '\n' + newEntries.map((entry) => {
-    const stringified = JSON.stringify(entry)
-    const hash = hashEntry(stringified)
-    return `${hash}\t${stringified}`
-  }).join('\n')
-
-  const setup = async () => {
-    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
-    await mkdir(path.dirname(target), { recursive: true })
-    return {
-      target,
-      moved: false,
-    }
-  }
-
-  const teardown = async (tmp) => {
-    if (!tmp.moved) {
-      return rm(tmp.target, { recursive: true, force: true })
-    }
-  }
-
-  const write = async (tmp) => {
-    await writeFile(tmp.target, newIndex, { flag: 'wx' })
-    await mkdir(path.dirname(bucket), { recursive: true })
-    // we use @npmcli/move-file directly here because we
-    // want to overwrite the existing file
-    await moveFile(tmp.target, bucket)
-    tmp.moved = true
-  }
-
-  // write the file atomically
-  const tmp = await setup()
-  try {
-    await write(tmp)
-  } finally {
-    await teardown(tmp)
-  }
-
-  // we reverse the list we generated such that the newest
-  // entries come first in order to make looping through them easier
-  // the true passed to formatEntry tells it to keep null
-  // integrity values, if they made it this far it's because
-  // validateEntry returned true, and as such we should return it
-  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
-}
-
-module.exports.insert = insert
-
-async function insert (cache, key, integrity, opts = {}) {
-  const { metadata, size, time } = opts
-  const bucket = bucketPath(cache, key)
-  const entry = {
-    key,
-    integrity: integrity && ssri.stringify(integrity),
-    time: time || Date.now(),
-    size,
-    metadata,
-  }
-  try {
-    await mkdir(path.dirname(bucket), { recursive: true })
-    const stringified = JSON.stringify(entry)
-    // NOTE - Cleverness ahoy!
-    //
-    // This works because it's tremendously unlikely for an entry to corrupt
-    // another while still preserving the string length of the JSON in
-    // question. So, we just slap the length in there and verify it on read.
-    //
-    // Thanks to @isaacs for the whiteboarding session that ended up with
-    // this.
-    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return undefined
-    }
-
-    throw err
-  }
-  return formatEntry(cache, entry)
-}
-
-module.exports.find = find
-
-async function find (cache, key) {
-  const bucket = bucketPath(cache, key)
-  try {
-    const entries = await bucketEntries(bucket)
-    return entries.reduce((latest, next) => {
-      if (next && next.key === key) {
-        return formatEntry(cache, next)
-      } else {
-        return latest
-      }
-    }, null)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return null
-    } else {
-      throw err
-    }
-  }
-}
-
-module.exports.delete = del
-
-function del (cache, key, opts = {}) {
-  if (!opts.removeFully) {
-    return insert(cache, key, null, opts)
-  }
-
-  const bucket = bucketPath(cache, key)
-  return rm(bucket, { recursive: true, force: true })
-}
-
-module.exports.lsStream = lsStream
-
-function lsStream (cache) {
-  const indexDir = bucketDir(cache)
-  const stream = new Minipass({ objectMode: true })
-
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const buckets = await readdirOrEmpty(indexDir)
-    await Promise.all(buckets.map(async (bucket) => {
-      const bucketPath = path.join(indexDir, bucket)
-      const subbuckets = await readdirOrEmpty(bucketPath)
-      await Promise.all(subbuckets.map(async (subbucket) => {
-        const subbucketPath = path.join(bucketPath, subbucket)
-
-        // "/cachename//./*"
-        const subbucketEntries = await readdirOrEmpty(subbucketPath)
-        await Promise.all(subbucketEntries.map(async (entry) => {
-          const entryPath = path.join(subbucketPath, entry)
-          try {
-            const entries = await bucketEntries(entryPath)
-            // using a Map here prevents duplicate keys from showing up
-            // twice, I guess?
-            const reduced = entries.reduce((acc, entry) => {
-              acc.set(entry.key, entry)
-              return acc
-            }, new Map())
-            // reduced is a map of key => entry
-            for (const entry of reduced.values()) {
-              const formatted = formatEntry(cache, entry)
-              if (formatted) {
-                stream.write(formatted)
-              }
-            }
-          } catch (err) {
-            if (err.code === 'ENOENT') {
-              return undefined
-            }
-            throw err
-          }
-        }))
-      }))
-    }))
-    stream.end()
-    return stream
-  }).catch(err => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.ls = ls
-
-async function ls (cache) {
-  const entries = await lsStream(cache).collect()
-  return entries.reduce((acc, xs) => {
-    acc[xs.key] = xs
-    return acc
-  }, {})
-}
-
-module.exports.bucketEntries = bucketEntries
-
-async function bucketEntries (bucket, filter) {
-  const data = await readFile(bucket, 'utf8')
-  return _bucketEntries(data, filter)
-}
-
-function _bucketEntries (data, filter) {
-  const entries = []
-  data.split('\n').forEach((entry) => {
-    if (!entry) {
-      return
-    }
-
-    const pieces = entry.split('\t')
-    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
-      // Hash is no good! Corruption or malice? Doesn't matter!
-      // EJECT EJECT
-      return
-    }
-    let obj
-    try {
-      obj = JSON.parse(pieces[1])
-    } catch (_) {
-      // eslint-ignore-next-line no-empty-block
-    }
-    // coverage disabled here, no need to test with an entry that parses to something falsey
-    // istanbul ignore else
-    if (obj) {
-      entries.push(obj)
-    }
-  })
-  return entries
-}
-
-module.exports.bucketDir = bucketDir
-
-function bucketDir (cache) {
-  return path.join(cache, `index-v${indexV}`)
-}
-
-module.exports.bucketPath = bucketPath
-
-function bucketPath (cache, key) {
-  const hashed = hashKey(key)
-  return path.join.apply(
-    path,
-    [bucketDir(cache)].concat(hashToSegments(hashed))
-  )
-}
-
-module.exports.hashKey = hashKey
-
-function hashKey (key) {
-  return hash(key, 'sha256')
-}
-
-module.exports.hashEntry = hashEntry
-
-function hashEntry (str) {
-  return hash(str, 'sha1')
-}
-
-function hash (str, digest) {
-  return crypto
-    .createHash(digest)
-    .update(str)
-    .digest('hex')
-}
-
-function formatEntry (cache, entry, keepAll) {
-  // Treat null digests as deletions. They'll shadow any previous entries.
-  if (!entry.integrity && !keepAll) {
-    return null
-  }
-
-  return {
-    key: entry.key,
-    integrity: entry.integrity,
-    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
-    size: entry.size,
-    time: entry.time,
-    metadata: entry.metadata,
-  }
-}
-
-function readdirOrEmpty (dir) {
-  return readdir(dir).catch((err) => {
-    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
-      return []
-    }
-
-    throw err
-  })
-}
diff --git a/node_modules/pacote/node_modules/cacache/lib/get.js b/node_modules/pacote/node_modules/cacache/lib/get.js
deleted file mode 100644
index 80ec206c7ecaa..0000000000000
--- a/node_modules/pacote/node_modules/cacache/lib/get.js
+++ /dev/null
@@ -1,170 +0,0 @@
-'use strict'
-
-const Collect = require('minipass-collect')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const read = require('./content/read')
-
-async function getData (cache, key, opts = {}) {
-  const { integrity, memoize, size } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return {
-      metadata: memoized.entry.metadata,
-      data: memoized.data,
-      integrity: memoized.entry.integrity,
-      size: memoized.entry.size,
-    }
-  }
-
-  const entry = await index.find(cache, key, opts)
-  if (!entry) {
-    throw new index.NotFoundError(cache, key)
-  }
-  const data = await read(cache, entry.integrity, { integrity, size })
-  if (memoize) {
-    memo.put(cache, entry, data, opts)
-  }
-
-  return {
-    data,
-    metadata: entry.metadata,
-    size: entry.size,
-    integrity: entry.integrity,
-  }
-}
-module.exports = getData
-
-async function getDataByDigest (cache, key, opts = {}) {
-  const { integrity, memoize, size } = opts
-  const memoized = memo.get.byDigest(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return memoized
-  }
-
-  const res = await read(cache, key, { integrity, size })
-  if (memoize) {
-    memo.put.byDigest(cache, key, res, opts)
-  }
-  return res
-}
-module.exports.byDigest = getDataByDigest
-
-const getMemoizedStream = (memoized) => {
-  const stream = new Minipass()
-  stream.on('newListener', function (ev, cb) {
-    ev === 'metadata' && cb(memoized.entry.metadata)
-    ev === 'integrity' && cb(memoized.entry.integrity)
-    ev === 'size' && cb(memoized.entry.size)
-  })
-  stream.end(memoized.data)
-  return stream
-}
-
-function getStream (cache, key, opts = {}) {
-  const { memoize, size } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return getMemoizedStream(memoized)
-  }
-
-  const stream = new Pipeline()
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const entry = await index.find(cache, key)
-    if (!entry) {
-      throw new index.NotFoundError(cache, key)
-    }
-
-    stream.emit('metadata', entry.metadata)
-    stream.emit('integrity', entry.integrity)
-    stream.emit('size', entry.size)
-    stream.on('newListener', function (ev, cb) {
-      ev === 'metadata' && cb(entry.metadata)
-      ev === 'integrity' && cb(entry.integrity)
-      ev === 'size' && cb(entry.size)
-    })
-
-    const src = read.readStream(
-      cache,
-      entry.integrity,
-      { ...opts, size: typeof size !== 'number' ? entry.size : size }
-    )
-
-    if (memoize) {
-      const memoStream = new Collect.PassThrough()
-      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
-      stream.unshift(memoStream)
-    }
-    stream.unshift(src)
-    return stream
-  }).catch((err) => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.stream = getStream
-
-function getStreamDigest (cache, integrity, opts = {}) {
-  const { memoize } = opts
-  const memoized = memo.get.byDigest(cache, integrity, opts)
-  if (memoized && memoize !== false) {
-    const stream = new Minipass()
-    stream.end(memoized)
-    return stream
-  } else {
-    const stream = read.readStream(cache, integrity, opts)
-    if (!memoize) {
-      return stream
-    }
-
-    const memoStream = new Collect.PassThrough()
-    memoStream.on('collect', data => memo.put.byDigest(
-      cache,
-      integrity,
-      data,
-      opts
-    ))
-    return new Pipeline(stream, memoStream)
-  }
-}
-
-module.exports.stream.byDigest = getStreamDigest
-
-function info (cache, key, opts = {}) {
-  const { memoize } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return Promise.resolve(memoized.entry)
-  } else {
-    return index.find(cache, key)
-  }
-}
-module.exports.info = info
-
-async function copy (cache, key, dest, opts = {}) {
-  const entry = await index.find(cache, key, opts)
-  if (!entry) {
-    throw new index.NotFoundError(cache, key)
-  }
-  await read.copy(cache, entry.integrity, dest, opts)
-  return {
-    metadata: entry.metadata,
-    size: entry.size,
-    integrity: entry.integrity,
-  }
-}
-
-module.exports.copy = copy
-
-async function copyByDigest (cache, key, dest, opts = {}) {
-  await read.copy(cache, key, dest, opts)
-  return key
-}
-
-module.exports.copy.byDigest = copyByDigest
-
-module.exports.hasContent = read.hasContent
diff --git a/node_modules/pacote/node_modules/cacache/lib/index.js b/node_modules/pacote/node_modules/cacache/lib/index.js
deleted file mode 100644
index c9b0da5f3a271..0000000000000
--- a/node_modules/pacote/node_modules/cacache/lib/index.js
+++ /dev/null
@@ -1,42 +0,0 @@
-'use strict'
-
-const get = require('./get.js')
-const put = require('./put.js')
-const rm = require('./rm.js')
-const verify = require('./verify.js')
-const { clearMemoized } = require('./memoization.js')
-const tmp = require('./util/tmp.js')
-const index = require('./entry-index.js')
-
-module.exports.index = {}
-module.exports.index.compact = index.compact
-module.exports.index.insert = index.insert
-
-module.exports.ls = index.ls
-module.exports.ls.stream = index.lsStream
-
-module.exports.get = get
-module.exports.get.byDigest = get.byDigest
-module.exports.get.stream = get.stream
-module.exports.get.stream.byDigest = get.stream.byDigest
-module.exports.get.copy = get.copy
-module.exports.get.copy.byDigest = get.copy.byDigest
-module.exports.get.info = get.info
-module.exports.get.hasContent = get.hasContent
-
-module.exports.put = put
-module.exports.put.stream = put.stream
-
-module.exports.rm = rm.entry
-module.exports.rm.all = rm.all
-module.exports.rm.entry = module.exports.rm
-module.exports.rm.content = rm.content
-
-module.exports.clearMemoized = clearMemoized
-
-module.exports.tmp = {}
-module.exports.tmp.mkdir = tmp.mkdir
-module.exports.tmp.withTmp = tmp.withTmp
-
-module.exports.verify = verify
-module.exports.verify.lastRun = verify.lastRun
diff --git a/node_modules/pacote/node_modules/cacache/lib/memoization.js b/node_modules/pacote/node_modules/cacache/lib/memoization.js
deleted file mode 100644
index 0ff604a479c9c..0000000000000
--- a/node_modules/pacote/node_modules/cacache/lib/memoization.js
+++ /dev/null
@@ -1,72 +0,0 @@
-'use strict'
-
-const LRU = require('lru-cache')
-
-const MEMOIZED = new LRU({
-  max: 500,
-  maxSize: 50 * 1024 * 1024, // 50MB
-  ttl: 3 * 60 * 1000, // 3 minutes
-  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
-})
-
-module.exports.clearMemoized = clearMemoized
-
-function clearMemoized () {
-  const old = {}
-  MEMOIZED.forEach((v, k) => {
-    old[k] = v
-  })
-  MEMOIZED.clear()
-  return old
-}
-
-module.exports.put = put
-
-function put (cache, entry, data, opts) {
-  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
-  putDigest(cache, entry.integrity, data, opts)
-}
-
-module.exports.put.byDigest = putDigest
-
-function putDigest (cache, integrity, data, opts) {
-  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
-}
-
-module.exports.get = get
-
-function get (cache, key, opts) {
-  return pickMem(opts).get(`key:${cache}:${key}`)
-}
-
-module.exports.get.byDigest = getDigest
-
-function getDigest (cache, integrity, opts) {
-  return pickMem(opts).get(`digest:${cache}:${integrity}`)
-}
-
-class ObjProxy {
-  constructor (obj) {
-    this.obj = obj
-  }
-
-  get (key) {
-    return this.obj[key]
-  }
-
-  set (key, val) {
-    this.obj[key] = val
-  }
-}
-
-function pickMem (opts) {
-  if (!opts || !opts.memoize) {
-    return MEMOIZED
-  } else if (opts.memoize.get && opts.memoize.set) {
-    return opts.memoize
-  } else if (typeof opts.memoize === 'object') {
-    return new ObjProxy(opts.memoize)
-  } else {
-    return MEMOIZED
-  }
-}
diff --git a/node_modules/pacote/node_modules/cacache/lib/put.js b/node_modules/pacote/node_modules/cacache/lib/put.js
deleted file mode 100644
index 9fc932d5f6dec..0000000000000
--- a/node_modules/pacote/node_modules/cacache/lib/put.js
+++ /dev/null
@@ -1,80 +0,0 @@
-'use strict'
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const write = require('./content/write')
-const Flush = require('minipass-flush')
-const { PassThrough } = require('minipass-collect')
-const Pipeline = require('minipass-pipeline')
-
-const putOpts = (opts) => ({
-  algorithms: ['sha512'],
-  ...opts,
-})
-
-module.exports = putData
-
-async function putData (cache, key, data, opts = {}) {
-  const { memoize } = opts
-  opts = putOpts(opts)
-  const res = await write(cache, data, opts)
-  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
-  if (memoize) {
-    memo.put(cache, entry, data, opts)
-  }
-
-  return res.integrity
-}
-
-module.exports.stream = putStream
-
-function putStream (cache, key, opts = {}) {
-  const { memoize } = opts
-  opts = putOpts(opts)
-  let integrity
-  let size
-  let error
-
-  let memoData
-  const pipeline = new Pipeline()
-  // first item in the pipeline is the memoizer, because we need
-  // that to end first and get the collected data.
-  if (memoize) {
-    const memoizer = new PassThrough().on('collect', data => {
-      memoData = data
-    })
-    pipeline.push(memoizer)
-  }
-
-  // contentStream is a write-only, not a passthrough
-  // no data comes out of it.
-  const contentStream = write.stream(cache, opts)
-    .on('integrity', (int) => {
-      integrity = int
-    })
-    .on('size', (s) => {
-      size = s
-    })
-    .on('error', (err) => {
-      error = err
-    })
-
-  pipeline.push(contentStream)
-
-  // last but not least, we write the index and emit hash and size,
-  // and memoize if we're doing that
-  pipeline.push(new Flush({
-    async flush () {
-      if (!error) {
-        const entry = await index.insert(cache, key, integrity, { ...opts, size })
-        if (memoize && memoData) {
-          memo.put(cache, entry, memoData, opts)
-        }
-        pipeline.emit('integrity', integrity)
-        pipeline.emit('size', size)
-      }
-    },
-  }))
-
-  return pipeline
-}
diff --git a/node_modules/pacote/node_modules/cacache/lib/rm.js b/node_modules/pacote/node_modules/cacache/lib/rm.js
deleted file mode 100644
index a94760c7cf243..0000000000000
--- a/node_modules/pacote/node_modules/cacache/lib/rm.js
+++ /dev/null
@@ -1,31 +0,0 @@
-'use strict'
-
-const { rm } = require('fs/promises')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const memo = require('./memoization')
-const path = require('path')
-const rmContent = require('./content/rm')
-
-module.exports = entry
-module.exports.entry = entry
-
-function entry (cache, key, opts) {
-  memo.clearMemoized()
-  return index.delete(cache, key, opts)
-}
-
-module.exports.content = content
-
-function content (cache, integrity) {
-  memo.clearMemoized()
-  return rmContent(cache, integrity)
-}
-
-module.exports.all = all
-
-async function all (cache) {
-  memo.clearMemoized()
-  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
-  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
-}
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/glob.js b/node_modules/pacote/node_modules/cacache/lib/util/glob.js
deleted file mode 100644
index 8500c1c16a429..0000000000000
--- a/node_modules/pacote/node_modules/cacache/lib/util/glob.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-const { glob } = require('glob')
-const path = require('path')
-
-const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
-module.exports = (path, options) => glob(globify(path), options)
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js
deleted file mode 100644
index 445599b503808..0000000000000
--- a/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-module.exports = hashToSegments
-
-function hashToSegments (hash) {
-  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
-}
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/tmp.js b/node_modules/pacote/node_modules/cacache/lib/util/tmp.js
deleted file mode 100644
index 0bf5302136ebe..0000000000000
--- a/node_modules/pacote/node_modules/cacache/lib/util/tmp.js
+++ /dev/null
@@ -1,26 +0,0 @@
-'use strict'
-
-const { withTempDir } = require('@npmcli/fs')
-const fs = require('fs/promises')
-const path = require('path')
-
-module.exports.mkdir = mktmpdir
-
-async function mktmpdir (cache, opts = {}) {
-  const { tmpPrefix } = opts
-  const tmpDir = path.join(cache, 'tmp')
-  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
-  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
-  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
-  return fs.mkdtemp(target, { owner: 'inherit' })
-}
-
-module.exports.withTmp = withTmp
-
-function withTmp (cache, opts, cb) {
-  if (!cb) {
-    cb = opts
-    opts = {}
-  }
-  return withTempDir(path.join(cache, 'tmp'), cb, opts)
-}
diff --git a/node_modules/pacote/node_modules/cacache/lib/verify.js b/node_modules/pacote/node_modules/cacache/lib/verify.js
deleted file mode 100644
index 62e85c946490f..0000000000000
--- a/node_modules/pacote/node_modules/cacache/lib/verify.js
+++ /dev/null
@@ -1,257 +0,0 @@
-'use strict'
-
-const {
-  mkdir,
-  readFile,
-  rm,
-  stat,
-  truncate,
-  writeFile,
-} = require('fs/promises')
-const pMap = require('p-map')
-const contentPath = require('./content/path')
-const fsm = require('fs-minipass')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const path = require('path')
-const ssri = require('ssri')
-
-const hasOwnProperty = (obj, key) =>
-  Object.prototype.hasOwnProperty.call(obj, key)
-
-const verifyOpts = (opts) => ({
-  concurrency: 20,
-  log: { silly () {} },
-  ...opts,
-})
-
-module.exports = verify
-
-async function verify (cache, opts) {
-  opts = verifyOpts(opts)
-  opts.log.silly('verify', 'verifying cache at', cache)
-
-  const steps = [
-    markStartTime,
-    fixPerms,
-    garbageCollect,
-    rebuildIndex,
-    cleanTmp,
-    writeVerifile,
-    markEndTime,
-  ]
-
-  const stats = {}
-  for (const step of steps) {
-    const label = step.name
-    const start = new Date()
-    const s = await step(cache, opts)
-    if (s) {
-      Object.keys(s).forEach((k) => {
-        stats[k] = s[k]
-      })
-    }
-    const end = new Date()
-    if (!stats.runTime) {
-      stats.runTime = {}
-    }
-    stats.runTime[label] = end - start
-  }
-  stats.runTime.total = stats.endTime - stats.startTime
-  opts.log.silly(
-    'verify',
-    'verification finished for',
-    cache,
-    'in',
-    `${stats.runTime.total}ms`
-  )
-  return stats
-}
-
-async function markStartTime (cache, opts) {
-  return { startTime: new Date() }
-}
-
-async function markEndTime (cache, opts) {
-  return { endTime: new Date() }
-}
-
-async function fixPerms (cache, opts) {
-  opts.log.silly('verify', 'fixing cache permissions')
-  await mkdir(cache, { recursive: true })
-  return null
-}
-
-// Implements a naive mark-and-sweep tracing garbage collector.
-//
-// The algorithm is basically as follows:
-// 1. Read (and filter) all index entries ("pointers")
-// 2. Mark each integrity value as "live"
-// 3. Read entire filesystem tree in `content-vX/` dir
-// 4. If content is live, verify its checksum and delete it if it fails
-// 5. If content is not marked as live, rm it.
-//
-async function garbageCollect (cache, opts) {
-  opts.log.silly('verify', 'garbage collecting content')
-  const indexStream = index.lsStream(cache)
-  const liveContent = new Set()
-  indexStream.on('data', (entry) => {
-    if (opts.filter && !opts.filter(entry)) {
-      return
-    }
-
-    // integrity is stringified, re-parse it so we can get each hash
-    const integrity = ssri.parse(entry.integrity)
-    for (const algo in integrity) {
-      liveContent.add(integrity[algo].toString())
-    }
-  })
-  await new Promise((resolve, reject) => {
-    indexStream.on('end', resolve).on('error', reject)
-  })
-  const contentDir = contentPath.contentDir(cache)
-  const files = await glob(path.join(contentDir, '**'), {
-    follow: false,
-    nodir: true,
-    nosort: true,
-  })
-  const stats = {
-    verifiedContent: 0,
-    reclaimedCount: 0,
-    reclaimedSize: 0,
-    badContentCount: 0,
-    keptSize: 0,
-  }
-  await pMap(
-    files,
-    async (f) => {
-      const split = f.split(/[/\\]/)
-      const digest = split.slice(split.length - 3).join('')
-      const algo = split[split.length - 4]
-      const integrity = ssri.fromHex(digest, algo)
-      if (liveContent.has(integrity.toString())) {
-        const info = await verifyContent(f, integrity)
-        if (!info.valid) {
-          stats.reclaimedCount++
-          stats.badContentCount++
-          stats.reclaimedSize += info.size
-        } else {
-          stats.verifiedContent++
-          stats.keptSize += info.size
-        }
-      } else {
-        // No entries refer to this content. We can delete.
-        stats.reclaimedCount++
-        const s = await stat(f)
-        await rm(f, { recursive: true, force: true })
-        stats.reclaimedSize += s.size
-      }
-      return stats
-    },
-    { concurrency: opts.concurrency }
-  )
-  return stats
-}
-
-async function verifyContent (filepath, sri) {
-  const contentInfo = {}
-  try {
-    const { size } = await stat(filepath)
-    contentInfo.size = size
-    contentInfo.valid = true
-    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return { size: 0, valid: false }
-    }
-    if (err.code !== 'EINTEGRITY') {
-      throw err
-    }
-
-    await rm(filepath, { recursive: true, force: true })
-    contentInfo.valid = false
-  }
-  return contentInfo
-}
-
-async function rebuildIndex (cache, opts) {
-  opts.log.silly('verify', 'rebuilding index')
-  const entries = await index.ls(cache)
-  const stats = {
-    missingContent: 0,
-    rejectedEntries: 0,
-    totalEntries: 0,
-  }
-  const buckets = {}
-  for (const k in entries) {
-    /* istanbul ignore else */
-    if (hasOwnProperty(entries, k)) {
-      const hashed = index.hashKey(k)
-      const entry = entries[k]
-      const excluded = opts.filter && !opts.filter(entry)
-      excluded && stats.rejectedEntries++
-      if (buckets[hashed] && !excluded) {
-        buckets[hashed].push(entry)
-      } else if (buckets[hashed] && excluded) {
-        // skip
-      } else if (excluded) {
-        buckets[hashed] = []
-        buckets[hashed]._path = index.bucketPath(cache, k)
-      } else {
-        buckets[hashed] = [entry]
-        buckets[hashed]._path = index.bucketPath(cache, k)
-      }
-    }
-  }
-  await pMap(
-    Object.keys(buckets),
-    (key) => {
-      return rebuildBucket(cache, buckets[key], stats, opts)
-    },
-    { concurrency: opts.concurrency }
-  )
-  return stats
-}
-
-async function rebuildBucket (cache, bucket, stats, opts) {
-  await truncate(bucket._path)
-  // This needs to be serialized because cacache explicitly
-  // lets very racy bucket conflicts clobber each other.
-  for (const entry of bucket) {
-    const content = contentPath(cache, entry.integrity)
-    try {
-      await stat(content)
-      await index.insert(cache, entry.key, entry.integrity, {
-        metadata: entry.metadata,
-        size: entry.size,
-        time: entry.time,
-      })
-      stats.totalEntries++
-    } catch (err) {
-      if (err.code === 'ENOENT') {
-        stats.rejectedEntries++
-        stats.missingContent++
-      } else {
-        throw err
-      }
-    }
-  }
-}
-
-function cleanTmp (cache, opts) {
-  opts.log.silly('verify', 'cleaning tmp directory')
-  return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
-}
-
-async function writeVerifile (cache, opts) {
-  const verifile = path.join(cache, '_lastverified')
-  opts.log.silly('verify', 'writing verifile to ' + verifile)
-  return writeFile(verifile, `${Date.now()}`)
-}
-
-module.exports.lastRun = lastRun
-
-async function lastRun (cache) {
-  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
-  return new Date(+data)
-}
diff --git a/node_modules/pacote/node_modules/cacache/package.json b/node_modules/pacote/node_modules/cacache/package.json
deleted file mode 100644
index ab58cb8b7c50f..0000000000000
--- a/node_modules/pacote/node_modules/cacache/package.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
-  "name": "cacache",
-  "version": "17.1.4",
-  "cache-version": {
-    "content": "2",
-    "index": "5"
-  },
-  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "coverage": "tap",
-    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
-    "lint": "eslint \"**/*.js\"",
-    "npmclilint": "npmcli-lint",
-    "lintfix": "npm run lint -- --fix",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "posttest": "npm run lint",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/cacache.git"
-  },
-  "keywords": [
-    "cache",
-    "caching",
-    "content-addressable",
-    "sri",
-    "sri hash",
-    "subresource integrity",
-    "cache",
-    "storage",
-    "store",
-    "file store",
-    "filesystem",
-    "disk cache",
-    "disk storage"
-  ],
-  "license": "ISC",
-  "dependencies": {
-    "@npmcli/fs": "^3.1.0",
-    "fs-minipass": "^3.0.0",
-    "glob": "^10.2.2",
-    "lru-cache": "^7.7.1",
-    "minipass": "^7.0.3",
-    "minipass-collect": "^1.0.2",
-    "minipass-flush": "^1.0.5",
-    "minipass-pipeline": "^1.2.4",
-    "p-map": "^4.0.0",
-    "ssri": "^10.0.0",
-    "tar": "^6.1.11",
-    "unique-filename": "^3.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.0.0"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "windowsCI": false,
-    "version": "4.18.0",
-    "publish": "true"
-  },
-  "author": "GitHub Inc.",
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/node_modules/pacote/node_modules/normalize-package-data/LICENSE b/node_modules/pacote/node_modules/normalize-package-data/LICENSE
new file mode 100644
index 0000000000000..19d1364a8ac08
--- /dev/null
+++ b/node_modules/pacote/node_modules/normalize-package-data/LICENSE
@@ -0,0 +1,15 @@
+This package contains code originally written by Isaac Z. Schlueter.
+Used with permission.
+
+Copyright (c) Meryn Stol ("Author")
+All rights reserved.
+
+The BSD License
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/extract_description.js b/node_modules/pacote/node_modules/normalize-package-data/lib/extract_description.js
new file mode 100644
index 0000000000000..631966b5f29af
--- /dev/null
+++ b/node_modules/pacote/node_modules/normalize-package-data/lib/extract_description.js
@@ -0,0 +1,24 @@
+module.exports = extractDescription
+
+// Extracts description from contents of a readme file in markdown format
+function extractDescription (d) {
+  if (!d) {
+    return
+  }
+  if (d === 'ERROR: No README data found!') {
+    return
+  }
+  // grab the first contiguous block of text, skipping any headings
+  // and blank lines at the top of the readme
+  d = d.trim().split('\n')
+  let s = 0
+  while (d[s] && d[s].trim().match(/^(#|$)/)) {
+    s++
+  }
+  const l = d.length
+  let e = s + 1
+  while (e < l && d[e].trim()) {
+    e++
+  }
+  return d.slice(s, e).join(' ').trim()
+}
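+
+// e.g. extractDescription('# pkg\n\nDoes a thing.\n\n## Usage') returns
+// 'Does a thing.' (illustrative example, derived from the logic above)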
diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/fixer.js b/node_modules/pacote/node_modules/normalize-package-data/lib/fixer.js
new file mode 100644
index 0000000000000..bb78231d83ca9
--- /dev/null
+++ b/node_modules/pacote/node_modules/normalize-package-data/lib/fixer.js
@@ -0,0 +1,475 @@
+var isValidSemver = require('semver/functions/valid')
+var cleanSemver = require('semver/functions/clean')
+var validateLicense = require('validate-npm-package-license')
+var hostedGitInfo = require('hosted-git-info')
+var isBuiltinModule = require('is-core-module')
+var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies']
+var extractDescription = require('./extract_description')
+var url = require('url')
+var typos = require('./typos.json')
+
+var isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
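+// e.g. isEmail('user@example.com') === true; isEmail('@nodots') === false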
+
+module.exports = {
+  // default warning function
+  warn: function () {},
+
+  fixRepositoryField: function (data) {
+    if (data.repositories) {
+      this.warn('repositories')
+      data.repository = data.repositories[0]
+    }
+    if (!data.repository) {
+      return this.warn('missingRepository')
+    }
+    if (typeof data.repository === 'string') {
+      data.repository = {
+        type: 'git',
+        url: data.repository,
+      }
+    }
+    var r = data.repository.url || ''
+    if (r) {
+      var hosted = hostedGitInfo.fromUrl(r)
+      if (hosted) {
+        r = data.repository.url
+          = hosted.getDefaultRepresentation() === 'shortcut' ? hosted.https() : hosted.toString()
+      }
+    }
+
+    if (r.match(/github.com\/[^/]+\/[^/]+\.git\.git$/)) {
+      this.warn('brokenGitUrl', r)
+    }
+  },
+
+  fixTypos: function (data) {
+    Object.keys(typos.topLevel).forEach(function (d) {
+      if (Object.prototype.hasOwnProperty.call(data, d)) {
+        this.warn('typo', d, typos.topLevel[d])
+      }
+    }, this)
+  },
+
+  fixScriptsField: function (data) {
+    if (!data.scripts) {
+      return
+    }
+    if (typeof data.scripts !== 'object') {
+      this.warn('nonObjectScripts')
+      delete data.scripts
+      return
+    }
+    Object.keys(data.scripts).forEach(function (k) {
+      if (typeof data.scripts[k] !== 'string') {
+        this.warn('nonStringScript')
+        delete data.scripts[k]
+      } else if (typos.script[k] && !data.scripts[typos.script[k]]) {
+        this.warn('typo', k, typos.script[k], 'scripts')
+      }
+    }, this)
+  },
+
+  fixFilesField: function (data) {
+    var files = data.files
+    if (files && !Array.isArray(files)) {
+      this.warn('nonArrayFiles')
+      delete data.files
+    } else if (data.files) {
+      data.files = data.files.filter(function (file) {
+        if (!file || typeof file !== 'string') {
+          this.warn('invalidFilename', file)
+          return false
+        } else {
+          return true
+        }
+      }, this)
+    }
+  },
+
+  fixBinField: function (data) {
+    if (!data.bin) {
+      return
+    }
+    if (typeof data.bin === 'string') {
+      var b = {}
+      var match
+      if (match = data.name.match(/^@[^/]+[/](.*)$/)) {
+        b[match[1]] = data.bin
+      } else {
+        b[data.name] = data.bin
+      }
+      data.bin = b
+    }
+  },
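+
+  // e.g. { name: '@scope/foo', bin: './cli.js' } is normalized to
+  // { bin: { foo: './cli.js' } }; unscoped names key the map on the name itself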
+
+  fixManField: function (data) {
+    if (!data.man) {
+      return
+    }
+    if (typeof data.man === 'string') {
+      data.man = [data.man]
+    }
+  },
+  fixBundleDependenciesField: function (data) {
+    var bdd = 'bundledDependencies'
+    var bd = 'bundleDependencies'
+    if (data[bdd] && !data[bd]) {
+      data[bd] = data[bdd]
+      delete data[bdd]
+    }
+    if (data[bd] && !Array.isArray(data[bd])) {
+      this.warn('nonArrayBundleDependencies')
+      delete data[bd]
+    } else if (data[bd]) {
+      data[bd] = data[bd].filter(function (filtered) {
+        if (!filtered || typeof filtered !== 'string') {
+          this.warn('nonStringBundleDependency', filtered)
+          return false
+        } else {
+          if (!data.dependencies) {
+            data.dependencies = {}
+          }
+          if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
+            this.warn('nonDependencyBundleDependency', filtered)
+            data.dependencies[filtered] = '*'
+          }
+          return true
+        }
+      }, this)
+    }
+  },
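+
+  // e.g. { bundledDependencies: ['a'] } is normalized to
+  // { bundleDependencies: ['a'], dependencies: { a: '*' } }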
+
+  fixDependencies: function (data, strict) {
+    objectifyDeps(data, this.warn)
+    addOptionalDepsToDeps(data, this.warn)
+    this.fixBundleDependenciesField(data)
+
+    ;['dependencies', 'devDependencies'].forEach(function (deps) {
+      if (!(deps in data)) {
+        return
+      }
+      if (!data[deps] || typeof data[deps] !== 'object') {
+        this.warn('nonObjectDependencies', deps)
+        delete data[deps]
+        return
+      }
+      Object.keys(data[deps]).forEach(function (d) {
+        var r = data[deps][d]
+        if (typeof r !== 'string') {
+          this.warn('nonStringDependency', d, JSON.stringify(r))
+          delete data[deps][d]
+        }
+        var hosted = hostedGitInfo.fromUrl(data[deps][d])
+        if (hosted) {
+          data[deps][d] = hosted.toString()
+        }
+      }, this)
+    }, this)
+  },
+
+  fixModulesField: function (data) {
+    if (data.modules) {
+      this.warn('deprecatedModules')
+      delete data.modules
+    }
+  },
+
+  fixKeywordsField: function (data) {
+    if (typeof data.keywords === 'string') {
+      data.keywords = data.keywords.split(/,\s+/)
+    }
+    if (data.keywords && !Array.isArray(data.keywords)) {
+      delete data.keywords
+      this.warn('nonArrayKeywords')
+    } else if (data.keywords) {
+      data.keywords = data.keywords.filter(function (kw) {
+        if (typeof kw !== 'string' || !kw) {
+          this.warn('nonStringKeyword')
+          return false
+        } else {
+          return true
+        }
+      }, this)
+    }
+  },
+
+  fixVersionField: function (data, strict) {
+    // allow "loose" semver 1.0 versions in non-strict mode
+    // enforce strict semver 2.0 compliance in strict mode
+    var loose = !strict
+    if (!data.version) {
+      data.version = ''
+      return true
+    }
+    if (!isValidSemver(data.version, loose)) {
+      throw new Error('Invalid version: "' + data.version + '"')
+    }
+    data.version = cleanSemver(data.version, loose)
+    return true
+  },
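+
+  // e.g. ' =v1.2.3 ' is cleaned to '1.2.3' when loose parsing is allowed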
+
+  fixPeople: function (data) {
+    modifyPeople(data, unParsePerson)
+    modifyPeople(data, parsePerson)
+  },
+
+  fixNameField: function (data, options) {
+    if (typeof options === 'boolean') {
+      options = { strict: options }
+    } else if (typeof options === 'undefined') {
+      options = {}
+    }
+    var strict = options.strict
+    if (!data.name && !strict) {
+      data.name = ''
+      return
+    }
+    if (typeof data.name !== 'string') {
+      throw new Error('name field must be a string.')
+    }
+    if (!strict) {
+      data.name = data.name.trim()
+    }
+    ensureValidName(data.name, strict, options.allowLegacyCase)
+    if (isBuiltinModule(data.name)) {
+      this.warn('conflictingName', data.name)
+    }
+  },
+
+  fixDescriptionField: function (data) {
+    if (data.description && typeof data.description !== 'string') {
+      this.warn('nonStringDescription')
+      delete data.description
+    }
+    if (data.readme && !data.description) {
+      data.description = extractDescription(data.readme)
+    }
+    if (data.description === undefined) {
+      delete data.description
+    }
+    if (!data.description) {
+      this.warn('missingDescription')
+    }
+  },
+
+  fixReadmeField: function (data) {
+    if (!data.readme) {
+      this.warn('missingReadme')
+      data.readme = 'ERROR: No README data found!'
+    }
+  },
+
+  fixBugsField: function (data) {
+    if (!data.bugs && data.repository && data.repository.url) {
+      var hosted = hostedGitInfo.fromUrl(data.repository.url)
+      if (hosted && hosted.bugs()) {
+        data.bugs = { url: hosted.bugs() }
+      }
+    } else if (data.bugs) {
+      if (typeof data.bugs === 'string') {
+        if (isEmail(data.bugs)) {
+          data.bugs = { email: data.bugs }
+        /* eslint-disable-next-line node/no-deprecated-api */
+        } else if (url.parse(data.bugs).protocol) {
+          data.bugs = { url: data.bugs }
+        } else {
+          this.warn('nonEmailUrlBugsString')
+        }
+      } else {
+        bugsTypos(data.bugs, this.warn)
+        var oldBugs = data.bugs
+        data.bugs = {}
+        if (oldBugs.url) {
+          /* eslint-disable-next-line node/no-deprecated-api */
+          if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
+            data.bugs.url = oldBugs.url
+          } else {
+            this.warn('nonUrlBugsUrlField')
+          }
+        }
+        if (oldBugs.email) {
+          if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
+            data.bugs.email = oldBugs.email
+          } else {
+            this.warn('nonEmailBugsEmailField')
+          }
+        }
+      }
+      if (!data.bugs.email && !data.bugs.url) {
+        delete data.bugs
+        this.warn('emptyNormalizedBugs')
+      }
+    }
+  },
+
+  fixHomepageField: function (data) {
+    if (!data.homepage && data.repository && data.repository.url) {
+      var hosted = hostedGitInfo.fromUrl(data.repository.url)
+      if (hosted && hosted.docs()) {
+        data.homepage = hosted.docs()
+      }
+    }
+    if (!data.homepage) {
+      return
+    }
+
+    if (typeof data.homepage !== 'string') {
+      this.warn('nonUrlHomepage')
+      return delete data.homepage
+    }
+    /* eslint-disable-next-line node/no-deprecated-api */
+    if (!url.parse(data.homepage).protocol) {
+      data.homepage = 'http://' + data.homepage
+    }
+  },
+
+  fixLicenseField: function (data) {
+    const license = data.license || data.licence
+    if (!license) {
+      return this.warn('missingLicense')
+    }
+    if (
+      typeof (license) !== 'string' ||
+      license.length < 1 ||
+      license.trim() === ''
+    ) {
+      return this.warn('invalidLicense')
+    }
+    if (!validateLicense(license).validForNewPackages) {
+      return this.warn('invalidLicense')
+    }
+  },
+}
+
+function isValidScopedPackageName (spec) {
+  if (spec.charAt(0) !== '@') {
+    return false
+  }
+
+  var rest = spec.slice(1).split('/')
+  if (rest.length !== 2) {
+    return false
+  }
+
+  return rest[0] && rest[1] &&
+    rest[0] === encodeURIComponent(rest[0]) &&
+    rest[1] === encodeURIComponent(rest[1])
+}
+
+function isCorrectlyEncodedName (spec) {
+  return !spec.match(/[/@\s+%:]/) &&
+    spec === encodeURIComponent(spec)
+}
+
+function ensureValidName (name, strict, allowLegacyCase) {
+  if (name.charAt(0) === '.' ||
+      !(isValidScopedPackageName(name) || isCorrectlyEncodedName(name)) ||
+      (strict && (!allowLegacyCase) && name !== name.toLowerCase()) ||
+      name.toLowerCase() === 'node_modules' ||
+      name.toLowerCase() === 'favicon.ico') {
+    throw new Error('Invalid name: ' + JSON.stringify(name))
+  }
+}
+
+function modifyPeople (data, fn) {
+  if (data.author) {
+    data.author = fn(data.author)
+  }
+  ;['maintainers', 'contributors'].forEach(function (set) {
+    if (!Array.isArray(data[set])) {
+      return
+    }
+    data[set] = data[set].map(fn)
+  })
+  return data
+}
+
+function unParsePerson (person) {
+  if (typeof person === 'string') {
+    return person
+  }
+  var name = person.name || ''
+  var u = person.url || person.web
+  var wrappedUrl = u ? (' (' + u + ')') : ''
+  var e = person.email || person.mail
+  var wrappedEmail = e ? (' <' + e + '>') : ''
+  return name + wrappedEmail + wrappedUrl
+}
+
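+// inverse of unParsePerson: parses a 'Name <email> (url)' string back into
+// an object, e.g. 'Jane Doe <jane@example.com> (https://example.com)' (illustrative)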
+function parsePerson (person) {
+  if (typeof person !== 'string') {
+    return person
+  }
+  var matchedName = person.match(/^([^(<]+)/)
+  var matchedUrl = person.match(/\(([^()]+)\)/)
+  var matchedEmail = person.match(/<([^<>]+)>/)
+  var obj = {}
+  if (matchedName && matchedName[0].trim()) {
+    obj.name = matchedName[0].trim()
+  }
+  if (matchedEmail) {
+    obj.email = matchedEmail[1]
+  }
+  if (matchedUrl) {
+    obj.url = matchedUrl[1]
+  }
+  return obj
+}
+
+function addOptionalDepsToDeps (data, warn) {
+  var o = data.optionalDependencies
+  if (!o) {
+    return
+  }
+  var d = data.dependencies || {}
+  Object.keys(o).forEach(function (k) {
+    d[k] = o[k]
+  })
+  data.dependencies = d
+}
+
+function depObjectify (deps, type, warn) {
+  if (!deps) {
+    return {}
+  }
+  if (typeof deps === 'string') {
+    deps = deps.trim().split(/[\n\r\s\t ,]+/)
+  }
+  if (!Array.isArray(deps)) {
+    return deps
+  }
+  warn('deprecatedArrayDependencies', type)
+  var o = {}
+  deps.filter(function (d) {
+    return typeof d === 'string'
+  }).forEach(function (d) {
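+    // the capturing group in the split below is deliberate: the matched
+    // separator (@, <, >, = or whitespace) stays in the pieces, so the
+    // join/replace cleanup can recover '>=1.0' from 'foo@>=1.0' (illustrative)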
+    d = d.trim().split(/(:?[@\s><=])/)
+    var dn = d.shift()
+    var dv = d.join('')
+    dv = dv.trim()
+    dv = dv.replace(/^@/, '')
+    o[dn] = dv
+  })
+  return o
+}
+
+function objectifyDeps (data, warn) {
+  depTypes.forEach(function (type) {
+    if (!data[type]) {
+      return
+    }
+    data[type] = depObjectify(data[type], type, warn)
+  })
+}
+
+function bugsTypos (bugs, warn) {
+  if (!bugs) {
+    return
+  }
+  Object.keys(bugs).forEach(function (k) {
+    if (typos.bugs[k]) {
+      warn('typo', k, typos.bugs[k], 'bugs')
+      bugs[typos.bugs[k]] = bugs[k]
+      delete bugs[k]
+    }
+  })
+}
diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/make_warning.js b/node_modules/pacote/node_modules/normalize-package-data/lib/make_warning.js
new file mode 100644
index 0000000000000..3be9c86539952
--- /dev/null
+++ b/node_modules/pacote/node_modules/normalize-package-data/lib/make_warning.js
@@ -0,0 +1,22 @@
+var util = require('util')
+var messages = require('./warning_messages.json')
+
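+// formats a warning: the first argument names a template in
+// warning_messages.json and the remaining arguments fill its %s slots,
+// e.g. makeWarning('nonObjectDependencies', 'dependencies') (illustrative usage)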
+module.exports = function () {
+  var args = Array.prototype.slice.call(arguments, 0)
+  var warningName = args.shift()
+  if (warningName === 'typo') {
+    return makeTypoWarning.apply(null, args)
+  } else {
+    var msgTemplate = messages[warningName] ? messages[warningName] : warningName + ": '%s'"
+    args.unshift(msgTemplate)
+    return util.format.apply(null, args)
+  }
+}
+
+function makeTypoWarning (providedName, probableName, field) {
+  if (field) {
+    providedName = field + "['" + providedName + "']"
+    probableName = field + "['" + probableName + "']"
+  }
+  return util.format(messages.typo, providedName, probableName)
+}
diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/normalize.js b/node_modules/pacote/node_modules/normalize-package-data/lib/normalize.js
new file mode 100644
index 0000000000000..bf71d2c1e2235
--- /dev/null
+++ b/node_modules/pacote/node_modules/normalize-package-data/lib/normalize.js
@@ -0,0 +1,48 @@
+module.exports = normalize
+
+var fixer = require('./fixer')
+normalize.fixer = fixer
+
+var makeWarning = require('./make_warning')
+
+var fieldsToFix = ['name', 'version', 'description', 'repository', 'modules', 'scripts',
+  'files', 'bin', 'man', 'bugs', 'keywords', 'readme', 'homepage', 'license']
+var otherThingsToFix = ['dependencies', 'people', 'typos']
+
+var thingsToFix = fieldsToFix.map(function (fieldName) {
+  return ucFirst(fieldName) + 'Field'
+})
+// two ways to do this in CoffeeScript on only one line, sub-70 chars:
+// thingsToFix = fieldsToFix.map (name) -> ucFirst(name) + "Field"
+// thingsToFix = (ucFirst(name) + "Field" for name in fieldsToFix)
+thingsToFix = thingsToFix.concat(otherThingsToFix)
+
+function normalize (data, warn, strict) {
+  if (warn === true) {
+    warn = null
+    strict = true
+  }
+  if (!strict) {
+    strict = false
+  }
+  if (!warn || data.private) {
+    warn = function (msg) { /* noop */ }
+  }
+
+  if (data.scripts &&
+      data.scripts.install === 'node-gyp rebuild' &&
+      !data.scripts.preinstall) {
+    data.gypfile = true
+  }
+  fixer.warn = function () {
+    warn(makeWarning.apply(null, arguments))
+  }
+  thingsToFix.forEach(function (thingName) {
+    fixer['fix' + ucFirst(thingName)](data, strict)
+  })
+  data._id = data.name + '@' + data.version
+}
+
+function ucFirst (string) {
+  return string.charAt(0).toUpperCase() + string.slice(1)
+}
diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/safe_format.js b/node_modules/pacote/node_modules/normalize-package-data/lib/safe_format.js
new file mode 100644
index 0000000000000..5fc888e5450cd
--- /dev/null
+++ b/node_modules/pacote/node_modules/normalize-package-data/lib/safe_format.js
@@ -0,0 +1,11 @@
+var util = require('util')
+
+module.exports = function () {
+  var args = Array.prototype.slice.call(arguments, 0)
+  args.forEach(function (arg) {
+    if (!arg) {
+      throw new TypeError('Bad arguments.')
+    }
+  })
+  return util.format.apply(null, arguments)
+}
diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/typos.json b/node_modules/pacote/node_modules/normalize-package-data/lib/typos.json
new file mode 100644
index 0000000000000..7f9dd283b30ff
--- /dev/null
+++ b/node_modules/pacote/node_modules/normalize-package-data/lib/typos.json
@@ -0,0 +1,25 @@
+{
+  "topLevel": {
+    "dependancies": "dependencies"
+   ,"dependecies": "dependencies"
+   ,"depdenencies": "dependencies"
+   ,"devEependencies": "devDependencies"
+   ,"depends": "dependencies"
+   ,"dev-dependencies": "devDependencies"
+   ,"devDependences": "devDependencies"
+   ,"devDepenencies": "devDependencies"
+   ,"devdependencies": "devDependencies"
+   ,"repostitory": "repository"
+   ,"repo": "repository"
+   ,"prefereGlobal": "preferGlobal"
+   ,"hompage": "homepage"
+   ,"hampage": "homepage"
+   ,"autohr": "author"
+   ,"autor": "author"
+   ,"contributers": "contributors"
+   ,"publicationConfig": "publishConfig"
+   ,"script": "scripts"
+  },
+  "bugs": { "web": "url", "name": "url" },
+  "script": { "server": "start", "tests": "test" }
+}
diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/warning_messages.json b/node_modules/pacote/node_modules/normalize-package-data/lib/warning_messages.json
new file mode 100644
index 0000000000000..4890f506ed965
--- /dev/null
+++ b/node_modules/pacote/node_modules/normalize-package-data/lib/warning_messages.json
@@ -0,0 +1,30 @@
+{
+  "repositories": "'repositories' (plural) Not supported. Please pick one as the 'repository' field"
+  ,"missingRepository": "No repository field."
+  ,"brokenGitUrl": "Probably broken git url: %s"
+  ,"nonObjectScripts": "scripts must be an object"
+  ,"nonStringScript": "script values must be string commands"
+  ,"nonArrayFiles": "Invalid 'files' member"
+  ,"invalidFilename": "Invalid filename in 'files' list: %s"
+  ,"nonArrayBundleDependencies": "Invalid 'bundleDependencies' list. Must be array of package names"
+  ,"nonStringBundleDependency": "Invalid bundleDependencies member: %s"
+  ,"nonDependencyBundleDependency": "Non-dependency in bundleDependencies: %s"
+  ,"nonObjectDependencies": "%s field must be an object"
+  ,"nonStringDependency": "Invalid dependency: %s %s"
+  ,"deprecatedArrayDependencies": "specifying %s as array is deprecated"
+  ,"deprecatedModules": "modules field is deprecated"
+  ,"nonArrayKeywords": "keywords should be an array of strings"
+  ,"nonStringKeyword": "keywords should be an array of strings"
+  ,"conflictingName": "%s is also the name of a node core module."
+  ,"nonStringDescription": "'description' field should be a string"
+  ,"missingDescription": "No description"
+  ,"missingReadme": "No README data"
+  ,"missingLicense": "No license field."
+  ,"nonEmailUrlBugsString": "Bug string field must be url, email, or {email,url}"
+  ,"nonUrlBugsUrlField": "bugs.url field must be a string url. Deleted."
+  ,"nonEmailBugsEmailField": "bugs.email field must be a string email. Deleted."
+  ,"emptyNormalizedBugs": "Normalized value of bugs field is an empty object. Deleted."
+  ,"nonUrlHomepage": "homepage field must be a string url. Deleted."
+  ,"invalidLicense": "license should be a valid SPDX license expression"
+  ,"typo": "%s should probably be %s."
+}
diff --git a/node_modules/pacote/node_modules/normalize-package-data/package.json b/node_modules/pacote/node_modules/normalize-package-data/package.json
new file mode 100644
index 0000000000000..48d2371d4a66b
--- /dev/null
+++ b/node_modules/pacote/node_modules/normalize-package-data/package.json
@@ -0,0 +1,62 @@
+{
+  "name": "normalize-package-data",
+  "version": "6.0.0",
+  "author": "GitHub Inc.",
+  "description": "Normalizes data that can be found in package.json files.",
+  "license": "BSD-2-Clause",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/normalize-package-data.git"
+  },
+  "main": "lib/normalize.js",
+  "scripts": {
+    "test": "tap",
+    "npmclilint": "npmcli-lint",
+    "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "dependencies": {
+    "hosted-git-info": "^7.0.0",
+    "is-core-module": "^2.8.1",
+    "semver": "^7.3.5",
+    "validate-npm-package-license": "^3.0.4"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.1"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^16.14.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.18.0",
+    "publish": "true",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
+  },
+  "tap": {
+    "branches": 86,
+    "functions": 92,
+    "lines": 86,
+    "statements": 86,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE
new file mode 100644
index 0000000000000..45055763dc838
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright (c) 2015, Rebecca Turner
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js
new file mode 100644
index 0000000000000..efc1247d59d12
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js
@@ -0,0 +1,122 @@
+'use strict'
+
+const parseUrl = require('./parse-url')
+
+// look for github shorthand inputs, such as npm/cli
+const isGitHubShorthand = (arg) => {
+  // it cannot contain whitespace before the first #
+  // it cannot start with a / because that's probably an absolute file path
+  // but it must include a slash since repos are username/repository
+  // it cannot start with a . because that's probably a relative file path
+  // it cannot start with an @ because that's a scoped package if it passes the other tests
+  // it cannot contain a : before a # because that tells us that there's a protocol
+  // a second / may not exist before a #
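+  // e.g. 'npm/cli' and 'npm/cli#v9.0.0' qualify as shorthand, while
+  // '@scope/pkg', './a/b', and 'git://host/repo' do not (illustrative)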
+  const firstHash = arg.indexOf('#')
+  const firstSlash = arg.indexOf('/')
+  const secondSlash = arg.indexOf('/', firstSlash + 1)
+  const firstColon = arg.indexOf(':')
+  const firstSpace = /\s/.exec(arg)
+  const firstAt = arg.indexOf('@')
+
+  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
+  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
+  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
+  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
+  const hasSlash = firstSlash > 0
+  // if a # is found, what we really want to know is that the character
+  // immediately before # is not a /
+  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
+  const doesNotStartWithDot = !arg.startsWith('.')
+
+  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
+    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
+    secondSlashOnlyAfterHash
+}
+
+module.exports = (giturl, opts, { gitHosts, protocols }) => {
+  if (!giturl) {
+    return
+  }
+
+  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
+  const parsed = parseUrl(correctedUrl, protocols)
+  if (!parsed) {
+    return
+  }
+
+  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
+  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
+    ? parsed.hostname.slice(4)
+    : parsed.hostname]
+  const gitHostName = gitHostShortcut || gitHostDomain
+  if (!gitHostName) {
+    return
+  }
+
+  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
+  let auth = null
+  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
+    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
+  }
+
+  let committish = null
+  let user = null
+  let project = null
+  let defaultRepresentation = null
+
+  try {
+    if (gitHostShortcut) {
+      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
+      const firstAt = pathname.indexOf('@')
+      // we ignore auth for shortcuts, so just trim it out
+      if (firstAt > -1) {
+        pathname = pathname.slice(firstAt + 1)
+      }
+
+      const lastSlash = pathname.lastIndexOf('/')
+      if (lastSlash > -1) {
+        user = decodeURIComponent(pathname.slice(0, lastSlash))
+        // we want nulls only, never empty strings
+        if (!user) {
+          user = null
+        }
+        project = decodeURIComponent(pathname.slice(lastSlash + 1))
+      } else {
+        project = decodeURIComponent(pathname)
+      }
+
+      if (project.endsWith('.git')) {
+        project = project.slice(0, -4)
+      }
+
+      if (parsed.hash) {
+        committish = decodeURIComponent(parsed.hash.slice(1))
+      }
+
+      defaultRepresentation = 'shortcut'
+    } else {
+      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
+        return
+      }
+
+      const segments = gitHostInfo.extract(parsed)
+      if (!segments) {
+        return
+      }
+
+      user = segments.user && decodeURIComponent(segments.user)
+      project = decodeURIComponent(segments.project)
+      committish = decodeURIComponent(segments.committish)
+      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
+    }
+  } catch (err) {
+    /* istanbul ignore else */
+    if (err instanceof URIError) {
+      return
+    } else {
+      throw err
+    }
+  }
+
+  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
+}
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js
new file mode 100644
index 0000000000000..013712b7842c8
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js
@@ -0,0 +1,228 @@
+/* eslint-disable max-len */
+
+'use strict'
+
+const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
+const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
+const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-')
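+// e.g. formatHashFragment('Usage Notes!') => 'usage-notes' (illustrative)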
+
+const defaults = {
+  sshtemplate: ({ domain, user, project, committish }) =>
+    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, user, project, committish }) =>
+    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
+  browsetemplate: ({ domain, user, project, committish, treepath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  docstemplate: ({ domain, user, project, treepath, committish }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
+  shortcuttemplate: ({ type, user, project, committish }) =>
+    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ user, project, committish }) =>
+    `${user}/${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, user, project }) =>
+    `https://${domain}/${user}/${project}/issues`,
+  hashformat: formatHashFragment,
+}
+
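+// each host below overrides only the templates that differ from the
+// defaults above; the Object.assign loop at the bottom of this file fills in the rest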
+const hosts = {}
+hosts.github = {
+  // First two are insecure and generally shouldn't be used any more, but
+  // they are still supported.
+  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'github.com',
+  treepath: 'tree',
+  blobpath: 'blob',
+  editpath: 'edit',
+  filetemplate: ({ auth, user, project, committish, path }) =>
+    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
+  gittemplate: ({ auth, domain, user, project, committish }) =>
+    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, type, committish] = url.pathname.split('/', 5)
+    if (type && type !== 'tree') {
+      return
+    }
+
+    if (!type) {
+      committish = url.hash.slice(1)
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish }
+  },
+}
+
+hosts.bitbucket = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'bitbucket.org',
+  treepath: 'src',
+  blobpath: 'src',
+  editpath: '?mode=edit',
+  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (['get'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gitlab = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gitlab.com',
+  treepath: 'tree',
+  blobpath: 'tree',
+  editpath: '-/edit',
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    const path = url.pathname.slice(1)
+    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
+      return
+    }
+
+    const segments = path.split('/')
+    let project = segments.pop()
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    const user = segments.join('/')
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gist = {
+  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gist.github.com',
+  editpath: 'edit',
+  sshtemplate: ({ domain, project, committish }) =>
+    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, project, committish }) =>
+    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
+  browsetemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  docstemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  httpstemplate: ({ domain, project, committish }) =>
+    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ user, project, committish, path }) =>
+    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
+  shortcuttemplate: ({ type, project, committish }) =>
+    `${type}:${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ project, committish }) =>
+    `${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, project }) =>
+    `https://${domain}/${project}`,
+  gittemplate: ({ domain, project, committish }) =>
+    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ project, committish }) =>
+    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (aux === 'raw') {
+      return
+    }
+
+    if (!project) {
+      if (!user) {
+        return
+      }
+
+      project = user
+      user = null
+    }
+
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+  hashformat: function (fragment) {
+    return fragment && 'file-' + formatHashFragment(fragment)
+  },
+}
+
+hosts.sourcehut = {
+  protocols: ['git+ssh:', 'https:'],
+  domain: 'git.sr.ht',
+  treepath: 'tree',
+  blobpath: 'tree',
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
+  httpstemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
+  bugstemplate: ({ user, project }) =>
+    `https://todo.sr.ht/${user}/${project}`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+
+    // tarball url
+    if (['archive'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  hosts[name] = Object.assign({}, defaults, host)
+}
+
+module.exports = hosts
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js
new file mode 100644
index 0000000000000..a7339c217e9a3
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js
@@ -0,0 +1,179 @@
+'use strict'
+
+const LRU = require('lru-cache')
+const hosts = require('./hosts.js')
+const fromUrl = require('./from-url.js')
+const parseUrl = require('./parse-url.js')
+
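+// fromUrl results (including parse misses) are memoized per url+opts pair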
+const cache = new LRU({ max: 1000 })
+
+class GitHost {
+  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
+    Object.assign(this, GitHost.#gitHosts[type], {
+      type,
+      user,
+      auth,
+      project,
+      committish,
+      default: defaultRepresentation,
+      opts,
+    })
+  }
+
+  static #gitHosts = { byShortcut: {}, byDomain: {} }
+  static #protocols = {
+    'git+ssh:': { name: 'sshurl' },
+    'ssh:': { name: 'sshurl' },
+    'git+https:': { name: 'https', auth: true },
+    'git:': { auth: true },
+    'http:': { auth: true },
+    'https:': { auth: true },
+    'git+http:': { auth: true },
+  }
+
+  static addHost (name, host) {
+    GitHost.#gitHosts[name] = host
+    GitHost.#gitHosts.byDomain[host.domain] = name
+    GitHost.#gitHosts.byShortcut[`${name}:`] = name
+    GitHost.#protocols[`${name}:`] = { name }
+  }
+
+  static fromUrl (giturl, opts) {
+    if (typeof giturl !== 'string') {
+      return
+    }
+
+    const key = giturl + JSON.stringify(opts || {})
+
+    if (!cache.has(key)) {
+      const hostArgs = fromUrl(giturl, opts, {
+        gitHosts: GitHost.#gitHosts,
+        protocols: GitHost.#protocols,
+      })
+      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
+    }
+
+    return cache.get(key)
+  }
+
+  static parseUrl (url) {
+    return parseUrl(url)
+  }
+
+  #fill (template, opts) {
+    if (typeof template !== 'function') {
+      return null
+    }
+
+    const options = { ...this, ...this.opts, ...opts }
+
+    // the path should always be set so we don't end up with 'undefined' in urls
+    if (!options.path) {
+      options.path = ''
+    }
+
+    // template functions will insert the leading slash themselves
+    if (options.path.startsWith('/')) {
+      options.path = options.path.slice(1)
+    }
+
+    if (options.noCommittish) {
+      options.committish = null
+    }
+
+    const result = template(options)
+    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
+  }
+
+  hash () {
+    return this.committish ? `#${this.committish}` : ''
+  }
+
+  ssh (opts) {
+    return this.#fill(this.sshtemplate, opts)
+  }
+
+  sshurl (opts) {
+    return this.#fill(this.sshurltemplate, opts)
+  }
+
+  browse (path, ...args) {
+    // not a string, treat path as opts
+    if (typeof path !== 'string') {
+      return this.#fill(this.browsetemplate, path)
+    }
+
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browsetreetemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  // If the path is known to be a file, then browseFile should be used. For some hosts
+  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
+  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
+  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
+  // does not redirect to a different commit.
+  browseFile (path, ...args) {
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browseblobtemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  docs (opts) {
+    return this.#fill(this.docstemplate, opts)
+  }
+
+  bugs (opts) {
+    return this.#fill(this.bugstemplate, opts)
+  }
+
+  https (opts) {
+    return this.#fill(this.httpstemplate, opts)
+  }
+
+  git (opts) {
+    return this.#fill(this.gittemplate, opts)
+  }
+
+  shortcut (opts) {
+    return this.#fill(this.shortcuttemplate, opts)
+  }
+
+  path (opts) {
+    return this.#fill(this.pathtemplate, opts)
+  }
+
+  tarball (opts) {
+    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
+  }
+
+  file (path, opts) {
+    return this.#fill(this.filetemplate, { ...opts, path })
+  }
+
+  edit (path, opts) {
+    return this.#fill(this.edittemplate, { ...opts, path })
+  }
+
+  getDefaultRepresentation () {
+    return this.default
+  }
+
+  toString (opts) {
+    if (this.default && typeof this[this.default] === 'function') {
+      return this[this.default](opts)
+    }
+
+    return this.sshurl(opts)
+  }
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  GitHost.addHost(name, host)
+}
+
+module.exports = GitHost
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js
new file mode 100644
index 0000000000000..7d5489c008ab4
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js
@@ -0,0 +1,78 @@
+const url = require('url')
+
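+// last index of `char` that occurs before `beforeChar` (the whole string is
+// searched when `beforeChar` is absent); used to skip @ and : inside a #committish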
+const lastIndexOfBefore = (str, char, beforeChar) => {
+  const startPosition = str.indexOf(beforeChar)
+  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
+}
+
+const safeUrl = (u) => {
+  try {
+    return new url.URL(u)
+  } catch {
+    // this fn should never throw
+  }
+}
+
+// accepts input like git:github.com:user/repo and inserts the // after the first :
+const correctProtocol = (arg, protocols) => {
+  const firstColon = arg.indexOf(':')
+  const proto = arg.slice(0, firstColon + 1)
+  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
+    return arg
+  }
+
+  const firstAt = arg.indexOf('@')
+  if (firstAt > -1) {
+    if (firstAt > firstColon) {
+      return `git+ssh://${arg}`
+    } else {
+      return arg
+    }
+  }
+
+  const doubleSlash = arg.indexOf('//')
+  if (doubleSlash === firstColon + 1) {
+    return arg
+  }
+
+  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
+}
+
+// attempt to correct an scp style url so that it will parse with `new URL()`
+const correctUrl = (giturl) => {
+  // ignore any @ that comes after the first hash, since that denotes the
+  // start of a committish, which can contain @ characters
+  const firstAt = lastIndexOfBefore(giturl, '@', '#')
+  // ignore colons that come after the hash since that could include colons such as:
+  // git@github.com:user/package-2#semver:^1.0.0
+  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
+
+  if (lastColonBeforeHash > firstAt) {
+    // the last : comes after the first @ (or there is no @)
+    // like it would in:
+    // proto://hostname.com:user/repo
+    // username@hostname.com:user/repo
+    // :password@hostname.com:user/repo
+    // username:password@hostname.com:user/repo
+    // proto://username@hostname.com:user/repo
+    // proto://:password@hostname.com:user/repo
+    // proto://username:password@hostname.com:user/repo
+    // then we replace the last : with a / to create a valid path
+    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
+  }
+
+  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
+    // we have no : at all
+    // as it would be in:
+    // username@hostname.com/user/repo
+    // then we prepend a protocol
+    giturl = `git+ssh://${giturl}`
+  }
+
+  return giturl
+}
+
+module.exports = (giturl, protocols) => {
+  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
+  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
+}
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json
new file mode 100644
index 0000000000000..612259948afe7
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json
@@ -0,0 +1,59 @@
+{
+  "name": "hosted-git-info",
+  "version": "6.1.1",
+  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
+  "main": "./lib/index.js",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/hosted-git-info.git"
+  },
+  "keywords": [
+    "git",
+    "github",
+    "bitbucket",
+    "gitlab"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/hosted-git-info/issues"
+  },
+  "homepage": "https://github.com/npm/hosted-git-info",
+  "scripts": {
+    "posttest": "npm run lint",
+    "snap": "tap",
+    "test": "tap",
+    "test:coverage": "tap --coverage-report=html",
+    "lint": "eslint \"**/*.js\"",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run lint -- --fix",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "dependencies": {
+    "lru-cache": "^7.5.1"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.7.1",
+    "tap": "^16.0.1"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "tap": {
+    "color": 1,
+    "coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.7.1"
+  }
+}
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
new file mode 100644
index 0000000000000..19cec97b18468
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
new file mode 100644
index 0000000000000..36bd18cd9f9a6
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
@@ -0,0 +1,431 @@
+'use strict'
+module.exports = npa
+module.exports.resolve = resolve
+module.exports.toPurl = toPurl
+module.exports.Result = Result
+
+const url = require('url')
+const HostedGit = require('hosted-git-info')
+const semver = require('semver')
+const path = global.FAKE_WINDOWS ? require('path').win32 : require('path')
+const validatePackageName = require('validate-npm-package-name')
+const { homedir } = require('os')
+const log = require('proc-log')
+
+const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
+const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
+const isURL = /^(?:git[+])?[a-z]+:/i
+const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
+const isFilename = /[.](?:tgz|tar.gz|tar)$/i
+
+function npa (arg, where) {
+  let name
+  let spec
+  if (typeof arg === 'object') {
+    if (arg instanceof Result && (!where || where === arg.where)) {
+      return arg
+    } else if (arg.name && arg.rawSpec) {
+      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
+    } else {
+      return npa(arg.raw, where || arg.where)
+    }
+  }
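+  // the leading @ of a scoped name is skipped when locating the name/spec
+  // separator, so '@scope/pkg@1.0' yields name '@scope/pkg' and spec '1.0' (illustrative)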
+  const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@')
+  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
+  if (isURL.test(arg)) {
+    spec = arg
+  } else if (isGit.test(arg)) {
+    spec = `git+ssh://${arg}`
+  } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) {
+    spec = arg
+  } else if (nameEndsAt > 0) {
+    name = namePart
+    spec = arg.slice(nameEndsAt + 1) || '*'
+  } else {
+    const valid = validatePackageName(arg)
+    if (valid.validForOldPackages) {
+      name = arg
+      spec = '*'
+    } else {
+      spec = arg
+    }
+  }
+  return resolve(name, spec, where, arg)
+}
+
+const isFilespec = isWindows ? /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
+
+function resolve (name, spec, where, arg) {
+  const res = new Result({
+    raw: arg,
+    name: name,
+    rawSpec: spec,
+    fromArgument: arg != null,
+  })
+
+  if (name) {
+    res.setName(name)
+  }
+
+  if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) {
+    return fromFile(res, where)
+  } else if (spec && /^npm:/i.test(spec)) {
+    return fromAlias(res, where)
+  }
+
+  const hosted = HostedGit.fromUrl(spec, {
+    noGitPlus: true,
+    noCommittish: true,
+  })
+  if (hosted) {
+    return fromHostedGit(res, hosted)
+  } else if (spec && isURL.test(spec)) {
+    return fromURL(res)
+  } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) {
+    return fromFile(res, where)
+  } else {
+    return fromRegistry(res)
+  }
+}
+
+const defaultRegistry = 'https://registry.npmjs.org'
+
+function toPurl (arg, reg = defaultRegistry) {
+  const res = npa(arg)
+
+  if (res.type !== 'version') {
+    throw invalidPurlType(res.type, res.raw)
+  }
+
+  // URI-encode leading @ of scoped packages
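+  // e.g. toPurl('@scope/pkg@1.2.3') => 'pkg:npm/%40scope/pkg@1.2.3' (illustrative)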
+  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
+  if (reg !== defaultRegistry) {
+    purl += '?repository_url=' + reg
+  }
+
+  return purl
+}
+
+function invalidPackageName (name, valid, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
+  err.code = 'EINVALIDPACKAGENAME'
+  return err
+}
+
+function invalidTagName (name, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
+  err.code = 'EINVALIDTAGNAME'
+  return err
+}
+
+function invalidPurlType (type, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
+  err.code = 'EINVALIDPURLTYPE'
+  return err
+}
+
+function Result (opts) {
+  this.type = opts.type
+  this.registry = opts.registry
+  this.where = opts.where
+  if (opts.raw == null) {
+    this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec
+  } else {
+    this.raw = opts.raw
+  }
+
+  this.name = undefined
+  this.escapedName = undefined
+  this.scope = undefined
+  this.rawSpec = opts.rawSpec || ''
+  this.saveSpec = opts.saveSpec
+  this.fetchSpec = opts.fetchSpec
+  if (opts.name) {
+    this.setName(opts.name)
+  }
+  this.gitRange = opts.gitRange
+  this.gitCommittish = opts.gitCommittish
+  this.gitSubdir = opts.gitSubdir
+  this.hosted = opts.hosted
+}
+
+Result.prototype.setName = function (name) {
+  const valid = validatePackageName(name)
+  if (!valid.validForOldPackages) {
+    throw invalidPackageName(name, valid, this.raw)
+  }
+
+  this.name = name
+  this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
+  // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
+  this.escapedName = name.replace('/', '%2f')
+  return this
+}
+
+Result.prototype.toString = function () {
+  const full = []
+  if (this.name != null && this.name !== '') {
+    full.push(this.name)
+  }
+  const spec = this.saveSpec || this.fetchSpec || this.rawSpec
+  if (spec != null && spec !== '') {
+    full.push(spec)
+  }
+  return full.length ? full.join('@') : this.raw
+}
+
+Result.prototype.toJSON = function () {
+  const result = Object.assign({}, this)
+  delete result.hosted
+  return result
+}
+
+function setGitCommittish (res, committish) {
+  if (!committish) {
+    res.gitCommittish = null
+    return res
+  }
+
+  // for each :: separated item:
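+  // e.g. 'semver:^1.0.0::path:packages/foo' (illustrative) sets a gitRange and
+  // a gitSubdir, while a bare 'v1.2.3' sets only gitCommittish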
+  for (const part of committish.split('::')) {
+    // if the item has no : then it is a commit-ish
+    if (!part.includes(':')) {
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a committish')
+      }
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a second committish')
+      }
+      res.gitCommittish = part
+      continue
+    }
+    // split on name:value
+    const [name, value] = part.split(':')
+    // if name is semver do semver lookup of ref or tag
+    if (name === 'semver') {
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a semver range')
+      }
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a second semver range')
+      }
+      res.gitRange = decodeURIComponent(value)
+      continue
+    }
+    if (name === 'path') {
+      if (res.gitSubdir) {
+        throw new Error('cannot override existing path with a second path')
+      }
+      res.gitSubdir = `/${value}`
+      continue
+    }
+    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
+  }
+
+  return res
+}
+
+function fromFile (res, where) {
+  if (!where) {
+    where = process.cwd()
+  }
+  res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory'
+  res.where = where
+
+  // always put the '/' on where when resolving urls, or else
+  // file:foo from /path/to/bar goes to /path/to/foo, when we want
+  // it to be /path/to/bar/foo
+
+  let specUrl
+  let resolvedUrl
+  const prefix = (!/^file:/.test(res.rawSpec) ? 'file:' : '')
+  const rawWithPrefix = prefix + res.rawSpec
+  let rawNoPrefix = rawWithPrefix.replace(/^file:/, '')
+  try {
+    resolvedUrl = new url.URL(rawWithPrefix, `file://${path.resolve(where)}/`)
+    specUrl = new url.URL(rawWithPrefix)
+  } catch (originalError) {
+    const er = new Error('Invalid file: URL, must comply with RFC 8909')
+    throw Object.assign(er, {
+      raw: res.rawSpec,
+      spec: res,
+      where,
+      originalError,
+    })
+  }
+
+  // environment switch for testing
+  if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') {
+    // XXX backwards compatibility lack of compliance with 8909
+    // Remove when we want a breaking change to come into RFC compliance.
+    if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
+      const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
+      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
+      specUrl = new url.URL(rawSpec)
+      rawNoPrefix = rawSpec.replace(/^file:/, '')
+    }
+    // turn file:/../foo into file:../foo
+    // for 1, 2 or 3 leading slashes since we attempted
+    // in the previous step to make it a file protocol url with a leading slash
+    if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
+      const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
+      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
+      specUrl = new url.URL(rawSpec)
+      rawNoPrefix = rawSpec.replace(/^file:/, '')
+    }
+    // XXX end 8909 violation backwards compatibility section
+  }
+
+  // file:foo - relative url to ./foo
+  // file:/foo - absolute path /foo
+  // file:///foo - absolute path to /foo, no authority host
+  // file://localhost/foo - absolute path to /foo, on localhost
+  // file://foo - absolute path to / on foo host (error!)
+  if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
+    const msg = `Invalid file: URL, must be absolute if // present`
+    throw Object.assign(new Error(msg), {
+      raw: res.rawSpec,
+      parsed: resolvedUrl,
+    })
+  }
+
+  // turn /C:/blah into just C:/blah on windows
+  let specPath = decodeURIComponent(specUrl.pathname)
+  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
+  if (isWindows) {
+    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
+    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
+  }
+
+  // replace ~ with homedir, but keep the ~ in the saveSpec
+  // otherwise, make it relative to where param
+  if (/^\/~(\/|$)/.test(specPath)) {
+    res.saveSpec = `file:${specPath.substr(1)}`
+    resolvedPath = path.resolve(homedir(), specPath.substr(3))
+  } else if (!path.isAbsolute(rawNoPrefix)) {
+    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
+  } else {
+    res.saveSpec = `file:${path.resolve(resolvedPath)}`
+  }
+
+  res.fetchSpec = path.resolve(where, resolvedPath)
+  return res
+}
+
+function fromHostedGit (res, hosted) {
+  res.type = 'git'
+  res.hosted = hosted
+  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
+  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
+  return setGitCommittish(res, hosted.committish)
+}
+
+function unsupportedURLType (protocol, spec) {
+  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
+  err.code = 'EUNSUPPORTEDPROTOCOL'
+  return err
+}
+
+function matchGitScp (spec) {
+  // git ssh specifiers are overloaded to also use scp-style git
+  // specifiers, so we have to parse those out and treat them specially.
+  // They are NOT true URIs, so we can't hand them to `url.parse`.
+  //
+  // This regex looks for things that look like:
+  // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
+  //
+  // ...and various combinations. The username in the beginning is *required*.
+  const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
+  return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && {
+    fetchSpec: matched[1],
+    gitCommittish: matched[2] == null ? null : matched[2],
+  }
+}
+
+function fromURL (res) {
+  // eslint-disable-next-line node/no-deprecated-api
+  const urlparse = url.parse(res.rawSpec)
+  res.saveSpec = res.rawSpec
+  // check the protocol, and then see if it's git or not
+  switch (urlparse.protocol) {
+    case 'git:':
+    case 'git+http:':
+    case 'git+https:':
+    case 'git+rsync:':
+    case 'git+ftp:':
+    case 'git+file:':
+    case 'git+ssh:': {
+      res.type = 'git'
+      const match = urlparse.protocol === 'git+ssh:' ? matchGitScp(res.rawSpec)
+        : null
+      if (match) {
+        setGitCommittish(res, match.gitCommittish)
+        res.fetchSpec = match.fetchSpec
+      } else {
+        setGitCommittish(res, urlparse.hash != null ? urlparse.hash.slice(1) : '')
+        urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '')
+        if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) {
+          // keep the drive letter : on windows file paths
+          urlparse.host += ':'
+          urlparse.hostname += ':'
+        }
+        delete urlparse.hash
+        res.fetchSpec = url.format(urlparse)
+      }
+      break
+    }
+    case 'http:':
+    case 'https:':
+      res.type = 'remote'
+      res.fetchSpec = res.saveSpec
+      break
+
+    default:
+      throw unsupportedURLType(urlparse.protocol, res.rawSpec)
+  }
+
+  return res
+}
+
+function fromAlias (res, where) {
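+  // rawSpec looks like 'npm:name@range' (illustrative); substr(4) strips the 'npm:' prefix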
+  const subSpec = npa(res.rawSpec.substr(4), where)
+  if (subSpec.type === 'alias') {
+    throw new Error('nested aliases not supported')
+  }
+
+  if (!subSpec.registry) {
+    throw new Error('aliases only work for registry deps')
+  }
+
+  res.subSpec = subSpec
+  res.registry = true
+  res.type = 'alias'
+  res.saveSpec = null
+  res.fetchSpec = null
+  return res
+}
+
+function fromRegistry (res) {
+  res.registry = true
+  const spec = res.rawSpec.trim()
+  // no save spec for registry deps: what gets saved is the fetched
+  // version, not the argument, so it can't be computed here.
+  res.saveSpec = null
+  res.fetchSpec = spec
+  const version = semver.valid(spec, true)
+  const range = semver.validRange(spec, true)
+  if (version) {
+    res.type = 'version'
+  } else if (range) {
+    res.type = 'range'
+  } else {
+    if (encodeURIComponent(spec) !== spec) {
+      throw invalidTagName(spec, res.raw)
+    }
+    res.type = 'tag'
+  }
+  return res
+}
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
new file mode 100644
index 0000000000000..bb9e71b258a93
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
@@ -0,0 +1,59 @@
+{
+  "name": "npm-package-arg",
+  "version": "10.1.0",
+  "description": "Parse the things that can be arguments to `npm install`",
+  "main": "./lib/npa.js",
+  "directories": {
+    "test": "test"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "dependencies": {
+    "hosted-git-info": "^6.0.0",
+    "proc-log": "^3.0.0",
+    "semver": "^7.3.5",
+    "validate-npm-package-name": "^5.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.10.0",
+    "tap": "^16.0.1"
+  },
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "npmclilint": "npmcli-lint",
+    "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/npm-package-arg.git"
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/npm-package-arg/issues"
+  },
+  "homepage": "https://github.com/npm/npm-package-arg",
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "tap": {
+    "branches": 97,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.10.0"
+  }
+}
diff --git a/node_modules/pacote/node_modules/read-package-json/LICENSE b/node_modules/pacote/node_modules/read-package-json/LICENSE
new file mode 100644
index 0000000000000..052085c436514
--- /dev/null
+++ b/node_modules/pacote/node_modules/read-package-json/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/read-package-json/lib/read-json.js b/node_modules/pacote/node_modules/read-package-json/lib/read-json.js
new file mode 100644
index 0000000000000..d35f09ebd208f
--- /dev/null
+++ b/node_modules/pacote/node_modules/read-package-json/lib/read-json.js
@@ -0,0 +1,589 @@
+var fs = require('fs')
+
+var path = require('path')
+
+var { glob } = require('glob')
+var normalizeData = require('normalize-package-data')
+var safeJSON = require('json-parse-even-better-errors')
+var util = require('util')
+var normalizePackageBin = require('npm-normalize-package-bin')
+
+module.exports = readJson
+
+// each of these functions augments the parsed data; put more here to customize
+readJson.extraSet = [
+  bundleDependencies,
+  gypfile,
+  serverjs,
+  scriptpath,
+  authors,
+  readme,
+  mans,
+  bins,
+  githead,
+  fillTypes,
+]
+
+var typoWarned = {}
+var cache = {}
+
+function readJson (file, log_, strict_, cb_) {
+  var log, strict, cb
+  for (var i = 1; i < arguments.length - 1; i++) {
+    if (typeof arguments[i] === 'boolean') {
+      strict = arguments[i]
+    } else if (typeof arguments[i] === 'function') {
+      log = arguments[i]
+    }
+  }
+
+  if (!log) {
+    log = function () {}
+  }
+  cb = arguments[arguments.length - 1]
+
+  readJson_(file, log, strict, cb)
+}
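+
+// Example usage (illustrative):
+//
+//   readJson('/path/to/package.json', console.error, false, function (er, data) {
+//     if (er) {
+//       return console.error('There was an error reading the file')
+//     }
+//     console.log('the package data is', data)
+//   })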
+
+function readJson_ (file, log, strict, cb) {
+  fs.readFile(file, 'utf8', function (er, d) {
+    parseJson(file, er, d, log, strict, cb)
+  })
+}
+
+function stripBOM (content) {
+  // Remove the byte order marker. This catches EF BB BF (the UTF-8 BOM)
+  // because reading the file as 'utf8' translates those bytes to FEFF,
+  // the UTF-16 BOM code point.
+  if (content.charCodeAt(0) === 0xFEFF) {
+    content = content.slice(1)
+  }
+  return content
+}
+
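+// deep-clone plain JSON values (arrays, objects, primitives) so cached parse
+// results stay isolated from caller mutations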
+function jsonClone (obj) {
+  if (obj == null) {
+    return obj
+  } else if (Array.isArray(obj)) {
+    var newarr = new Array(obj.length)
+    for (var ii in obj) {
+      newarr[ii] = jsonClone(obj[ii])
+    }
+    return newarr
+  } else if (typeof obj === 'object') {
+    var newobj = {}
+    for (var kk in obj) {
+      newobj[kk] = jsonClone(obj[kk])
+    }
+    return newobj
+  } else {
+    return obj
+  }
+}
+
+function parseJson (file, er, d, log, strict, cb) {
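+  // when package.json is missing entirely, fall back to extracting a
+  // `/**package ... **/` block from a sibling index.js (see indexjs below)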
+  if (er && er.code === 'ENOENT') {
+    return fs.stat(path.dirname(file), function (err, stat) {
+      if (!err && stat && !stat.isDirectory()) {
+        // ENOTDIR isn't used on Windows, but npm expects it.
+        er = Object.create(er)
+        er.code = 'ENOTDIR'
+        return cb(er)
+      } else {
+        return indexjs(file, er, log, strict, cb)
+      }
+    })
+  }
+  if (er) {
+    return cb(er)
+  }
+
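+  // parse results are cached keyed on the raw file contents; return a clone
+  // so callers cannot mutate the cached copy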
+  if (cache[d]) {
+    return cb(null, jsonClone(cache[d]))
+  }
+
+  var data
+
+  try {
+    data = safeJSON(stripBOM(d))
+    for (var key in data) {
+      if (/^_/.test(key)) {
+        delete data[key]
+      }
+    }
+  } catch (jsonErr) {
+    data = parseIndex(d)
+    if (!data) {
+      return cb(parseError(jsonErr, file))
+    }
+  }
+  extrasCached(file, d, data, log, strict, cb)
+}
+
+function extrasCached (file, d, data, log, strict, cb) {
+  extras(file, data, log, strict, function (err, extrasData) {
+    if (!err) {
+      cache[d] = jsonClone(extrasData)
+    }
+    cb(err, extrasData)
+  })
+}
+
+function indexjs (file, er, log, strict, cb) {
+  if (path.basename(file) === 'index.js') {
+    return cb(er)
+  }
+
+  var index = path.resolve(path.dirname(file), 'index.js')
+  fs.readFile(index, 'utf8', function (er2, d) {
+    if (er2) {
+      return cb(er)
+    }
+
+    if (cache[d]) {
+      return cb(null, cache[d])
+    }
+
+    var data = parseIndex(d)
+    if (!data) {
+      return cb(er)
+    }
+
+    extrasCached(file, d, data, log, strict, cb)
+  })
+}
+
+readJson.extras = extras
+function extras (file, data, log_, strict_, cb_) {
+  var log, strict, cb
+  for (var i = 2; i < arguments.length - 1; i++) {
+    if (typeof arguments[i] === 'boolean') {
+      strict = arguments[i]
+    } else if (typeof arguments[i] === 'function') {
+      log = arguments[i]
+    }
+  }
+
+  if (!log) {
+    log = function () {}
+  }
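+  // the loop above leaves i === arguments.length - 1, i.e. the callback slot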
+  cb = arguments[i]
+
+  var set = readJson.extraSet
+  var n = set.length
+  var errState = null
+  set.forEach(function (fn) {
+    fn(file, data, then)
+  })
+
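+  // countdown latch: each extraSet function calls back into then(); the first
+  // error wins, and the last successful completion runs final()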
+  function then (er) {
+    if (errState) {
+      return
+    }
+    if (er) {
+      return cb(errState = er)
+    }
+    if (--n > 0) {
+      return
+    }
+    final(file, data, log, strict, cb)
+  }
+}
+
+function scriptpath (file, data, cb) {
+  if (!data.scripts) {
+    return cb(null, data)
+  }
+  var k = Object.keys(data.scripts)
+  k.forEach(scriptpath_, data.scripts)
+  cb(null, data)
+}
+
+function scriptpath_ (key) {
+  var s = this[key]
+  // a non-string script value is never allowed and only causes problems
+  if (typeof s !== 'string') {
+    return delete this[key]
+  }
+
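+  // strip a redundant leading "node_modules/.bin/"; npm already puts that
+  // directory on the PATH when running package scripts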
+  var spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
+  if (s.match(spre)) {
+    this[key] = this[key].replace(spre, '')
+  }
+}
+
+function gypfile (file, data, cb) {
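+  // a package that ships a *.gyp file but no (pre)install script gets a
+  // default install script of `node-gyp rebuild`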
+  var dir = path.dirname(file)
+  var s = data.scripts || {}
+  if (s.install || s.preinstall) {
+    return cb(null, data)
+  }
+
+  if (data.gypfile === false) {
+    return cb(null, data)
+  }
+  glob('*.gyp', { cwd: dir })
+    .then(files => gypfile_(file, data, files, cb))
+    .catch(er => cb(er))
+}
+
+function gypfile_ (file, data, files, cb) {
+  if (!files.length) {
+    return cb(null, data)
+  }
+  var s = data.scripts || {}
+  s.install = 'node-gyp rebuild'
+  data.scripts = s
+  data.gypfile = true
+  return cb(null, data)
+}
+
+function serverjs (file, data, cb) {
+  var dir = path.dirname(file)
+  var s = data.scripts || {}
+  if (s.start) {
+    return cb(null, data)
+  }
+  fs.access(path.join(dir, 'server.js'), (err) => {
+    if (!err) {
+      s.start = 'node server.js'
+      data.scripts = s
+    }
+    return cb(null, data)
+  })
+}
+
+function authors (file, data, cb) {
+  if (data.contributors) {
+    return cb(null, data)
+  }
+  var af = path.resolve(path.dirname(file), 'AUTHORS')
+  fs.readFile(af, 'utf8', function (er, ad) {
+    // ignore the error; we were only checking whether the file exists
+    if (er) {
+      return cb(null, data)
+    }
+    authors_(file, data, ad, cb)
+  })
+}
+
+function authors_ (file, data, ad, cb) {
+  ad = ad.split(/\r?\n/g).map(function (line) {
+    return line.replace(/^\s*#.*$/, '').trim()
+  }).filter(function (line) {
+    return line
+  })
+  data.contributors = ad
+  return cb(null, data)
+}
+
+function readme (file, data, cb) {
+  if (data.readme) {
+    return cb(null, data)
+  }
+  var dir = path.dirname(file)
+  var globOpts = { cwd: dir, nocase: true, mark: true }
+  glob('{README,README.*}', globOpts)
+    .then(files => {
+      // don't accept directories.
+      files = files.filter(function (filtered) {
+        return !filtered.match(/\/$/)
+      })
+      if (!files.length) {
+        return cb()
+      }
+      var fn = preferMarkdownReadme(files)
+      var rm = path.resolve(dir, fn)
+      return readme_(file, data, rm, cb)
+    })
+    .catch(er => cb(er))
+}
+
+function preferMarkdownReadme (files) {
+  var fallback = 0
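+  // the extension regex matches any in-order subset of the letters in
+  // "markdown" (.md, .mkd, .markdown, ...)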
+  var re = /\.m?a?r?k?d?o?w?n?$/i
+  for (var i = 0; i < files.length; i++) {
+    if (files[i].match(re)) {
+      return files[i]
+    } else if (files[i].match(/README$/)) {
+      fallback = i
+    }
+  }
+  // prefer README.md, followed by README; otherwise, return
+  // the first filename (which could be README)
+  return files[fallback]
+}
+
+function readme_ (file, data, rm, cb) {
+  var rmfn = path.basename(rm)
+  fs.readFile(rm, 'utf8', function (er, rmData) {
+    // maybe not readable, or something.
+    if (er) {
+      return cb()
+    }
+    data.readme = rmData
+    data.readmeFilename = rmfn
+    return cb(er, data)
+  })
+}
+
+function mans (file, data, cb) {
+  let cwd = data.directories && data.directories.man
+  if (data.man || !cwd) {
+    return cb(null, data)
+  }
+  const dirname = path.dirname(file)
+  cwd = path.resolve(path.dirname(file), cwd)
+  glob('**/*.[0-9]', { cwd })
+    .then(mansGlob => {
+      data.man = mansGlob.map(man =>
+        path.relative(dirname, path.join(cwd, man)).split(path.sep).join('/')
+      )
+      return cb(null, data)
+    })
+    .catch(er => cb(er))
+}
+
+function bins (file, data, cb) {
+  data = normalizePackageBin(data)
+
+  var m = data.directories && data.directories.bin
+  if (data.bin || !m) {
+    return cb(null, data)
+  }
+
+  m = path.resolve(path.dirname(file), path.join('.', path.join('/', m)))
+  glob('**', { cwd: m })
+    .then(binsGlob => bins_(file, data, binsGlob, cb))
+    .catch(er => cb(er))
+}
+
+function bins_ (file, data, binsGlob, cb) {
+  var m = (data.directories && data.directories.bin) || '.'
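+  // every non-hidden file under directories.bin becomes a bin entry keyed by
+  // its basename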
+  data.bin = binsGlob.reduce(function (acc, mf) {
+    if (mf && mf.charAt(0) !== '.') {
+      var f = path.basename(mf)
+      acc[f] = path.join(m, mf)
+    }
+    return acc
+  }, {})
+  return cb(null, normalizePackageBin(data))
+}
+
+function bundleDependencies (file, data, cb) {
+  var bd = 'bundleDependencies'
+  var bdd = 'bundledDependencies'
+  // normalize key name
+  if (data[bdd] !== undefined) {
+    if (data[bd] === undefined) {
+      data[bd] = data[bdd]
+    }
+    delete data[bdd]
+  }
+  if (data[bd] === false) {
+    delete data[bd]
+  } else if (data[bd] === true) {
+    data[bd] = Object.keys(data.dependencies || {})
+  } else if (data[bd] !== undefined && !Array.isArray(data[bd])) {
+    delete data[bd]
+  }
+  return cb(null, data)
+}
+
+function githead (file, data, cb) {
+  if (data.gitHead) {
+    return cb(null, data)
+  }
+  var dir = path.dirname(file)
+  var head = path.resolve(dir, '.git/HEAD')
+  fs.readFile(head, 'utf8', function (er, headData) {
+    if (er) {
+      var parent = path.dirname(dir)
+      if (parent === dir) {
+        return cb(null, data)
+      }
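+      // no .git/HEAD here: recurse with `dir` as the file argument, so the
+      // dirname() at the top of githead steps up to the parent directory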
+      return githead(dir, data, cb)
+    }
+    githead_(data, dir, headData, cb)
+  })
+}
+
+function githead_ (data, dir, head, cb) {
+  if (!head.match(/^ref: /)) {
+    data.gitHead = head.trim()
+    return cb(null, data)
+  }
+  var headRef = head.replace(/^ref: /, '').trim()
+  var headFile = path.resolve(dir, '.git', headRef)
+  fs.readFile(headFile, 'utf8', function (er, headData) {
+    if (er || !headData) {
+      var packFile = path.resolve(dir, '.git/packed-refs')
+      return fs.readFile(packFile, 'utf8', function (readFileErr, refs) {
+        if (readFileErr || !refs) {
+          return cb(null, data)
+        }
+        refs = refs.split('\n')
+        for (var i = 0; i < refs.length; i++) {
+          var match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
+          if (match && match[2].trim() === headRef) {
+            data.gitHead = match[1]
+            break
+          }
+        }
+        return cb(null, data)
+      })
+    }
+    headData = headData.replace(/^ref: /, '').trim()
+    data.gitHead = headData
+    return cb(null, data)
+  })
+}
+
+/**
+ * Warn if the bin references don't point to anything.  This might be better in
+ * normalize-package-data if it had access to the file path.
+ */
+function checkBinReferences_ (file, data, warn, cb) {
+  if (!(data.bin instanceof Object)) {
+    return cb()
+  }
+
+  var keys = Object.keys(data.bin)
+  var keysLeft = keys.length
+  if (!keysLeft) {
+    return cb()
+  }
+
+  function handleExists (relName, result) {
+    keysLeft--
+    if (!result) {
+      warn('No bin file found at ' + relName)
+    }
+    if (!keysLeft) {
+      cb()
+    }
+  }
+
+  keys.forEach(function (key) {
+    var dirName = path.dirname(file)
+    var relName = data.bin[key]
+    /* istanbul ignore if - impossible, bins have been normalized */
+    if (typeof relName !== 'string') {
+      var msg = 'Bin filename for ' + key +
+        ' is not a string: ' + util.inspect(relName)
+      warn(msg)
+      delete data.bin[key]
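+      // mark the entry as handled/found so we don't also emit a
+      // "No bin file found" warning for it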
+      handleExists(relName, true)
+      return
+    }
+    var binPath = path.resolve(dirName, relName)
+    fs.stat(binPath, (err) => handleExists(relName, !err))
+  })
+}
+
+function final (file, data, log, strict, cb) {
+  var pId = makePackageId(data)
+
+  function warn (msg) {
+    if (typoWarned[pId]) {
+      return
+    }
+    if (log) {
+      log('package.json', pId, msg)
+    }
+  }
+
+  try {
+    normalizeData(data, warn, strict)
+  } catch (error) {
+    return cb(error)
+  }
+
+  checkBinReferences_(file, data, warn, function () {
+    typoWarned[pId] = true
+    cb(null, data)
+  })
+}
+
+function fillTypes (file, data, cb) {
+  var index = data.main || 'index.js'
+
+  if (typeof index !== 'string') {
+    return cb(new TypeError('The "main" attribute must be of type string.'))
+  }
+
+  // TODO exports is much more complicated than this in verbose format
+  // We need to support for instance
+
+  // "exports": {
+  //   ".": [
+  //     {
+  //       "default": "./lib/npm.js"
+  //     },
+  //     "./lib/npm.js"
+  //   ],
+  //   "./package.json": "./package.json"
+  // },
+  // as well as conditional exports
+
+  // if (data.exports && typeof data.exports === 'string') {
+  //   index = data.exports
+  // }
+
+  // if (data.exports && data.exports['.']) {
+  //   index = data.exports['.']
+  //   if (typeof index !== 'string') {
+  //   }
+  // }
+
+  var extless =
+    path.join(path.dirname(index), path.basename(index, path.extname(index)))
+  var dts = `./${extless}.d.ts`
+  var dtsPath = path.join(path.dirname(file), dts)
+  var hasDTSFields = 'types' in data || 'typings' in data
+  if (!hasDTSFields && fs.existsSync(dtsPath)) {
+    data.types = dts.split(path.sep).join('/')
+  }
+
+  cb(null, data)
+}
+
+function makePackageId (data) {
+  var name = cleanString(data.name)
+  var ver = cleanString(data.version)
+  return name + '@' + ver
+}
+
+function cleanString (str) {
+  return (!str || typeof (str) !== 'string') ? '' : str.trim()
+}
+
+// extract an embedded package description from a comment of the form:
+// /**package { "name": "foo", "version": "1.2.3", ... } **/
+function parseIndex (data) {
+  data = data.split(/^\/\*\*package(?:\s|$)/m)
+
+  if (data.length < 2) {
+    return null
+  }
+  data = data[1]
+  data = data.split(/\*\*\/$/m)
+
+  if (data.length < 2) {
+    return null
+  }
+  data = data[0]
+  data = data.replace(/^\s*\*/mg, '')
+
+  try {
+    return safeJSON(data)
+  } catch (er) {
+    return null
+  }
+}
+
+function parseError (ex, file) {
+  var e = new Error('Failed to parse json\n' + ex.message)
+  e.code = 'EJSONPARSE'
+  e.path = file
+  return e
+}
diff --git a/node_modules/pacote/node_modules/read-package-json/package.json b/node_modules/pacote/node_modules/read-package-json/package.json
new file mode 100644
index 0000000000000..01061f2bc2792
--- /dev/null
+++ b/node_modules/pacote/node_modules/read-package-json/package.json
@@ -0,0 +1,65 @@
+{
+  "name": "read-package-json",
+  "version": "7.0.0",
+  "author": "GitHub Inc.",
+  "description": "The thing npm uses to read package.json files with semantics and defaults and validation",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/read-package-json.git"
+  },
+  "main": "lib/read-json.js",
+  "scripts": {
+    "prerelease": "npm t",
+    "postrelease": "npm publish && git push --follow-tags",
+    "release": "standard-version -s",
+    "test": "tap",
+    "npmclilint": "npmcli-lint",
+    "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "dependencies": {
+    "glob": "^10.2.2",
+    "json-parse-even-better-errors": "^3.0.0",
+    "normalize-package-data": "^6.0.0",
+    "npm-normalize-package-bin": "^3.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.1"
+  },
+  "license": "ISC",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^16.14.0 || >=18.0.0"
+  },
+  "tap": {
+    "branches": 73,
+    "functions": 77,
+    "lines": 77,
+    "statements": 77,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.18.0",
+    "publish": "true",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
+  }
+}
diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json
index d9119065bfc3d..987cbccded90b 100644
--- a/node_modules/pacote/package.json
+++ b/node_modules/pacote/package.json
@@ -1,6 +1,6 @@
 {
   "name": "pacote",
-  "version": "16.0.0",
+  "version": "17.0.0",
   "description": "JavaScript package downloader",
   "author": "GitHub Inc.",
   "bin": {
@@ -28,7 +28,7 @@
     "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0",
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "hosted-git-info": "^6.0.0",
+    "hosted-git-info": "^7.0.0",
     "mutate-fs": "^2.1.1",
     "nock": "^13.2.4",
     "npm-registry-mock": "^1.3.2",
@@ -44,27 +44,27 @@
     "git"
   ],
   "dependencies": {
-    "@npmcli/git": "^4.0.0",
+    "@npmcli/git": "^5.0.0",
     "@npmcli/installed-package-contents": "^2.0.1",
     "@npmcli/promise-spawn": "^6.0.1",
     "@npmcli/run-script": "^6.0.0",
-    "cacache": "^17.0.0",
+    "cacache": "^18.0.0",
     "fs-minipass": "^3.0.0",
     "minipass": "^7.0.2",
-    "npm-package-arg": "^10.0.0",
+    "npm-package-arg": "^11.0.0",
     "npm-packlist": "^7.0.0",
     "npm-pick-manifest": "^8.0.0",
     "npm-registry-fetch": "^15.0.0",
     "proc-log": "^3.0.0",
     "promise-retry": "^2.0.1",
-    "read-package-json": "^6.0.0",
+    "read-package-json": "^7.0.0",
     "read-package-json-fast": "^3.0.0",
     "sigstore": "^1.3.0",
     "ssri": "^10.0.0",
     "tar": "^6.1.11"
   },
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "repository": {
     "type": "git",
@@ -73,7 +73,7 @@
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/package-lock.json b/package-lock.json
index 693e245a4e45b..0d868b1c1b358 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -138,7 +138,7 @@
         "npm-user-validate": "^2.0.0",
         "npmlog": "^7.0.1",
         "p-map": "^4.0.0",
-        "pacote": "^16.0.0",
+        "pacote": "^17.0.0",
         "parse-conflict-json": "^3.0.1",
         "proc-log": "^3.0.0",
         "qrcode-terminal": "^0.12.0",
@@ -230,7 +230,7 @@
         "json-stringify-safe": "^5.0.1",
         "nock": "^13.3.0",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^16.0.0",
+        "pacote": "^17.0.0",
         "tap": "^16.3.4"
       },
       "engines": {
@@ -2524,6 +2524,24 @@
         "node": "^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git": {
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-4.1.0.tgz",
+      "integrity": "sha512-9hwoB3gStVfa0N31ymBmrX+GuDGdVA/QWShZVqE0HK2Af+7QGGrCTbZia/SW0ImUTjTne7SP91qxDmtXvDHRPQ==",
+      "dependencies": {
+        "@npmcli/promise-spawn": "^6.0.0",
+        "lru-cache": "^7.4.4",
+        "npm-pick-manifest": "^8.0.0",
+        "proc-log": "^3.0.0",
+        "promise-inflight": "^1.0.1",
+        "promise-retry": "^2.0.1",
+        "semver": "^7.3.5",
+        "which": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/metavuln-calculator/node_modules/cacache": {
       "version": "17.1.4",
       "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
@@ -2546,6 +2564,17 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info": {
+      "version": "6.1.1",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
+      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
+      "dependencies": {
+        "lru-cache": "^7.5.1"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache": {
       "version": "7.18.3",
       "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
@@ -2554,6 +2583,65 @@
         "node": ">=12"
       }
     },
+    "node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg": {
+      "version": "10.1.0",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
+      "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
+      "dependencies": {
+        "hosted-git-info": "^6.0.0",
+        "proc-log": "^3.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^5.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest": {
+      "version": "8.0.2",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz",
+      "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==",
+      "dependencies": {
+        "npm-install-checks": "^6.0.0",
+        "npm-normalize-package-bin": "^3.0.0",
+        "npm-package-arg": "^10.0.0",
+        "semver": "^7.3.5"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/metavuln-calculator/node_modules/pacote": {
+      "version": "16.0.0",
+      "resolved": "https://registry.npmjs.org/pacote/-/pacote-16.0.0.tgz",
+      "integrity": "sha512-tXeSsl21IUIzw/zW0rzK/po2zlI5Nqbkpu0V6Pv99f3leHde7zdv+VjuP9pnVeTVV7OvaS49u+lgmvXjQ0TMJQ==",
+      "dependencies": {
+        "@npmcli/git": "^4.0.0",
+        "@npmcli/installed-package-contents": "^2.0.1",
+        "@npmcli/promise-spawn": "^6.0.1",
+        "@npmcli/run-script": "^6.0.0",
+        "cacache": "^17.0.0",
+        "fs-minipass": "^3.0.0",
+        "minipass": "^7.0.2",
+        "npm-package-arg": "^10.0.0",
+        "npm-packlist": "^7.0.0",
+        "npm-pick-manifest": "^8.0.0",
+        "npm-registry-fetch": "^15.0.0",
+        "proc-log": "^3.0.0",
+        "promise-retry": "^2.0.1",
+        "read-package-json": "^6.0.0",
+        "read-package-json-fast": "^3.0.0",
+        "sigstore": "^1.3.0",
+        "ssri": "^10.0.0",
+        "tar": "^6.1.11"
+      },
+      "bin": {
+        "pacote": "lib/bin.js"
+      },
+      "engines": {
+        "node": "^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/mock-globals": {
       "resolved": "mock-globals",
       "link": true
@@ -10612,25 +10700,25 @@
       }
     },
     "node_modules/pacote": {
-      "version": "16.0.0",
-      "resolved": "https://registry.npmjs.org/pacote/-/pacote-16.0.0.tgz",
-      "integrity": "sha512-tXeSsl21IUIzw/zW0rzK/po2zlI5Nqbkpu0V6Pv99f3leHde7zdv+VjuP9pnVeTVV7OvaS49u+lgmvXjQ0TMJQ==",
+      "version": "17.0.0",
+      "resolved": "https://registry.npmjs.org/pacote/-/pacote-17.0.0.tgz",
+      "integrity": "sha512-ho3CUJW0Eh/z6qus9YfPE7lbLoIr97T9KkvrcWcqmykNMuvF1bGL2IXJ0U+hoe7rAamLCX6CXl6xp0aPvyoPag==",
       "inBundle": true,
       "dependencies": {
-        "@npmcli/git": "^4.0.0",
+        "@npmcli/git": "^5.0.0",
         "@npmcli/installed-package-contents": "^2.0.1",
         "@npmcli/promise-spawn": "^6.0.1",
         "@npmcli/run-script": "^6.0.0",
-        "cacache": "^17.0.0",
+        "cacache": "^18.0.0",
         "fs-minipass": "^3.0.0",
         "minipass": "^7.0.2",
-        "npm-package-arg": "^10.0.0",
+        "npm-package-arg": "^11.0.0",
         "npm-packlist": "^7.0.0",
         "npm-pick-manifest": "^8.0.0",
         "npm-registry-fetch": "^15.0.0",
         "proc-log": "^3.0.0",
         "promise-retry": "^2.0.1",
-        "read-package-json": "^6.0.0",
+        "read-package-json": "^7.0.0",
         "read-package-json-fast": "^3.0.0",
         "sigstore": "^1.3.0",
         "ssri": "^10.0.0",
@@ -10640,52 +10728,49 @@
         "pacote": "lib/bin.js"
       },
       "engines": {
-        "node": "^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/pacote/node_modules/@npmcli/git": {
-      "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-4.1.0.tgz",
-      "integrity": "sha512-9hwoB3gStVfa0N31ymBmrX+GuDGdVA/QWShZVqE0HK2Af+7QGGrCTbZia/SW0ImUTjTne7SP91qxDmtXvDHRPQ==",
+    "node_modules/pacote/node_modules/lru-cache": {
+      "version": "7.18.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
+      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=12"
+      }
+    },
+    "node_modules/pacote/node_modules/normalize-package-data": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.0.tgz",
+      "integrity": "sha512-UL7ELRVxYBHBgYEtZCXjxuD5vPxnmvMGq0jp/dGPKKrN7tfsBh2IY7TlJ15WWwdjRWD3RJbnsygUurTK3xkPkg==",
       "inBundle": true,
       "dependencies": {
-        "@npmcli/promise-spawn": "^6.0.0",
-        "lru-cache": "^7.4.4",
-        "npm-pick-manifest": "^8.0.0",
-        "proc-log": "^3.0.0",
-        "promise-inflight": "^1.0.1",
-        "promise-retry": "^2.0.1",
+        "hosted-git-info": "^7.0.0",
+        "is-core-module": "^2.8.1",
         "semver": "^7.3.5",
-        "which": "^3.0.0"
+        "validate-npm-package-license": "^3.0.4"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/pacote/node_modules/cacache": {
-      "version": "17.1.4",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
-      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+    "node_modules/pacote/node_modules/npm-pick-manifest": {
+      "version": "8.0.2",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz",
+      "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==",
       "inBundle": true,
       "dependencies": {
-        "@npmcli/fs": "^3.1.0",
-        "fs-minipass": "^3.0.0",
-        "glob": "^10.2.2",
-        "lru-cache": "^7.7.1",
-        "minipass": "^7.0.3",
-        "minipass-collect": "^1.0.2",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "p-map": "^4.0.0",
-        "ssri": "^10.0.0",
-        "tar": "^6.1.11",
-        "unique-filename": "^3.0.0"
+        "npm-install-checks": "^6.0.0",
+        "npm-normalize-package-bin": "^3.0.0",
+        "npm-package-arg": "^10.0.0",
+        "semver": "^7.3.5"
       },
       "engines": {
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/pacote/node_modules/hosted-git-info": {
+    "node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info": {
       "version": "6.1.1",
       "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
       "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
@@ -10697,16 +10782,7 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/pacote/node_modules/lru-cache": {
-      "version": "7.18.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
-      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
-      "inBundle": true,
-      "engines": {
-        "node": ">=12"
-      }
-    },
-    "node_modules/pacote/node_modules/npm-package-arg": {
+    "node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg": {
       "version": "10.1.0",
       "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
       "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
@@ -10721,19 +10797,19 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/pacote/node_modules/npm-pick-manifest": {
-      "version": "8.0.2",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz",
-      "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==",
+    "node_modules/pacote/node_modules/read-package-json": {
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-7.0.0.tgz",
+      "integrity": "sha512-uL4Z10OKV4p6vbdvIXB+OzhInYtIozl/VxUBPgNkBuUi2DeRonnuspmaVAMcrkmfjKGNmRndyQAbE7/AmzGwFg==",
       "inBundle": true,
       "dependencies": {
-        "npm-install-checks": "^6.0.0",
-        "npm-normalize-package-bin": "^3.0.0",
-        "npm-package-arg": "^10.0.0",
-        "semver": "^7.3.5"
+        "glob": "^10.2.2",
+        "json-parse-even-better-errors": "^3.0.0",
+        "normalize-package-data": "^6.0.0",
+        "npm-normalize-package-bin": "^3.0.0"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/parent-module": {
@@ -16332,7 +16408,7 @@
         "npm-pick-manifest": "^9.0.0",
         "npm-registry-fetch": "^15.0.0",
         "npmlog": "^7.0.1",
-        "pacote": "^16.0.0",
+        "pacote": "^17.0.0",
         "parse-conflict-json": "^3.0.0",
         "proc-log": "^3.0.0",
         "promise-all-reject-late": "^1.0.0",
@@ -16413,7 +16489,7 @@
         "diff": "^5.1.0",
         "minimatch": "^9.0.0",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^16.0.0",
+        "pacote": "^17.0.0",
         "tar": "^6.1.13"
       },
       "devDependencies": {
@@ -16434,7 +16510,7 @@
         "ci-info": "^3.7.1",
         "npm-package-arg": "^11.0.0",
         "npmlog": "^7.0.1",
-        "pacote": "^16.0.0",
+        "pacote": "^17.0.0",
         "proc-log": "^3.0.0",
         "read": "^2.0.0",
         "read-package-json-fast": "^3.0.2",
@@ -16512,7 +16588,7 @@
         "@npmcli/arborist": "^6.3.0",
         "@npmcli/run-script": "^6.0.0",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^16.0.0"
+        "pacote": "^17.0.0"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
diff --git a/package.json b/package.json
index 2cc6dcb9e33f1..f13feb344bbfa 100644
--- a/package.json
+++ b/package.json
@@ -103,7 +103,7 @@
     "npm-user-validate": "^2.0.0",
     "npmlog": "^7.0.1",
     "p-map": "^4.0.0",
-    "pacote": "^16.0.0",
+    "pacote": "^17.0.0",
     "parse-conflict-json": "^3.0.1",
     "proc-log": "^3.0.0",
     "qrcode-terminal": "^0.12.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 7a134ec5d4c91..c8ee90d9c4609 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -26,7 +26,7 @@
     "npm-pick-manifest": "^9.0.0",
     "npm-registry-fetch": "^15.0.0",
     "npmlog": "^7.0.1",
-    "pacote": "^16.0.0",
+    "pacote": "^17.0.0",
     "parse-conflict-json": "^3.0.0",
     "proc-log": "^3.0.0",
     "promise-all-reject-late": "^1.0.0",
diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json
index 1bdcba1f3b5dc..12565b747d062 100644
--- a/workspaces/libnpmdiff/package.json
+++ b/workspaces/libnpmdiff/package.json
@@ -53,7 +53,7 @@
     "diff": "^5.1.0",
     "minimatch": "^9.0.0",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^16.0.0",
+    "pacote": "^17.0.0",
     "tar": "^6.1.13"
   },
   "templateOSS": {
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index a6501d3169caa..e633eb98dd50a 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -64,7 +64,7 @@
     "ci-info": "^3.7.1",
     "npm-package-arg": "^11.0.0",
     "npmlog": "^7.0.1",
-    "pacote": "^16.0.0",
+    "pacote": "^17.0.0",
     "proc-log": "^3.0.0",
     "read": "^2.0.0",
     "read-package-json-fast": "^3.0.2",
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index f5e98878096f9..0a8330815c2a2 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -39,7 +39,7 @@
     "@npmcli/arborist": "^6.3.0",
     "@npmcli/run-script": "^6.0.0",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^16.0.0"
+    "pacote": "^17.0.0"
   },
   "engines": {
     "node": "^16.13.0 || >=18.0.0"

From 84925a017b450ea1c5e386a7065324dffcd1c139 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 13:53:22 -0700
Subject: [PATCH 38/68] deps: init-package-json@6.0.0

---
 node_modules/.gitignore                       |    5 +-
 .../hosted-git-info/lib/from-url.js           |  122 --
 .../node_modules/hosted-git-info/lib/hosts.js |  228 ---
 .../node_modules/hosted-git-info/lib/index.js |  179 ---
 .../hosted-git-info/lib/parse-url.js          |   78 --
 .../node_modules/hosted-git-info/package.json |   59 -
 .../node_modules/lru-cache/LICENSE            |   15 -
 .../node_modules/lru-cache/index.js           | 1227 -----------------
 .../node_modules/lru-cache/index.mjs          | 1227 -----------------
 .../node_modules/lru-cache/package.json       |   96 --
 .../normalize-package-data/LICENSE            |   15 +
 .../lib/extract_description.js                |   24 +
 .../normalize-package-data/lib/fixer.js       |  475 +++++++
 .../lib/make_warning.js                       |   22 +
 .../normalize-package-data/lib/normalize.js   |   48 +
 .../normalize-package-data/lib/safe_format.js |   11 +
 .../normalize-package-data/lib/typos.json     |   25 +
 .../lib/warning_messages.json                 |   30 +
 .../package.json                              |   75 +-
 .../node_modules/npm-package-arg/LICENSE      |   15 -
 .../node_modules/npm-package-arg/lib/npa.js   |  431 ------
 .../LICENSE                                   |    4 +-
 .../read-package-json/lib/read-json.js        |  589 ++++++++
 .../read-package-json/package.json            |   65 +
 node_modules/init-package-json/package.json   |   21 +-
 package-lock.json                             |   60 +-
 package.json                                  |    2 +-
 27 files changed, 1388 insertions(+), 3760 deletions(-)
 delete mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js
 delete mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js
 delete mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js
 delete mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js
 delete mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/package.json
 delete mode 100644 node_modules/init-package-json/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/init-package-json/node_modules/lru-cache/index.js
 delete mode 100644 node_modules/init-package-json/node_modules/lru-cache/index.mjs
 delete mode 100644 node_modules/init-package-json/node_modules/lru-cache/package.json
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/LICENSE
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json
 rename node_modules/init-package-json/node_modules/{npm-package-arg => normalize-package-data}/package.json (55%)
 delete mode 100644 node_modules/init-package-json/node_modules/npm-package-arg/LICENSE
 delete mode 100644 node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js
 rename node_modules/init-package-json/node_modules/{hosted-git-info => read-package-json}/LICENSE (93%)
 create mode 100644 node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js
 create mode 100644 node_modules/init-package-json/node_modules/read-package-json/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 7bc0a96e9f6ef..106eec7635e0c 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -147,9 +147,8 @@
 !/init-package-json
 !/init-package-json/node_modules/
 /init-package-json/node_modules/*
-!/init-package-json/node_modules/hosted-git-info
-!/init-package-json/node_modules/lru-cache
-!/init-package-json/node_modules/npm-package-arg
+!/init-package-json/node_modules/normalize-package-data
+!/init-package-json/node_modules/read-package-json
 !/ip-regex
 !/ip
 !/is-cidr
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js
deleted file mode 100644
index efc1247d59d12..0000000000000
--- a/node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js
+++ /dev/null
@@ -1,122 +0,0 @@
-'use strict'
-
-const parseUrl = require('./parse-url')
-
-// look for github shorthand inputs, such as npm/cli
-const isGitHubShorthand = (arg) => {
-  // it cannot contain whitespace before the first #
-  // it cannot start with a / because that's probably an absolute file path
-  // but it must include a slash since repos are username/repository
-  // it cannot start with a . because that's probably a relative file path
-  // it cannot start with an @ because that's a scoped package if it passes the other tests
-  // it cannot contain a : before a # because that tells us that there's a protocol
-  // a second / may not exist before a #
-  const firstHash = arg.indexOf('#')
-  const firstSlash = arg.indexOf('/')
-  const secondSlash = arg.indexOf('/', firstSlash + 1)
-  const firstColon = arg.indexOf(':')
-  const firstSpace = /\s/.exec(arg)
-  const firstAt = arg.indexOf('@')
-
-  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
-  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
-  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
-  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
-  const hasSlash = firstSlash > 0
-  // if a # is found, what we really want to know is that the character
-  // immediately before # is not a /
-  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
-  const doesNotStartWithDot = !arg.startsWith('.')
-
-  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
-    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
-    secondSlashOnlyAfterHash
-}
-
-module.exports = (giturl, opts, { gitHosts, protocols }) => {
-  if (!giturl) {
-    return
-  }
-
-  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
-  const parsed = parseUrl(correctedUrl, protocols)
-  if (!parsed) {
-    return
-  }
-
-  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
-  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
-    ? parsed.hostname.slice(4)
-    : parsed.hostname]
-  const gitHostName = gitHostShortcut || gitHostDomain
-  if (!gitHostName) {
-    return
-  }
-
-  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
-  let auth = null
-  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
-    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
-  }
-
-  let committish = null
-  let user = null
-  let project = null
-  let defaultRepresentation = null
-
-  try {
-    if (gitHostShortcut) {
-      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
-      const firstAt = pathname.indexOf('@')
-      // we ignore auth for shortcuts, so just trim it out
-      if (firstAt > -1) {
-        pathname = pathname.slice(firstAt + 1)
-      }
-
-      const lastSlash = pathname.lastIndexOf('/')
-      if (lastSlash > -1) {
-        user = decodeURIComponent(pathname.slice(0, lastSlash))
-        // we want nulls only, never empty strings
-        if (!user) {
-          user = null
-        }
-        project = decodeURIComponent(pathname.slice(lastSlash + 1))
-      } else {
-        project = decodeURIComponent(pathname)
-      }
-
-      if (project.endsWith('.git')) {
-        project = project.slice(0, -4)
-      }
-
-      if (parsed.hash) {
-        committish = decodeURIComponent(parsed.hash.slice(1))
-      }
-
-      defaultRepresentation = 'shortcut'
-    } else {
-      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
-        return
-      }
-
-      const segments = gitHostInfo.extract(parsed)
-      if (!segments) {
-        return
-      }
-
-      user = segments.user && decodeURIComponent(segments.user)
-      project = decodeURIComponent(segments.project)
-      committish = decodeURIComponent(segments.committish)
-      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
-    }
-  } catch (err) {
-    /* istanbul ignore else */
-    if (err instanceof URIError) {
-      return
-    } else {
-      throw err
-    }
-  }
-
-  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
-}
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js
deleted file mode 100644
index 013712b7842c8..0000000000000
--- a/node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js
+++ /dev/null
@@ -1,228 +0,0 @@
-/* eslint-disable max-len */
-
-'use strict'
-
-const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
-const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
-const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-')
-
-const defaults = {
-  sshtemplate: ({ domain, user, project, committish }) =>
-    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, user, project, committish }) =>
-    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
-  browsetemplate: ({ domain, user, project, committish, treepath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  docstemplate: ({ domain, user, project, treepath, committish }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
-  shortcuttemplate: ({ type, user, project, committish }) =>
-    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ user, project, committish }) =>
-    `${user}/${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, user, project }) =>
-    `https://${domain}/${user}/${project}/issues`,
-  hashformat: formatHashFragment,
-}
-
-const hosts = {}
-hosts.github = {
-  // First two are insecure and generally shouldn't be used any more, but
-  // they are still supported.
-  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'github.com',
-  treepath: 'tree',
-  blobpath: 'blob',
-  editpath: 'edit',
-  filetemplate: ({ auth, user, project, committish, path }) =>
-    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
-  gittemplate: ({ auth, domain, user, project, committish }) =>
-    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, type, committish] = url.pathname.split('/', 5)
-    if (type && type !== 'tree') {
-      return
-    }
-
-    if (!type) {
-      committish = url.hash.slice(1)
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish }
-  },
-}
-
-hosts.bitbucket = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'bitbucket.org',
-  treepath: 'src',
-  blobpath: 'src',
-  editpath: '?mode=edit',
-  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (['get'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gitlab = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gitlab.com',
-  treepath: 'tree',
-  blobpath: 'tree',
-  editpath: '-/edit',
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    const path = url.pathname.slice(1)
-    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
-      return
-    }
-
-    const segments = path.split('/')
-    let project = segments.pop()
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    const user = segments.join('/')
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gist = {
-  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gist.github.com',
-  editpath: 'edit',
-  sshtemplate: ({ domain, project, committish }) =>
-    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, project, committish }) =>
-    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
-  browsetemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  docstemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  httpstemplate: ({ domain, project, committish }) =>
-    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ user, project, committish, path }) =>
-    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
-  shortcuttemplate: ({ type, project, committish }) =>
-    `${type}:${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ project, committish }) =>
-    `${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, project }) =>
-    `https://${domain}/${project}`,
-  gittemplate: ({ domain, project, committish }) =>
-    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ project, committish }) =>
-    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (aux === 'raw') {
-      return
-    }
-
-    if (!project) {
-      if (!user) {
-        return
-      }
-
-      project = user
-      user = null
-    }
-
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-  hashformat: function (fragment) {
-    return fragment && 'file-' + formatHashFragment(fragment)
-  },
-}
-
-hosts.sourcehut = {
-  protocols: ['git+ssh:', 'https:'],
-  domain: 'git.sr.ht',
-  treepath: 'tree',
-  blobpath: 'tree',
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
-  httpstemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
-  bugstemplate: ({ user, project }) =>
-    `https://todo.sr.ht/${user}/${project}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-
-    // tarball url
-    if (['archive'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  hosts[name] = Object.assign({}, defaults, host)
-}
-
-module.exports = hosts
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js
deleted file mode 100644
index a7339c217e9a3..0000000000000
--- a/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js
+++ /dev/null
@@ -1,179 +0,0 @@
-'use strict'
-
-const LRU = require('lru-cache')
-const hosts = require('./hosts.js')
-const fromUrl = require('./from-url.js')
-const parseUrl = require('./parse-url.js')
-
-const cache = new LRU({ max: 1000 })
-
-class GitHost {
-  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
-    Object.assign(this, GitHost.#gitHosts[type], {
-      type,
-      user,
-      auth,
-      project,
-      committish,
-      default: defaultRepresentation,
-      opts,
-    })
-  }
-
-  static #gitHosts = { byShortcut: {}, byDomain: {} }
-  static #protocols = {
-    'git+ssh:': { name: 'sshurl' },
-    'ssh:': { name: 'sshurl' },
-    'git+https:': { name: 'https', auth: true },
-    'git:': { auth: true },
-    'http:': { auth: true },
-    'https:': { auth: true },
-    'git+http:': { auth: true },
-  }
-
-  static addHost (name, host) {
-    GitHost.#gitHosts[name] = host
-    GitHost.#gitHosts.byDomain[host.domain] = name
-    GitHost.#gitHosts.byShortcut[`${name}:`] = name
-    GitHost.#protocols[`${name}:`] = { name }
-  }
-
-  static fromUrl (giturl, opts) {
-    if (typeof giturl !== 'string') {
-      return
-    }
-
-    const key = giturl + JSON.stringify(opts || {})
-
-    if (!cache.has(key)) {
-      const hostArgs = fromUrl(giturl, opts, {
-        gitHosts: GitHost.#gitHosts,
-        protocols: GitHost.#protocols,
-      })
-      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
-    }
-
-    return cache.get(key)
-  }
-
-  static parseUrl (url) {
-    return parseUrl(url)
-  }
-
-  #fill (template, opts) {
-    if (typeof template !== 'function') {
-      return null
-    }
-
-    const options = { ...this, ...this.opts, ...opts }
-
-    // the path should always be set so we don't end up with 'undefined' in urls
-    if (!options.path) {
-      options.path = ''
-    }
-
-    // template functions will insert the leading slash themselves
-    if (options.path.startsWith('/')) {
-      options.path = options.path.slice(1)
-    }
-
-    if (options.noCommittish) {
-      options.committish = null
-    }
-
-    const result = template(options)
-    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
-  }
-
-  hash () {
-    return this.committish ? `#${this.committish}` : ''
-  }
-
-  ssh (opts) {
-    return this.#fill(this.sshtemplate, opts)
-  }
-
-  sshurl (opts) {
-    return this.#fill(this.sshurltemplate, opts)
-  }
-
-  browse (path, ...args) {
-    // not a string, treat path as opts
-    if (typeof path !== 'string') {
-      return this.#fill(this.browsetemplate, path)
-    }
-
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browsetreetemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  // If the path is known to be a file, then browseFile should be used. For some hosts
-  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
-  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
-  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
-  // does not redirect to a different commit.
-  browseFile (path, ...args) {
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browseblobtemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  docs (opts) {
-    return this.#fill(this.docstemplate, opts)
-  }
-
-  bugs (opts) {
-    return this.#fill(this.bugstemplate, opts)
-  }
-
-  https (opts) {
-    return this.#fill(this.httpstemplate, opts)
-  }
-
-  git (opts) {
-    return this.#fill(this.gittemplate, opts)
-  }
-
-  shortcut (opts) {
-    return this.#fill(this.shortcuttemplate, opts)
-  }
-
-  path (opts) {
-    return this.#fill(this.pathtemplate, opts)
-  }
-
-  tarball (opts) {
-    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
-  }
-
-  file (path, opts) {
-    return this.#fill(this.filetemplate, { ...opts, path })
-  }
-
-  edit (path, opts) {
-    return this.#fill(this.edittemplate, { ...opts, path })
-  }
-
-  getDefaultRepresentation () {
-    return this.default
-  }
-
-  toString (opts) {
-    if (this.default && typeof this[this.default] === 'function') {
-      return this[this.default](opts)
-    }
-
-    return this.sshurl(opts)
-  }
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  GitHost.addHost(name, host)
-}
-
-module.exports = GitHost
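
A minimal usage sketch (not part of the patch; assumes hosted-git-info@6, the
copy being removed here, is loadable) showing how fromUrl() normalizes the
supported spellings into one cached GitHost instance:

const GitHost = require('hosted-git-info')

const info = GitHost.fromUrl('git@github.com:npm/cli.git#v9.0.0')
info.shortcut() // expected: 'github:npm/cli#v9.0.0'
info.https()    // expected: 'git+https://github.com/npm/cli.git#v9.0.0'
info.sshurl()   // expected: 'git+ssh://git@github.com/npm/cli.git#v9.0.0'

// fromUrl() memoizes on url + opts, so a repeat lookup returns the same object
GitHost.fromUrl('git@github.com:npm/cli.git#v9.0.0') === info // true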
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js
deleted file mode 100644
index 7d5489c008ab4..0000000000000
--- a/node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const url = require('url')
-
-const lastIndexOfBefore = (str, char, beforeChar) => {
-  const startPosition = str.indexOf(beforeChar)
-  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
-}
-
-const safeUrl = (u) => {
-  try {
-    return new url.URL(u)
-  } catch {
-    // this fn should never throw
-  }
-}
-
-// accepts input like git:github.com:user/repo and inserts the // after the first :
-const correctProtocol = (arg, protocols) => {
-  const firstColon = arg.indexOf(':')
-  const proto = arg.slice(0, firstColon + 1)
-  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
-    return arg
-  }
-
-  const firstAt = arg.indexOf('@')
-  if (firstAt > -1) {
-    if (firstAt > firstColon) {
-      return `git+ssh://${arg}`
-    } else {
-      return arg
-    }
-  }
-
-  const doubleSlash = arg.indexOf('//')
-  if (doubleSlash === firstColon + 1) {
-    return arg
-  }
-
-  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
-}
-
-// attempt to correct an scp style url so that it will parse with `new URL()`
-const correctUrl = (giturl) => {
-  // ignore any @ that comes after the first hash, since that denotes the
-  // start of a committish, which can itself contain @ characters
-  const firstAt = lastIndexOfBefore(giturl, '@', '#')
-  // ignore colons that come after the hash, since a committish can itself
-  // contain colons, as in:
-  // git@github.com:user/package-2#semver:^1.0.0
-  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
-
-  if (lastColonBeforeHash > firstAt) {
-    // the last : comes after the first @ (or there is no @)
-    // like it would in:
-    // proto://hostname.com:user/repo
-    // username@hostname.com:user/repo
-    // :password@hostname.com:user/repo
-    // username:password@hostname.com:user/repo
-    // proto://username@hostname.com:user/repo
-    // proto://:password@hostname.com:user/repo
-    // proto://username:password@hostname.com:user/repo
-    // then we replace the last : with a / to create a valid path
-    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
-  }
-
-  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
-    // we have no : at all
-    // as it would be in:
-    // username@hostname.com/user/repo
-    // then we prepend a protocol
-    giturl = `git+ssh://${giturl}`
-  }
-
-  return giturl
-}
-
-module.exports = (giturl, protocols) => {
-  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
-  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
-}
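
The two correction passes are easiest to follow on an scp-style URL with a
semver committish; a hypothetical trace (editor's sketch, assuming the file
above is loadable):

const parseUrl = require('./parse-url.js') // path is illustrative

// 'git@github.com:user/package-2#semver:^1.0.0' matches no known protocol,
// and new URL() rejects it outright because '@' cannot appear in a scheme.
// correctUrl() then replaces the last ':' before '#' with '/' and, with no
// ':' left before the hash, prepends git+ssh://
const u = parseUrl('git@github.com:user/package-2#semver:^1.0.0')
u.protocol // expected: 'git+ssh:'
u.username // expected: 'git'
u.host     // expected: 'github.com'
u.pathname // expected: '/user/package-2'
u.hash     // expected: '#semver:^1.0.0'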
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/package.json b/node_modules/init-package-json/node_modules/hosted-git-info/package.json
deleted file mode 100644
index 612259948afe7..0000000000000
--- a/node_modules/init-package-json/node_modules/hosted-git-info/package.json
+++ /dev/null
@@ -1,59 +0,0 @@
-{
-  "name": "hosted-git-info",
-  "version": "6.1.1",
-  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
-  "main": "./lib/index.js",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/hosted-git-info.git"
-  },
-  "keywords": [
-    "git",
-    "github",
-    "bitbucket",
-    "gitlab"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/hosted-git-info/issues"
-  },
-  "homepage": "https://github.com/npm/hosted-git-info",
-  "scripts": {
-    "posttest": "npm run lint",
-    "snap": "tap",
-    "test": "tap",
-    "test:coverage": "tap --coverage-report=html",
-    "lint": "eslint \"**/*.js\"",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run lint -- --fix",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "dependencies": {
-    "lru-cache": "^7.5.1"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.7.1",
-    "tap": "^16.0.1"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "color": 1,
-    "coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.7.1"
-  }
-}
diff --git a/node_modules/init-package-json/node_modules/lru-cache/LICENSE b/node_modules/init-package-json/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/init-package-json/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/init-package-json/node_modules/lru-cache/index.js b/node_modules/init-package-json/node_modules/lru-cache/index.js
deleted file mode 100644
index 48e99fe5e5a70..0000000000000
--- a/node_modules/init-package-json/node_modules/lru-cache/index.js
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
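
// Illustrative examples (not in the original file) of the selector above,
// which trades memory for capacity by picking the narrowest index type:
//   getUintArray(200)     // Uint8Array  (max <= 2^8)
//   getUintArray(50000)   // Uint16Array (max <= 2^16)
//   getUintArray(1e6)     // Uint32Array (max <= 2^32)
//   getUintArray(2 ** 40) // ZeroArray   (plain zero-filled Array fallback)
//   getUintArray(0)       // null        (not a positive integer)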
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.now + status.ttl - status.start
-      }
-    }
-
-    // debounce calls to perf.now() to at most one per ttlResolution
-    // interval (1ms by default) so we're not hitting that costly call
-    // repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation returned an invalid value (expected a positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
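
  // Illustrative sketch (not in the original file): because dump() rebases
  // entry.start from perf.now() onto Date.now(), and load() rebases it back,
  // a snapshot survives serialization and process restarts, e.g.:
  //
  //   const a = new LRUCache({ max: 10, ttl: 60000 })
  //   a.set('x', 1)
  //   const snapshot = JSON.stringify(a.dump())
  //   const b = new LRUCache({ max: 10, ttl: 60000 })
  //   b.load(JSON.parse(snapshot)) // 'x' keeps its remaining TTL in b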
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
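
  // Illustrative sketch (not in the original file): the promise built above
  // is stored as the cache entry itself, which is what lets fetch() serve a
  // previous value while a refresh is in flight, e.g.:
  //
  //   const cache = new LRUCache({
  //     max: 100,
  //     ttl: 5000,
  //     allowStale: true,
  //     fetchMethod: (key) => fetchRemote(key), // fetchRemote is hypothetical
  //   })
  //   const v = await cache.fetch('user:1') // on a stale hit this resolves
  //                                         // immediately with the old value
  //                                         // while the refresh continues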
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet.
-        // it's not stale, so this isn't a stale-while-refetching case.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
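
  // Worked example (not in the original file): with recency order
  // head=0 -> 1 -> tail=2, moveToTail(1) splices index 1 out via
  // connect(prev[1]=0, next[1]=2) and reattaches it with connect(2, 1),
  // leaving head=0 -> 2 -> tail=1; only pointer writes into the next/prev
  // typed arrays, no array shifting.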
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-module.exports = LRUCache
diff --git a/node_modules/init-package-json/node_modules/lru-cache/index.mjs b/node_modules/init-package-json/node_modules/lru-cache/index.mjs
deleted file mode 100644
index 4a0b4813ec515..0000000000000
--- a/node_modules/init-package-json/node_modules/lru-cache/index.mjs
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.now + status.ttl - status.start
-      }
-    }
-
-    // debounce calls to perf.now() to at most one per ttlResolution
-    // interval (1ms by default) so we're not hitting that costly call
-    // repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation returned an invalid value (expected a positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet
-        // it's not stale, which means this isn't a staleWhileRefetching.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-export default LRUCache
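
For context, the class deleted above is lru-cache@7's ESM build, previously nested under init-package-json. A minimal usage sketch of the API it implements, assuming lru-cache@7 is installed (the options correspond to fields handled by set(), get(), and fetch() in the removed code):

const LRUCache = require('lru-cache')

const cache = new LRUCache({
  max: 100,          // capacity; evict() reclaims the head slot when full
  ttl: 60_000,       // per-entry TTL in ms, enforced by isStale()
  allowStale: true,  // let get() hand back a stale value before deleting it
  // fetchMethod enables cache.fetch(): misses and stale hits go through
  // backgroundFetch(), which tracks the promise via __staleWhileFetching
  fetchMethod: async (key) => `value-for-${key}`,
})

cache.set('a', 1)
console.log(cache.get('a'))        // 1; also moves 'a' to the tail (most recent)
cache.fetch('b').then(console.log) // miss -> backgroundFetch -> 'value-for-b'
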
diff --git a/node_modules/init-package-json/node_modules/lru-cache/package.json b/node_modules/init-package-json/node_modules/lru-cache/package.json
deleted file mode 100644
index 9684991727e7a..0000000000000
--- a/node_modules/init-package-json/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,96 +0,0 @@
-{
-  "name": "lru-cache",
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "7.18.3",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "node ./scripts/transpile-to-esm.js",
-    "size": "size-limit",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc ./index.d.ts"
-  },
-  "type": "commonjs",
-  "main": "./index.js",
-  "module": "./index.mjs",
-  "types": "./index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./index.d.ts",
-        "default": "./index.mjs"
-      },
-      "require": {
-        "types": "./index.d.ts",
-        "default": "./index.js"
-      }
-    },
-    "./package.json": "./package.json"
-  },
-  "repository": "git://github.com/isaacs/node-lru-cache.git",
-  "devDependencies": {
-    "@size-limit/preset-small-lib": "^7.0.8",
-    "@types/node": "^17.0.31",
-    "@types/tap": "^15.0.6",
-    "benchmark": "^2.1.4",
-    "c8": "^7.11.2",
-    "clock-mock": "^1.0.6",
-    "eslint-config-prettier": "^8.5.0",
-    "prettier": "^2.6.2",
-    "size-limit": "^7.0.8",
-    "tap": "^16.3.4",
-    "ts-node": "^10.7.0",
-    "tslib": "^2.4.0",
-    "typedoc": "^0.23.24",
-    "typescript": "^4.6.4"
-  },
-  "license": "ISC",
-  "files": [
-    "index.js",
-    "index.mjs",
-    "index.d.ts"
-  ],
-  "engines": {
-    "node": ">=12"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--include=index.js"
-    ],
-    "node-arg": [
-      "--expose-gc",
-      "--require",
-      "ts-node/register"
-    ],
-    "ts": false
-  },
-  "size-limit": [
-    {
-      "path": "./index.js"
-    }
-  ]
-}
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/LICENSE b/node_modules/init-package-json/node_modules/normalize-package-data/LICENSE
new file mode 100644
index 0000000000000..19d1364a8ac08
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/normalize-package-data/LICENSE
@@ -0,0 +1,15 @@
+This package contains code originally written by Isaac Z. Schlueter.
+Used with permission.
+
+Copyright (c) Meryn Stol ("Author")
+All rights reserved.
+
+The BSD License
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js
new file mode 100644
index 0000000000000..631966b5f29af
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js
@@ -0,0 +1,24 @@
+module.exports = extractDescription
+
+// Extracts description from contents of a readme file in markdown format
+function extractDescription (d) {
+  if (!d) {
+    return
+  }
+  if (d === 'ERROR: No README data found!') {
+    return
+  }
+  // the first block of text before the first heading
+  // that isn't the first line heading
+  d = d.trim().split('\n')
+  let s = 0
+  while (d[s] && d[s].trim().match(/^(#|$)/)) {
+    s++
+  }
+  const l = d.length
+  let e = s + 1
+  while (e < l && d[e].trim()) {
+    e++
+  }
+  return d.slice(s, e).join(' ').trim()
+}
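
extractDescription skips any leading headings and blank lines, then joins the first contiguous block of text into a one-line description. A small sketch of the behavior (the require path is illustrative and the sample README is made up):

const extractDescription = require('./extract_description')

const readme = [
  '# my-pkg',
  '',
  'Does one thing well.',
  'Spans two lines.',
  '',
  '## Install',
].join('\n')

// Skips '# my-pkg' and the blank line, then joins the first text block:
// 'Does one thing well. Spans two lines.'
console.log(extractDescription(readme))
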
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js
new file mode 100644
index 0000000000000..bb78231d83ca9
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js
@@ -0,0 +1,475 @@
+var isValidSemver = require('semver/functions/valid')
+var cleanSemver = require('semver/functions/clean')
+var validateLicense = require('validate-npm-package-license')
+var hostedGitInfo = require('hosted-git-info')
+var isBuiltinModule = require('is-core-module')
+var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies']
+var extractDescription = require('./extract_description')
+var url = require('url')
+var typos = require('./typos.json')
+
+var isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
+
+module.exports = {
+  // default warning function
+  warn: function () {},
+
+  fixRepositoryField: function (data) {
+    if (data.repositories) {
+      this.warn('repositories')
+      data.repository = data.repositories[0]
+    }
+    if (!data.repository) {
+      return this.warn('missingRepository')
+    }
+    if (typeof data.repository === 'string') {
+      data.repository = {
+        type: 'git',
+        url: data.repository,
+      }
+    }
+    var r = data.repository.url || ''
+    if (r) {
+      var hosted = hostedGitInfo.fromUrl(r)
+      if (hosted) {
+        r = data.repository.url
+          = hosted.getDefaultRepresentation() === 'shortcut' ? hosted.https() : hosted.toString()
+      }
+    }
+
+    if (r.match(/github.com\/[^/]+\/[^/]+\.git\.git$/)) {
+      this.warn('brokenGitUrl', r)
+    }
+  },
+
+  fixTypos: function (data) {
+    Object.keys(typos.topLevel).forEach(function (d) {
+      if (Object.prototype.hasOwnProperty.call(data, d)) {
+        this.warn('typo', d, typos.topLevel[d])
+      }
+    }, this)
+  },
+
+  fixScriptsField: function (data) {
+    if (!data.scripts) {
+      return
+    }
+    if (typeof data.scripts !== 'object') {
+      this.warn('nonObjectScripts')
+      delete data.scripts
+      return
+    }
+    Object.keys(data.scripts).forEach(function (k) {
+      if (typeof data.scripts[k] !== 'string') {
+        this.warn('nonStringScript')
+        delete data.scripts[k]
+      } else if (typos.script[k] && !data.scripts[typos.script[k]]) {
+        this.warn('typo', k, typos.script[k], 'scripts')
+      }
+    }, this)
+  },
+
+  fixFilesField: function (data) {
+    var files = data.files
+    if (files && !Array.isArray(files)) {
+      this.warn('nonArrayFiles')
+      delete data.files
+    } else if (data.files) {
+      data.files = data.files.filter(function (file) {
+        if (!file || typeof file !== 'string') {
+          this.warn('invalidFilename', file)
+          return false
+        } else {
+          return true
+        }
+      }, this)
+    }
+  },
+
+  fixBinField: function (data) {
+    if (!data.bin) {
+      return
+    }
+    if (typeof data.bin === 'string') {
+      var b = {}
+      var match
+      if (match = data.name.match(/^@[^/]+[/](.*)$/)) {
+        b[match[1]] = data.bin
+      } else {
+        b[data.name] = data.bin
+      }
+      data.bin = b
+    }
+  },
+
+  fixManField: function (data) {
+    if (!data.man) {
+      return
+    }
+    if (typeof data.man === 'string') {
+      data.man = [data.man]
+    }
+  },
+  fixBundleDependenciesField: function (data) {
+    var bdd = 'bundledDependencies'
+    var bd = 'bundleDependencies'
+    if (data[bdd] && !data[bd]) {
+      data[bd] = data[bdd]
+      delete data[bdd]
+    }
+    if (data[bd] && !Array.isArray(data[bd])) {
+      this.warn('nonArrayBundleDependencies')
+      delete data[bd]
+    } else if (data[bd]) {
+      data[bd] = data[bd].filter(function (filtered) {
+        if (!filtered || typeof filtered !== 'string') {
+          this.warn('nonStringBundleDependency', filtered)
+          return false
+        } else {
+          if (!data.dependencies) {
+            data.dependencies = {}
+          }
+          if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
+            this.warn('nonDependencyBundleDependency', filtered)
+            data.dependencies[filtered] = '*'
+          }
+          return true
+        }
+      }, this)
+    }
+  },
+
+  fixDependencies: function (data, strict) {
+    objectifyDeps(data, this.warn)
+    addOptionalDepsToDeps(data, this.warn)
+    this.fixBundleDependenciesField(data)
+
+    ;['dependencies', 'devDependencies'].forEach(function (deps) {
+      if (!(deps in data)) {
+        return
+      }
+      if (!data[deps] || typeof data[deps] !== 'object') {
+        this.warn('nonObjectDependencies', deps)
+        delete data[deps]
+        return
+      }
+      Object.keys(data[deps]).forEach(function (d) {
+        var r = data[deps][d]
+        if (typeof r !== 'string') {
+          this.warn('nonStringDependency', d, JSON.stringify(r))
+          delete data[deps][d]
+        }
+        var hosted = hostedGitInfo.fromUrl(data[deps][d])
+        if (hosted) {
+          data[deps][d] = hosted.toString()
+        }
+      }, this)
+    }, this)
+  },
+
+  fixModulesField: function (data) {
+    if (data.modules) {
+      this.warn('deprecatedModules')
+      delete data.modules
+    }
+  },
+
+  fixKeywordsField: function (data) {
+    if (typeof data.keywords === 'string') {
+      data.keywords = data.keywords.split(/,\s+/)
+    }
+    if (data.keywords && !Array.isArray(data.keywords)) {
+      delete data.keywords
+      this.warn('nonArrayKeywords')
+    } else if (data.keywords) {
+      data.keywords = data.keywords.filter(function (kw) {
+        if (typeof kw !== 'string' || !kw) {
+          this.warn('nonStringKeyword')
+          return false
+        } else {
+          return true
+        }
+      }, this)
+    }
+  },
+
+  fixVersionField: function (data, strict) {
+    // allow "loose" semver 1.0 versions in non-strict mode
+    // enforce strict semver 2.0 compliance in strict mode
+    var loose = !strict
+    if (!data.version) {
+      data.version = ''
+      return true
+    }
+    if (!isValidSemver(data.version, loose)) {
+      throw new Error('Invalid version: "' + data.version + '"')
+    }
+    data.version = cleanSemver(data.version, loose)
+    return true
+  },
+
+  fixPeople: function (data) {
+    modifyPeople(data, unParsePerson)
+    modifyPeople(data, parsePerson)
+  },
+
+  fixNameField: function (data, options) {
+    if (typeof options === 'boolean') {
+      options = { strict: options }
+    } else if (typeof options === 'undefined') {
+      options = {}
+    }
+    var strict = options.strict
+    if (!data.name && !strict) {
+      data.name = ''
+      return
+    }
+    if (typeof data.name !== 'string') {
+      throw new Error('name field must be a string.')
+    }
+    if (!strict) {
+      data.name = data.name.trim()
+    }
+    ensureValidName(data.name, strict, options.allowLegacyCase)
+    if (isBuiltinModule(data.name)) {
+      this.warn('conflictingName', data.name)
+    }
+  },
+
+  fixDescriptionField: function (data) {
+    if (data.description && typeof data.description !== 'string') {
+      this.warn('nonStringDescription')
+      delete data.description
+    }
+    if (data.readme && !data.description) {
+      data.description = extractDescription(data.readme)
+    }
+    if (data.description === undefined) {
+      delete data.description
+    }
+    if (!data.description) {
+      this.warn('missingDescription')
+    }
+  },
+
+  fixReadmeField: function (data) {
+    if (!data.readme) {
+      this.warn('missingReadme')
+      data.readme = 'ERROR: No README data found!'
+    }
+  },
+
+  fixBugsField: function (data) {
+    if (!data.bugs && data.repository && data.repository.url) {
+      var hosted = hostedGitInfo.fromUrl(data.repository.url)
+      if (hosted && hosted.bugs()) {
+        data.bugs = { url: hosted.bugs() }
+      }
+    } else if (data.bugs) {
+      if (typeof data.bugs === 'string') {
+        if (isEmail(data.bugs)) {
+          data.bugs = { email: data.bugs }
+        /* eslint-disable-next-line node/no-deprecated-api */
+        } else if (url.parse(data.bugs).protocol) {
+          data.bugs = { url: data.bugs }
+        } else {
+          this.warn('nonEmailUrlBugsString')
+        }
+      } else {
+        bugsTypos(data.bugs, this.warn)
+        var oldBugs = data.bugs
+        data.bugs = {}
+        if (oldBugs.url) {
+          /* eslint-disable-next-line node/no-deprecated-api */
+          if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
+            data.bugs.url = oldBugs.url
+          } else {
+            this.warn('nonUrlBugsUrlField')
+          }
+        }
+        if (oldBugs.email) {
+          if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
+            data.bugs.email = oldBugs.email
+          } else {
+            this.warn('nonEmailBugsEmailField')
+          }
+        }
+      }
+      if (!data.bugs.email && !data.bugs.url) {
+        delete data.bugs
+        this.warn('emptyNormalizedBugs')
+      }
+    }
+  },
+
+  fixHomepageField: function (data) {
+    if (!data.homepage && data.repository && data.repository.url) {
+      var hosted = hostedGitInfo.fromUrl(data.repository.url)
+      if (hosted && hosted.docs()) {
+        data.homepage = hosted.docs()
+      }
+    }
+    if (!data.homepage) {
+      return
+    }
+
+    if (typeof data.homepage !== 'string') {
+      this.warn('nonUrlHomepage')
+      return delete data.homepage
+    }
+    /* eslint-disable-next-line node/no-deprecated-api */
+    if (!url.parse(data.homepage).protocol) {
+      data.homepage = 'http://' + data.homepage
+    }
+  },
+
+  fixLicenseField: function (data) {
+    const license = data.license || data.licence
+    if (!license) {
+      return this.warn('missingLicense')
+    }
+    if (
+      typeof (license) !== 'string' ||
+      license.length < 1 ||
+      license.trim() === ''
+    ) {
+      return this.warn('invalidLicense')
+    }
+    if (!validateLicense(license).validForNewPackages) {
+      return this.warn('invalidLicense')
+    }
+  },
+}
+
+function isValidScopedPackageName (spec) {
+  if (spec.charAt(0) !== '@') {
+    return false
+  }
+
+  var rest = spec.slice(1).split('/')
+  if (rest.length !== 2) {
+    return false
+  }
+
+  return rest[0] && rest[1] &&
+    rest[0] === encodeURIComponent(rest[0]) &&
+    rest[1] === encodeURIComponent(rest[1])
+}
+
+function isCorrectlyEncodedName (spec) {
+  return !spec.match(/[/@\s+%:]/) &&
+    spec === encodeURIComponent(spec)
+}
+
+function ensureValidName (name, strict, allowLegacyCase) {
+  if (name.charAt(0) === '.' ||
+      !(isValidScopedPackageName(name) || isCorrectlyEncodedName(name)) ||
+      (strict && (!allowLegacyCase) && name !== name.toLowerCase()) ||
+      name.toLowerCase() === 'node_modules' ||
+      name.toLowerCase() === 'favicon.ico') {
+    throw new Error('Invalid name: ' + JSON.stringify(name))
+  }
+}
+
+function modifyPeople (data, fn) {
+  if (data.author) {
+    data.author = fn(data.author)
+  }['maintainers', 'contributors'].forEach(function (set) {
+    if (!Array.isArray(data[set])) {
+      return
+    }
+    data[set] = data[set].map(fn)
+  })
+  return data
+}
+
+function unParsePerson (person) {
+  if (typeof person === 'string') {
+    return person
+  }
+  var name = person.name || ''
+  var u = person.url || person.web
+  var wrappedUrl = u ? (' (' + u + ')') : ''
+  var e = person.email || person.mail
+  var wrappedEmail = e ? (' <' + e + '>') : ''
+  return name + wrappedEmail + wrappedUrl
+}
+
+function parsePerson (person) {
+  if (typeof person !== 'string') {
+    return person
+  }
+  var matchedName = person.match(/^([^(<]+)/)
+  var matchedUrl = person.match(/\(([^()]+)\)/)
+  var matchedEmail = person.match(/<([^<>]+)>/)
+  var obj = {}
+  if (matchedName && matchedName[0].trim()) {
+    obj.name = matchedName[0].trim()
+  }
+  if (matchedEmail) {
+    obj.email = matchedEmail[1]
+  }
+  if (matchedUrl) {
+    obj.url = matchedUrl[1]
+  }
+  return obj
+}
+
+function addOptionalDepsToDeps (data, warn) {
+  var o = data.optionalDependencies
+  if (!o) {
+    return
+  }
+  var d = data.dependencies || {}
+  Object.keys(o).forEach(function (k) {
+    d[k] = o[k]
+  })
+  data.dependencies = d
+}
+
+function depObjectify (deps, type, warn) {
+  if (!deps) {
+    return {}
+  }
+  if (typeof deps === 'string') {
+    deps = deps.trim().split(/[\n\r\s\t ,]+/)
+  }
+  if (!Array.isArray(deps)) {
+    return deps
+  }
+  warn('deprecatedArrayDependencies', type)
+  var o = {}
+  deps.filter(function (d) {
+    return typeof d === 'string'
+  }).forEach(function (d) {
+    d = d.trim().split(/(:?[@\s><=])/)
+    var dn = d.shift()
+    var dv = d.join('')
+    dv = dv.trim()
+    dv = dv.replace(/^@/, '')
+    o[dn] = dv
+  })
+  return o
+}
+
+function objectifyDeps (data, warn) {
+  depTypes.forEach(function (type) {
+    if (!data[type]) {
+      return
+    }
+    data[type] = depObjectify(data[type], type, warn)
+  })
+}
+
+function bugsTypos (bugs, warn) {
+  if (!bugs) {
+    return
+  }
+  Object.keys(bugs).forEach(function (k) {
+    if (typos.bugs[k]) {
+      warn('typo', k, typos.bugs[k], 'bugs')
+      bugs[typos.bugs[k]] = bugs[k]
+      delete bugs[k]
+    }
+  })
+}
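
Each fix*Field method above mutates the package data in place and reports problems through fixer.warn. A rough sketch of driving a few of them directly, with made-up sample data (normalize.js, added next, is the usual entry point):

const fixer = require('./fixer')

const data = {
  name: 'Example-Pkg ',       // trailing space; trimmed in non-strict mode
  version: 'v1.2.3',          // loose semver, cleaned to '1.2.3'
  repository: 'npm/example',  // shorthand, expanded via hosted-git-info
  bugs: 'me@example.com',     // plain email becomes { email: ... }
}

fixer.warn = (...args) => console.warn('warning:', ...args)
fixer.fixNameField(data, { strict: false })
fixer.fixVersionField(data, false)
fixer.fixRepositoryField(data)
fixer.fixBugsField(data)
console.log(data)
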
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js
new file mode 100644
index 0000000000000..3be9c86539952
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js
@@ -0,0 +1,22 @@
+var util = require('util')
+var messages = require('./warning_messages.json')
+
+module.exports = function () {
+  var args = Array.prototype.slice.call(arguments, 0)
+  var warningName = args.shift()
+  if (warningName === 'typo') {
+    return makeTypoWarning.apply(null, args)
+  } else {
+    var msgTemplate = messages[warningName] ? messages[warningName] : warningName + ": '%s'"
+    args.unshift(msgTemplate)
+    return util.format.apply(null, args)
+  }
+}
+
+function makeTypoWarning (providedName, probableName, field) {
+  if (field) {
+    providedName = field + "['" + providedName + "']"
+    probableName = field + "['" + probableName + "']"
+  }
+  return util.format(messages.typo, providedName, probableName)
+}
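
makeWarning resolves its first argument against the templates in warning_messages.json (added below) and formats the remaining arguments into the template, with 'typo' special-cased. For example:

const makeWarning = require('./make_warning')

makeWarning('brokenGitUrl', 'git://bad.example')
// -> 'Probably broken git url: git://bad.example'

makeWarning('typo', 'hompage', 'homepage')
// -> 'hompage should probably be homepage.'
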
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js
new file mode 100644
index 0000000000000..bf71d2c1e2235
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js
@@ -0,0 +1,48 @@
+module.exports = normalize
+
+var fixer = require('./fixer')
+normalize.fixer = fixer
+
+var makeWarning = require('./make_warning')
+
+var fieldsToFix = ['name', 'version', 'description', 'repository', 'modules', 'scripts',
+  'files', 'bin', 'man', 'bugs', 'keywords', 'readme', 'homepage', 'license']
+var otherThingsToFix = ['dependencies', 'people', 'typos']
+
+var thingsToFix = fieldsToFix.map(function (fieldName) {
+  return ucFirst(fieldName) + 'Field'
+})
+// two ways to do this in CoffeeScript on only one line, sub-70 chars:
+// thingsToFix = fieldsToFix.map (name) -> ucFirst(name) + "Field"
+// thingsToFix = (ucFirst(name) + "Field" for name in fieldsToFix)
+thingsToFix = thingsToFix.concat(otherThingsToFix)
+
+function normalize (data, warn, strict) {
+  if (warn === true) {
+    warn = null
+    strict = true
+  }
+  if (!strict) {
+    strict = false
+  }
+  if (!warn || data.private) {
+    warn = function (msg) { /* noop */ }
+  }
+
+  if (data.scripts &&
+      data.scripts.install === 'node-gyp rebuild' &&
+      !data.scripts.preinstall) {
+    data.gypfile = true
+  }
+  fixer.warn = function () {
+    warn(makeWarning.apply(null, arguments))
+  }
+  thingsToFix.forEach(function (thingName) {
+    fixer['fix' + ucFirst(thingName)](data, strict)
+  })
+  data._id = data.name + '@' + data.version
+}
+
+function ucFirst (string) {
+  return string.charAt(0).toUpperCase() + string.slice(1)
+}
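
normalize() is the module's main export: it runs every fixer over the data in place, routes warnings through makeWarning, and finally stamps an _id. A brief usage sketch with made-up data:

const normalize = require('./normalize')

const pkg = {
  name: 'example',
  version: '1.0.0',
  keywords: 'one, two',   // fixKeywordsField splits comma-separated strings
}

normalize(pkg, msg => console.warn(msg))
console.log(pkg._id)      // 'example@1.0.0'
console.log(pkg.keywords) // ['one', 'two']
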
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js
new file mode 100644
index 0000000000000..5fc888e5450cd
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js
@@ -0,0 +1,11 @@
+var util = require('util')
+
+module.exports = function () {
+  var args = Array.prototype.slice.call(arguments, 0)
+  args.forEach(function (arg) {
+    if (!arg) {
+      throw new TypeError('Bad arguments.')
+    }
+  })
+  return util.format.apply(null, arguments)
+}
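
safe_format is a thin wrapper over util.format that rejects falsy arguments, e.g.:

const safeFormat = require('./safe_format')

safeFormat('Invalid dependency: %s %s', 'foo', '"1.x"')
// -> 'Invalid dependency: foo "1.x"'

safeFormat('oops', '')  // throws TypeError('Bad arguments.')
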
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json b/node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json
new file mode 100644
index 0000000000000..7f9dd283b30ff
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json
@@ -0,0 +1,25 @@
+{
+  "topLevel": {
+    "dependancies": "dependencies"
+   ,"dependecies": "dependencies"
+   ,"depdenencies": "dependencies"
+   ,"devEependencies": "devDependencies"
+   ,"depends": "dependencies"
+   ,"dev-dependencies": "devDependencies"
+   ,"devDependences": "devDependencies"
+   ,"devDepenencies": "devDependencies"
+   ,"devdependencies": "devDependencies"
+   ,"repostitory": "repository"
+   ,"repo": "repository"
+   ,"prefereGlobal": "preferGlobal"
+   ,"hompage": "homepage"
+   ,"hampage": "homepage"
+   ,"autohr": "author"
+   ,"autor": "author"
+   ,"contributers": "contributors"
+   ,"publicationConfig": "publishConfig"
+   ,"script": "scripts"
+  },
+  "bugs": { "web": "url", "name": "url" },
+  "script": { "server": "start", "tests": "test" }
+}
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json b/node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json
new file mode 100644
index 0000000000000..4890f506ed965
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json
@@ -0,0 +1,30 @@
+{
+  "repositories": "'repositories' (plural) Not supported. Please pick one as the 'repository' field"
+  ,"missingRepository": "No repository field."
+  ,"brokenGitUrl": "Probably broken git url: %s"
+  ,"nonObjectScripts": "scripts must be an object"
+  ,"nonStringScript": "script values must be string commands"
+  ,"nonArrayFiles": "Invalid 'files' member"
+  ,"invalidFilename": "Invalid filename in 'files' list: %s"
+  ,"nonArrayBundleDependencies": "Invalid 'bundleDependencies' list. Must be array of package names"
+  ,"nonStringBundleDependency": "Invalid bundleDependencies member: %s"
+  ,"nonDependencyBundleDependency": "Non-dependency in bundleDependencies: %s"
+  ,"nonObjectDependencies": "%s field must be an object"
+  ,"nonStringDependency": "Invalid dependency: %s %s"
+  ,"deprecatedArrayDependencies": "specifying %s as array is deprecated"
+  ,"deprecatedModules": "modules field is deprecated"
+  ,"nonArrayKeywords": "keywords should be an array of strings"
+  ,"nonStringKeyword": "keywords should be an array of strings"
+  ,"conflictingName": "%s is also the name of a node core module."
+  ,"nonStringDescription": "'description' field should be a string"
+  ,"missingDescription": "No description"
+  ,"missingReadme": "No README data"
+  ,"missingLicense": "No license field."
+  ,"nonEmailUrlBugsString": "Bug string field must be url, email, or {email,url}"
+  ,"nonUrlBugsUrlField": "bugs.url field must be a string url. Deleted."
+  ,"nonEmailBugsEmailField": "bugs.email field must be a string email. Deleted."
+  ,"emptyNormalizedBugs": "Normalized value of bugs field is an empty object. Deleted."
+  ,"nonUrlHomepage": "homepage field must be a string url. Deleted."
+  ,"invalidLicense": "license should be a valid SPDX license expression"
+  ,"typo": "%s should probably be %s."
+}
diff --git a/node_modules/init-package-json/node_modules/npm-package-arg/package.json b/node_modules/init-package-json/node_modules/normalize-package-data/package.json
similarity index 55%
rename from node_modules/init-package-json/node_modules/npm-package-arg/package.json
rename to node_modules/init-package-json/node_modules/normalize-package-data/package.json
index bb9e71b258a93..48d2371d4a66b 100644
--- a/node_modules/init-package-json/node_modules/npm-package-arg/package.json
+++ b/node_modules/init-package-json/node_modules/normalize-package-data/package.json
@@ -1,59 +1,62 @@
 {
-  "name": "npm-package-arg",
-  "version": "10.1.0",
-  "description": "Parse the things that can be arguments to `npm install`",
-  "main": "./lib/npa.js",
-  "directories": {
-    "test": "test"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "dependencies": {
-    "hosted-git-info": "^6.0.0",
-    "proc-log": "^3.0.0",
-    "semver": "^7.3.5",
-    "validate-npm-package-name": "^5.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.10.0",
-    "tap": "^16.0.1"
+  "name": "normalize-package-data",
+  "version": "6.0.0",
+  "author": "GitHub Inc.",
+  "description": "Normalizes data that can be found in package.json files.",
+  "license": "BSD-2-Clause",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/normalize-package-data.git"
   },
+  "main": "lib/normalize.js",
   "scripts": {
     "test": "tap",
-    "snap": "tap",
     "npmclilint": "npmcli-lint",
     "lint": "eslint \"**/*.js\"",
     "lintfix": "npm run lint -- --fix",
     "posttest": "npm run lint",
     "postsnap": "npm run lintfix --",
     "postlint": "template-oss-check",
+    "snap": "tap",
     "template-oss-apply": "template-oss-apply --force"
   },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/npm-package-arg.git"
+  "dependencies": {
+    "hosted-git-info": "^7.0.0",
+    "is-core-module": "^2.8.1",
+    "semver": "^7.3.5",
+    "validate-npm-package-license": "^3.0.4"
   },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/npm-package-arg/issues"
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.1"
   },
-  "homepage": "https://github.com/npm/npm-package-arg",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.18.0",
+    "publish": "true",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
   },
   "tap": {
-    "branches": 97,
+    "branches": 86,
+    "functions": 92,
+    "lines": 86,
+    "statements": 86,
     "nyc-arg": [
       "--exclude",
       "tap-snapshots/**"
     ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.10.0"
   }
 }
diff --git a/node_modules/init-package-json/node_modules/npm-package-arg/LICENSE b/node_modules/init-package-json/node_modules/npm-package-arg/LICENSE
deleted file mode 100644
index 19cec97b18468..0000000000000
--- a/node_modules/init-package-json/node_modules/npm-package-arg/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js b/node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js
deleted file mode 100644
index 36bd18cd9f9a6..0000000000000
--- a/node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js
+++ /dev/null
@@ -1,431 +0,0 @@
-'use strict'
-module.exports = npa
-module.exports.resolve = resolve
-module.exports.toPurl = toPurl
-module.exports.Result = Result
-
-const url = require('url')
-const HostedGit = require('hosted-git-info')
-const semver = require('semver')
-const path = global.FAKE_WINDOWS ? require('path').win32 : require('path')
-const validatePackageName = require('validate-npm-package-name')
-const { homedir } = require('os')
-const log = require('proc-log')
-
-const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
-const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
-const isURL = /^(?:git[+])?[a-z]+:/i
-const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
-const isFilename = /[.](?:tgz|tar.gz|tar)$/i
-
-function npa (arg, where) {
-  let name
-  let spec
-  if (typeof arg === 'object') {
-    if (arg instanceof Result && (!where || where === arg.where)) {
-      return arg
-    } else if (arg.name && arg.rawSpec) {
-      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
-    } else {
-      return npa(arg.raw, where || arg.where)
-    }
-  }
-  const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@')
-  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
-  if (isURL.test(arg)) {
-    spec = arg
-  } else if (isGit.test(arg)) {
-    spec = `git+ssh://${arg}`
-  } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) {
-    spec = arg
-  } else if (nameEndsAt > 0) {
-    name = namePart
-    spec = arg.slice(nameEndsAt + 1) || '*'
-  } else {
-    const valid = validatePackageName(arg)
-    if (valid.validForOldPackages) {
-      name = arg
-      spec = '*'
-    } else {
-      spec = arg
-    }
-  }
-  return resolve(name, spec, where, arg)
-}
-
-const isFilespec = isWindows ? /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
-
-function resolve (name, spec, where, arg) {
-  const res = new Result({
-    raw: arg,
-    name: name,
-    rawSpec: spec,
-    fromArgument: arg != null,
-  })
-
-  if (name) {
-    res.setName(name)
-  }
-
-  if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) {
-    return fromFile(res, where)
-  } else if (spec && /^npm:/i.test(spec)) {
-    return fromAlias(res, where)
-  }
-
-  const hosted = HostedGit.fromUrl(spec, {
-    noGitPlus: true,
-    noCommittish: true,
-  })
-  if (hosted) {
-    return fromHostedGit(res, hosted)
-  } else if (spec && isURL.test(spec)) {
-    return fromURL(res)
-  } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) {
-    return fromFile(res, where)
-  } else {
-    return fromRegistry(res)
-  }
-}
-
-const defaultRegistry = 'https://registry.npmjs.org'
-
-function toPurl (arg, reg = defaultRegistry) {
-  const res = npa(arg)
-
-  if (res.type !== 'version') {
-    throw invalidPurlType(res.type, res.raw)
-  }
-
-  // URI-encode leading @ of scoped packages
-  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
-  if (reg !== defaultRegistry) {
-    purl += '?repository_url=' + reg
-  }
-
-  return purl
-}
-
-function invalidPackageName (name, valid, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
-  err.code = 'EINVALIDPACKAGENAME'
-  return err
-}
-
-function invalidTagName (name, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
-  err.code = 'EINVALIDTAGNAME'
-  return err
-}
-
-function invalidPurlType (type, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
-  err.code = 'EINVALIDPURLTYPE'
-  return err
-}
-
-function Result (opts) {
-  this.type = opts.type
-  this.registry = opts.registry
-  this.where = opts.where
-  if (opts.raw == null) {
-    this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec
-  } else {
-    this.raw = opts.raw
-  }
-
-  this.name = undefined
-  this.escapedName = undefined
-  this.scope = undefined
-  this.rawSpec = opts.rawSpec || ''
-  this.saveSpec = opts.saveSpec
-  this.fetchSpec = opts.fetchSpec
-  if (opts.name) {
-    this.setName(opts.name)
-  }
-  this.gitRange = opts.gitRange
-  this.gitCommittish = opts.gitCommittish
-  this.gitSubdir = opts.gitSubdir
-  this.hosted = opts.hosted
-}
-
-Result.prototype.setName = function (name) {
-  const valid = validatePackageName(name)
-  if (!valid.validForOldPackages) {
-    throw invalidPackageName(name, valid, this.raw)
-  }
-
-  this.name = name
-  this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
-  // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
-  this.escapedName = name.replace('/', '%2f')
-  return this
-}
-
-Result.prototype.toString = function () {
-  const full = []
-  if (this.name != null && this.name !== '') {
-    full.push(this.name)
-  }
-  const spec = this.saveSpec || this.fetchSpec || this.rawSpec
-  if (spec != null && spec !== '') {
-    full.push(spec)
-  }
-  return full.length ? full.join('@') : this.raw
-}
-
-Result.prototype.toJSON = function () {
-  const result = Object.assign({}, this)
-  delete result.hosted
-  return result
-}
-
-function setGitCommittish (res, committish) {
-  if (!committish) {
-    res.gitCommittish = null
-    return res
-  }
-
-  // for each :: separated item:
-  for (const part of committish.split('::')) {
-    // if the item has no : then it is a commit-ish
-    if (!part.includes(':')) {
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a committish')
-      }
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a second committish')
-      }
-      res.gitCommittish = part
-      continue
-    }
-    // split on name:value
-    const [name, value] = part.split(':')
-    // if name is semver do semver lookup of ref or tag
-    if (name === 'semver') {
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a semver range')
-      }
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a second semver range')
-      }
-      res.gitRange = decodeURIComponent(value)
-      continue
-    }
-    if (name === 'path') {
-      if (res.gitSubdir) {
-        throw new Error('cannot override existing path with a second path')
-      }
-      res.gitSubdir = `/${value}`
-      continue
-    }
-    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
-  }
-
-  return res
-}
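-
-// For illustration, a few committish strings and the fields they set:
-//   setGitCommittish(res, 'deadbeef')        => res.gitCommittish === 'deadbeef'
-//   setGitCommittish(res, 'semver:%5E1.0.0') => res.gitRange === '^1.0.0'
-//   setGitCommittish(res, 'path:packages/a::semver:1.x')
-//     => res.gitSubdir === '/packages/a' and res.gitRange === '1.x'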
-
-function fromFile (res, where) {
-  if (!where) {
-    where = process.cwd()
-  }
-  res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory'
-  res.where = where
-
-  // always put the '/' on where when resolving urls, or else
-  // file:foo from /path/to/bar goes to /path/to/foo, when we want
-  // it to be /path/to/bar/foo
-
-  let specUrl
-  let resolvedUrl
-  const prefix = (!/^file:/.test(res.rawSpec) ? 'file:' : '')
-  const rawWithPrefix = prefix + res.rawSpec
-  let rawNoPrefix = rawWithPrefix.replace(/^file:/, '')
-  try {
-    resolvedUrl = new url.URL(rawWithPrefix, `file://${path.resolve(where)}/`)
-    specUrl = new url.URL(rawWithPrefix)
-  } catch (originalError) {
-    const er = new Error('Invalid file: URL, must comply with RFC 8909')
-    throw Object.assign(er, {
-      raw: res.rawSpec,
-      spec: res,
-      where,
-      originalError,
-    })
-  }
-
-  // environment switch for testing
-  if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') {
-    // XXX backwards compatibility: not compliant with RFC 8909.
-    // Remove when we want a breaking change to come into RFC compliance.
-    if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
-      const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
-      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
-      specUrl = new url.URL(rawSpec)
-      rawNoPrefix = rawSpec.replace(/^file:/, '')
-    }
-    // turn file:/../foo into file:../foo
-    // for 1, 2 or 3 leading slashes since we attempted
-    // in the previous step to make it a file protocol url with a leading slash
-    if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
-      const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
-      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
-      specUrl = new url.URL(rawSpec)
-      rawNoPrefix = rawSpec.replace(/^file:/, '')
-    }
-    // XXX end 8909 violation backwards compatibility section
-  }
-
-  // file:foo - relative url to ./foo
-  // file:/foo - absolute path /foo
-  // file:///foo - absolute path to /foo, no authority host
-  // file://localhost/foo - absolute path to /foo, on localhost
-  // file://foo - absolute path to / on foo host (error!)
-  if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
-    const msg = `Invalid file: URL, must be absolute if // present`
-    throw Object.assign(new Error(msg), {
-      raw: res.rawSpec,
-      parsed: resolvedUrl,
-    })
-  }
-
-  // turn /C:/blah into just C:/blah on windows
-  let specPath = decodeURIComponent(specUrl.pathname)
-  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
-  if (isWindows) {
-    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
-    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
-  }
-
-  // replace ~ with homedir, but keep the ~ in the saveSpec
-  // otherwise, make it relative to where param
-  if (/^\/~(\/|$)/.test(specPath)) {
-    res.saveSpec = `file:${specPath.substr(1)}`
-    resolvedPath = path.resolve(homedir(), specPath.substr(3))
-  } else if (!path.isAbsolute(rawNoPrefix)) {
-    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
-  } else {
-    res.saveSpec = `file:${path.resolve(resolvedPath)}`
-  }
-
-  res.fetchSpec = path.resolve(where, resolvedPath)
-  return res
-}
-
-function fromHostedGit (res, hosted) {
-  res.type = 'git'
-  res.hosted = hosted
-  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
-  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
-  return setGitCommittish(res, hosted.committish)
-}
-
-function unsupportedURLType (protocol, spec) {
-  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
-  err.code = 'EUNSUPPORTEDPROTOCOL'
-  return err
-}
-
-function matchGitScp (spec) {
-  // git ssh specifiers are overloaded to also allow scp-style git
-  // specifiers, so we have to parse those out and treat them specially.
-  // They are NOT true URIs, so we can't hand them to `url.parse`.
-  //
-  // This regex looks for things that look like:
-  // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
-  //
-  // ...and various combinations. The username in the beginning is *required*.
-  const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
-  return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && {
-    fetchSpec: matched[1],
-    gitCommittish: matched[2] == null ? null : matched[2],
-  }
-}
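-
-// For illustration:
-//   matchGitScp('git+ssh://git@host.xz:user/repo.git#v1.0.0')
-//     => { fetchSpec: 'git@host.xz:user/repo.git', gitCommittish: 'v1.0.0' }
-//   matchGitScp('git+ssh://git@host.xz:22/user/repo.git')
-//     => falsy, since a port number means this is a real URL rather than
-//        scp-style syntax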
-
-function fromURL (res) {
-  // eslint-disable-next-line node/no-deprecated-api
-  const urlparse = url.parse(res.rawSpec)
-  res.saveSpec = res.rawSpec
-  // check the protocol, and then see if it's git or not
-  switch (urlparse.protocol) {
-    case 'git:':
-    case 'git+http:':
-    case 'git+https:':
-    case 'git+rsync:':
-    case 'git+ftp:':
-    case 'git+file:':
-    case 'git+ssh:': {
-      res.type = 'git'
-      const match = urlparse.protocol === 'git+ssh:' ? matchGitScp(res.rawSpec)
-        : null
-      if (match) {
-        setGitCommittish(res, match.gitCommittish)
-        res.fetchSpec = match.fetchSpec
-      } else {
-        setGitCommittish(res, urlparse.hash != null ? urlparse.hash.slice(1) : '')
-        urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '')
-        if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) {
-          // keep the drive letter : on windows file paths
-          urlparse.host += ':'
-          urlparse.hostname += ':'
-        }
-        delete urlparse.hash
-        res.fetchSpec = url.format(urlparse)
-      }
-      break
-    }
-    case 'http:':
-    case 'https:':
-      res.type = 'remote'
-      res.fetchSpec = res.saveSpec
-      break
-
-    default:
-      throw unsupportedURLType(urlparse.protocol, res.rawSpec)
-  }
-
-  return res
-}
-
-function fromAlias (res, where) {
-  const subSpec = npa(res.rawSpec.substr(4), where)
-  if (subSpec.type === 'alias') {
-    throw new Error('nested aliases not supported')
-  }
-
-  if (!subSpec.registry) {
-    throw new Error('aliases only work for registry deps')
-  }
-
-  res.subSpec = subSpec
-  res.registry = true
-  res.type = 'alias'
-  res.saveSpec = null
-  res.fetchSpec = null
-  return res
-}
-
-function fromRegistry (res) {
-  res.registry = true
-  const spec = res.rawSpec.trim()
-  // no save spec for registry components, since we save based on the
-  // fetched version rather than on the argument, so this can't compute it.
-  res.saveSpec = null
-  res.fetchSpec = spec
-  const version = semver.valid(spec, true)
-  const range = semver.validRange(spec, true)
-  if (version) {
-    res.type = 'version'
-  } else if (range) {
-    res.type = 'range'
-  } else {
-    if (encodeURIComponent(spec) !== spec) {
-      throw invalidTagName(spec, res.raw)
-    }
-    res.type = 'tag'
-  }
-  return res
-}
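-
-// For illustration, how registry specs are classified:
-//   npa('foo@1.2.3').type  === 'version'
-//   npa('foo@^1.2.3').type === 'range'
-//   npa('foo@latest').type === 'tag'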
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/LICENSE b/node_modules/init-package-json/node_modules/read-package-json/LICENSE
similarity index 93%
rename from node_modules/init-package-json/node_modules/hosted-git-info/LICENSE
rename to node_modules/init-package-json/node_modules/read-package-json/LICENSE
index 45055763dc838..052085c436514 100644
--- a/node_modules/init-package-json/node_modules/hosted-git-info/LICENSE
+++ b/node_modules/init-package-json/node_modules/read-package-json/LICENSE
@@ -1,4 +1,6 @@
-Copyright (c) 2015, Rebecca Turner
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter
 
 Permission to use, copy, modify, and/or distribute this software for any
 purpose with or without fee is hereby granted, provided that the above
diff --git a/node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js b/node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js
new file mode 100644
index 0000000000000..d35f09ebd208f
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js
@@ -0,0 +1,589 @@
+var fs = require('fs')
+
+var path = require('path')
+
+var { glob } = require('glob')
+var normalizeData = require('normalize-package-data')
+var safeJSON = require('json-parse-even-better-errors')
+var util = require('util')
+var normalizePackageBin = require('npm-normalize-package-bin')
+
+module.exports = readJson
+
+// put more stuff on here to customize.
+readJson.extraSet = [
+  bundleDependencies,
+  gypfile,
+  serverjs,
+  scriptpath,
+  authors,
+  readme,
+  mans,
+  bins,
+  githead,
+  fillTypes,
+]
+
+var typoWarned = {}
+var cache = {}
+
+function readJson (file, log_, strict_, cb_) {
+  var log, strict, cb
+  for (var i = 1; i < arguments.length - 1; i++) {
+    if (typeof arguments[i] === 'boolean') {
+      strict = arguments[i]
+    } else if (typeof arguments[i] === 'function') {
+      log = arguments[i]
+    }
+  }
+
+  if (!log) {
+    log = function () {}
+  }
+  cb = arguments[arguments.length - 1]
+
+  readJson_(file, log, strict, cb)
+}
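+
+// The log and strict arguments are optional and are detected by type, so for
+// example all of these are valid calls:
+//   readJson('/x/package.json', cb)
+//   readJson('/x/package.json', console.error, cb)
+//   readJson('/x/package.json', true, cb)
+//   readJson('/x/package.json', console.error, true, cb)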
+
+function readJson_ (file, log, strict, cb) {
+  fs.readFile(file, 'utf8', function (er, d) {
+    parseJson(file, er, d, log, strict, cb)
+  })
+}
+
+function stripBOM (content) {
+  // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
+  // because the buffer-to-string conversion in `fs.readFile()`
+  // translates it to FEFF, the UTF-16 BOM.
+  if (content.charCodeAt(0) === 0xFEFF) {
+    content = content.slice(1)
+  }
+  return content
+}
+
+function jsonClone (obj) {
+  if (obj == null) {
+    return obj
+  } else if (Array.isArray(obj)) {
+    var newarr = new Array(obj.length)
+    for (var ii in obj) {
+      newarr[ii] = jsonClone(obj[ii])
+    }
+    return newarr
+  } else if (typeof obj === 'object') {
+    var newobj = {}
+    for (var kk in obj) {
+      newobj[kk] = jsonClone(obj[kk])
+    }
+    return newobj
+  } else {
+    return obj
+  }
+}
+
+function parseJson (file, er, d, log, strict, cb) {
+  if (er && er.code === 'ENOENT') {
+    return fs.stat(path.dirname(file), function (err, stat) {
+      if (!err && stat && !stat.isDirectory()) {
+        // ENOTDIR isn't used on Windows, but npm expects it.
+        er = Object.create(er)
+        er.code = 'ENOTDIR'
+        return cb(er)
+      } else {
+        return indexjs(file, er, log, strict, cb)
+      }
+    })
+  }
+  if (er) {
+    return cb(er)
+  }
+
+  if (cache[d]) {
+    return cb(null, jsonClone(cache[d]))
+  }
+
+  var data
+
+  try {
+    data = safeJSON(stripBOM(d))
+    for (var key in data) {
+      if (/^_/.test(key)) {
+        delete data[key]
+      }
+    }
+  } catch (jsonErr) {
+    data = parseIndex(d)
+    if (!data) {
+      return cb(parseError(jsonErr, file))
+    }
+  }
+  extrasCached(file, d, data, log, strict, cb)
+}
+
+function extrasCached (file, d, data, log, strict, cb) {
+  extras(file, data, log, strict, function (err, extrasData) {
+    if (!err) {
+      cache[d] = jsonClone(extrasData)
+    }
+    cb(err, extrasData)
+  })
+}
+
+function indexjs (file, er, log, strict, cb) {
+  if (path.basename(file) === 'index.js') {
+    return cb(er)
+  }
+
+  var index = path.resolve(path.dirname(file), 'index.js')
+  fs.readFile(index, 'utf8', function (er2, d) {
+    if (er2) {
+      return cb(er)
+    }
+
+    if (cache[d]) {
+      return cb(null, cache[d])
+    }
+
+    var data = parseIndex(d)
+    if (!data) {
+      return cb(er)
+    }
+
+    extrasCached(file, d, data, log, strict, cb)
+  })
+}
+
+readJson.extras = extras
+function extras (file, data, log_, strict_, cb_) {
+  var log, strict, cb
+  for (var i = 2; i < arguments.length - 1; i++) {
+    if (typeof arguments[i] === 'boolean') {
+      strict = arguments[i]
+    } else if (typeof arguments[i] === 'function') {
+      log = arguments[i]
+    }
+  }
+
+  if (!log) {
+    log = function () {}
+  }
+  cb = arguments[i]
+
+  var set = readJson.extraSet
+  var n = set.length
+  var errState = null
+  set.forEach(function (fn) {
+    fn(file, data, then)
+  })
+
+  function then (er) {
+    if (errState) {
+      return
+    }
+    if (er) {
+      return cb(errState = er)
+    }
+    if (--n > 0) {
+      return
+    }
+    final(file, data, log, strict, cb)
+  }
+}
+
+function scriptpath (file, data, cb) {
+  if (!data.scripts) {
+    return cb(null, data)
+  }
+  var k = Object.keys(data.scripts)
+  k.forEach(scriptpath_, data.scripts)
+  cb(null, data)
+}
+
+function scriptpath_ (key) {
+  var s = this[key]
+  // This is never allowed, and only causes problems
+  if (typeof s !== 'string') {
+    return delete this[key]
+  }
+
+  var spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
+  if (s.match(spre)) {
+    this[key] = this[key].replace(spre, '')
+  }
+}
+
+function gypfile (file, data, cb) {
+  var dir = path.dirname(file)
+  var s = data.scripts || {}
+  if (s.install || s.preinstall) {
+    return cb(null, data)
+  }
+
+  if (data.gypfile === false) {
+    return cb(null, data)
+  }
+  glob('*.gyp', { cwd: dir })
+    .then(files => gypfile_(file, data, files, cb))
+    .catch(er => cb(er))
+}
+
+function gypfile_ (file, data, files, cb) {
+  if (!files.length) {
+    return cb(null, data)
+  }
+  var s = data.scripts || {}
+  s.install = 'node-gyp rebuild'
+  data.scripts = s
+  data.gypfile = true
+  return cb(null, data)
+}
+
+function serverjs (file, data, cb) {
+  var dir = path.dirname(file)
+  var s = data.scripts || {}
+  if (s.start) {
+    return cb(null, data)
+  }
+  fs.access(path.join(dir, 'server.js'), (err) => {
+    if (!err) {
+      s.start = 'node server.js'
+      data.scripts = s
+    }
+    return cb(null, data)
+  })
+}
+
+function authors (file, data, cb) {
+  if (data.contributors) {
+    return cb(null, data)
+  }
+  var af = path.resolve(path.dirname(file), 'AUTHORS')
+  fs.readFile(af, 'utf8', function (er, ad) {
+    // ignore the error; the AUTHORS file is optional
+    if (er) {
+      return cb(null, data)
+    }
+    authors_(file, data, ad, cb)
+  })
+}
+
+function authors_ (file, data, ad, cb) {
+  ad = ad.split(/\r?\n/g).map(function (line) {
+    return line.replace(/^\s*#.*$/, '').trim()
+  }).filter(function (line) {
+    return line
+  })
+  data.contributors = ad
+  return cb(null, data)
+}
+
+function readme (file, data, cb) {
+  if (data.readme) {
+    return cb(null, data)
+  }
+  var dir = path.dirname(file)
+  var globOpts = { cwd: dir, nocase: true, mark: true }
+  glob('{README,README.*}', globOpts)
+    .then(files => {
+      // don't accept directories.
+      files = files.filter(function (filtered) {
+        return !filtered.match(/\/$/)
+      })
+      if (!files.length) {
+        return cb()
+      }
+      var fn = preferMarkdownReadme(files)
+      var rm = path.resolve(dir, fn)
+      return readme_(file, data, rm, cb)
+    })
+    .catch(er => cb(er))
+}
+
+function preferMarkdownReadme (files) {
+  var fallback = 0
+  var re = /\.m?a?r?k?d?o?w?n?$/i
+  for (var i = 0; i < files.length; i++) {
+    if (files[i].match(re)) {
+      return files[i]
+    } else if (files[i].match(/README$/)) {
+      fallback = i
+    }
+  }
+  // prefer README.md, followed by README; otherwise, return
+  // the first filename (which could be README)
+  return files[fallback]
+}
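+
+// For illustration:
+//   preferMarkdownReadme(['README', 'README.md'])  => 'README.md'
+//   preferMarkdownReadme(['README.txt', 'README']) => 'README'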
+
+function readme_ (file, data, rm, cb) {
+  var rmfn = path.basename(rm)
+  fs.readFile(rm, 'utf8', function (er, rmData) {
+    // maybe not readable, or something.
+    if (er) {
+      return cb()
+    }
+    data.readme = rmData
+    data.readmeFilename = rmfn
+    return cb(er, data)
+  })
+}
+
+function mans (file, data, cb) {
+  let cwd = data.directories && data.directories.man
+  if (data.man || !cwd) {
+    return cb(null, data)
+  }
+  const dirname = path.dirname(file)
+  cwd = path.resolve(path.dirname(file), cwd)
+  glob('**/*.[0-9]', { cwd })
+    .then(mansGlob => {
+      data.man = mansGlob.map(man =>
+        path.relative(dirname, path.join(cwd, man)).split(path.sep).join('/')
+      )
+      return cb(null, data)
+    })
+    .catch(er => cb(er))
+}
+
+function bins (file, data, cb) {
+  data = normalizePackageBin(data)
+
+  var m = data.directories && data.directories.bin
+  if (data.bin || !m) {
+    return cb(null, data)
+  }
+
+  m = path.resolve(path.dirname(file), path.join('.', path.join('/', m)))
+  glob('**', { cwd: m })
+    .then(binsGlob => bins_(file, data, binsGlob, cb))
+    .catch(er => cb(er))
+}
+
+function bins_ (file, data, binsGlob, cb) {
+  var m = (data.directories && data.directories.bin) || '.'
+  data.bin = binsGlob.reduce(function (acc, mf) {
+    if (mf && mf.charAt(0) !== '.') {
+      var f = path.basename(mf)
+      acc[f] = path.join(m, mf)
+    }
+    return acc
+  }, {})
+  return cb(null, normalizePackageBin(data))
+}
+
+function bundleDependencies (file, data, cb) {
+  var bd = 'bundleDependencies'
+  var bdd = 'bundledDependencies'
+  // normalize key name
+  if (data[bdd] !== undefined) {
+    if (data[bd] === undefined) {
+      data[bd] = data[bdd]
+    }
+    delete data[bdd]
+  }
+  if (data[bd] === false) {
+    delete data[bd]
+  } else if (data[bd] === true) {
+    data[bd] = Object.keys(data.dependencies || {})
+  } else if (data[bd] !== undefined && !Array.isArray(data[bd])) {
+    delete data[bd]
+  }
+  return cb(null, data)
+}
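+
+// For illustration, how the field is normalized:
+//   { bundledDependencies: ['a'] }                        => bundleDependencies: ['a']
+//   { bundleDependencies: true, dependencies: { a: '1' } } => bundleDependencies: ['a']
+//   { bundleDependencies: false }                         => field removed entirely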
+
+function githead (file, data, cb) {
+  if (data.gitHead) {
+    return cb(null, data)
+  }
+  var dir = path.dirname(file)
+  var head = path.resolve(dir, '.git/HEAD')
+  fs.readFile(head, 'utf8', function (er, headData) {
+    if (er) {
+      var parent = path.dirname(dir)
+      if (parent === dir) {
+        return cb(null, data)
+      }
+      return githead(dir, data, cb)
+    }
+    githead_(data, dir, headData, cb)
+  })
+}
+
+function githead_ (data, dir, head, cb) {
+  if (!head.match(/^ref: /)) {
+    data.gitHead = head.trim()
+    return cb(null, data)
+  }
+  var headRef = head.replace(/^ref: /, '').trim()
+  var headFile = path.resolve(dir, '.git', headRef)
+  fs.readFile(headFile, 'utf8', function (er, headData) {
+    if (er || !headData) {
+      var packFile = path.resolve(dir, '.git/packed-refs')
+      return fs.readFile(packFile, 'utf8', function (readFileErr, refs) {
+        if (readFileErr || !refs) {
+          return cb(null, data)
+        }
+        refs = refs.split('\n')
+        for (var i = 0; i < refs.length; i++) {
+          var match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
+          if (match && match[2].trim() === headRef) {
+            data.gitHead = match[1]
+            break
+          }
+        }
+        return cb(null, data)
+      })
+    }
+    headData = headData.replace(/^ref: /, '').trim()
+    data.gitHead = headData
+    return cb(null, data)
+  })
+}
+
+/**
+ * Warn if the bin references don't point to anything.  This might be better in
+ * normalize-package-data if it had access to the file path.
+ */
+function checkBinReferences_ (file, data, warn, cb) {
+  if (!(data.bin instanceof Object)) {
+    return cb()
+  }
+
+  var keys = Object.keys(data.bin)
+  var keysLeft = keys.length
+  if (!keysLeft) {
+    return cb()
+  }
+
+  function handleExists (relName, result) {
+    keysLeft--
+    if (!result) {
+      warn('No bin file found at ' + relName)
+    }
+    if (!keysLeft) {
+      cb()
+    }
+  }
+
+  keys.forEach(function (key) {
+    var dirName = path.dirname(file)
+    var relName = data.bin[key]
+    /* istanbul ignore if - impossible, bins have been normalized */
+    if (typeof relName !== 'string') {
+      var msg = 'Bin filename for ' + key +
+        ' is not a string: ' + util.inspect(relName)
+      warn(msg)
+      delete data.bin[key]
+      handleExists(relName, true)
+      return
+    }
+    var binPath = path.resolve(dirName, relName)
+    fs.stat(binPath, (err) => handleExists(relName, !err))
+  })
+}
+
+function final (file, data, log, strict, cb) {
+  var pId = makePackageId(data)
+
+  function warn (msg) {
+    if (typoWarned[pId]) {
+      return
+    }
+    if (log) {
+      log('package.json', pId, msg)
+    }
+  }
+
+  try {
+    normalizeData(data, warn, strict)
+  } catch (error) {
+    return cb(error)
+  }
+
+  checkBinReferences_(file, data, warn, function () {
+    typoWarned[pId] = true
+    cb(null, data)
+  })
+}
+
+function fillTypes (file, data, cb) {
+  var index = data.main || 'index.js'
+
+  if (typeof index !== 'string') {
+    return cb(new TypeError('The "main" attribute must be of type string.'))
+  }
+
+  // TODO exports is much more complicated than this in verbose format
+  // We need to support for instance
+
+  // "exports": {
+  //   ".": [
+  //     {
+  //       "default": "./lib/npm.js"
+  //     },
+  //     "./lib/npm.js"
+  //   ],
+  //   "./package.json": "./package.json"
+  // },
+  // as well as conditional exports
+
+  // if (data.exports && typeof data.exports === 'string') {
+  //   index = data.exports
+  // }
+
+  // if (data.exports && data.exports['.']) {
+  //   index = data.exports['.']
+  //   if (typeof index !== 'string') {
+  //   }
+  // }
+
+  var extless =
+    path.join(path.dirname(index), path.basename(index, path.extname(index)))
+  var dts = `./${extless}.d.ts`
+  var dtsPath = path.join(path.dirname(file), dts)
+  var hasDTSFields = 'types' in data || 'typings' in data
+  if (!hasDTSFields && fs.existsSync(dtsPath)) {
+    data.types = dts.split(path.sep).join('/')
+  }
+
+  cb(null, data)
+}
+
+function makePackageId (data) {
+  var name = cleanString(data.name)
+  var ver = cleanString(data.version)
+  return name + '@' + ver
+}
+
+function cleanString (str) {
+  return (!str || typeof (str) !== 'string') ? '' : str.trim()
+}
+
+// /**package { "name": "foo", "version": "1.2.3", ... } **/
+function parseIndex (data) {
+  data = data.split(/^\/\*\*package(?:\s|$)/m)
+
+  if (data.length < 2) {
+    return null
+  }
+  data = data[1]
+  data = data.split(/\*\*\/$/m)
+
+  if (data.length < 2) {
+    return null
+  }
+  data = data[0]
+  data = data.replace(/^\s*\*/mg, '')
+
+  try {
+    return safeJSON(data)
+  } catch (er) {
+    return null
+  }
+}
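+
+// For illustration, an index.js containing:
+//   /**package
+//    * { "name": "foo", "version": "1.2.3" }
+//    **/
+// parses to { name: 'foo', version: '1.2.3' }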
+
+function parseError (ex, file) {
+  var e = new Error('Failed to parse json\n' + ex.message)
+  e.code = 'EJSONPARSE'
+  e.path = file
+  return e
+}
diff --git a/node_modules/init-package-json/node_modules/read-package-json/package.json b/node_modules/init-package-json/node_modules/read-package-json/package.json
new file mode 100644
index 0000000000000..01061f2bc2792
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/read-package-json/package.json
@@ -0,0 +1,65 @@
+{
+  "name": "read-package-json",
+  "version": "7.0.0",
+  "author": "GitHub Inc.",
+  "description": "The thing npm uses to read package.json files with semantics and defaults and validation",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/read-package-json.git"
+  },
+  "main": "lib/read-json.js",
+  "scripts": {
+    "prerelease": "npm t",
+    "postrelease": "npm publish && git push --follow-tags",
+    "release": "standard-version -s",
+    "test": "tap",
+    "npmclilint": "npmcli-lint",
+    "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "dependencies": {
+    "glob": "^10.2.2",
+    "json-parse-even-better-errors": "^3.0.0",
+    "normalize-package-data": "^6.0.0",
+    "npm-normalize-package-bin": "^3.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.1"
+  },
+  "license": "ISC",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^16.14.0 || >=18.0.0"
+  },
+  "tap": {
+    "branches": 73,
+    "functions": 77,
+    "lines": 77,
+    "statements": 77,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.18.0",
+    "publish": "true",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
+  }
+}
diff --git a/node_modules/init-package-json/package.json b/node_modules/init-package-json/package.json
index e2cb1fe25ebba..a164169a74df3 100644
--- a/node_modules/init-package-json/package.json
+++ b/node_modules/init-package-json/package.json
@@ -1,6 +1,6 @@
 {
   "name": "init-package-json",
-  "version": "5.0.0",
+  "version": "6.0.0",
   "main": "lib/init-package-json.js",
   "scripts": {
     "test": "tap",
@@ -19,22 +19,22 @@
   "license": "ISC",
   "description": "A node module to get your node module started",
   "dependencies": {
-    "npm-package-arg": "^10.0.0",
+    "npm-package-arg": "^11.0.0",
     "promzard": "^1.0.0",
     "read": "^2.0.0",
-    "read-package-json": "^6.0.0",
+    "read-package-json": "^7.0.0",
     "semver": "^7.3.5",
     "validate-npm-package-license": "^3.0.4",
     "validate-npm-package-name": "^5.0.0"
   },
   "devDependencies": {
-    "@npmcli/config": "^6.0.0",
+    "@npmcli/config": "^7.0.0",
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.11.3",
+    "@npmcli/template-oss": "4.18.0",
     "tap": "^16.0.1"
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "tap": {
     "statements": 95,
@@ -63,6 +63,13 @@
   ],
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.11.3"
+    "version": "4.18.0",
+    "publish": true,
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
   }
 }
diff --git a/package-lock.json b/package-lock.json
index 0d868b1c1b358..5335acdd5ce59 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -108,7 +108,7 @@
         "graceful-fs": "^4.2.11",
         "hosted-git-info": "^7.0.0",
         "ini": "^4.1.1",
-        "init-package-json": "^5.0.0",
+        "init-package-json": "^6.0.0",
         "is-cidr": "^4.0.2",
         "json-parse-even-better-errors": "^3.0.0",
         "libnpmaccess": "^7.0.2",
@@ -6897,57 +6897,51 @@
       }
     },
     "node_modules/init-package-json": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-5.0.0.tgz",
-      "integrity": "sha512-kBhlSheBfYmq3e0L1ii+VKe3zBTLL5lDCDWR+f9dLmEGSB3MqLlMlsolubSsyI88Bg6EA+BIMlomAnQ1SwgQBw==",
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-6.0.0.tgz",
+      "integrity": "sha512-AmXD+Aht5iZGo/y1KUtZSUQ1SltesXHxQuc7qeNz0eUGx/8WgkHeeQLSFdM8l9YpmnnamGIbAxVdAs2xoLRKRQ==",
       "inBundle": true,
       "dependencies": {
-        "npm-package-arg": "^10.0.0",
+        "npm-package-arg": "^11.0.0",
         "promzard": "^1.0.0",
         "read": "^2.0.0",
-        "read-package-json": "^6.0.0",
+        "read-package-json": "^7.0.0",
         "semver": "^7.3.5",
         "validate-npm-package-license": "^3.0.4",
         "validate-npm-package-name": "^5.0.0"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/init-package-json/node_modules/hosted-git-info": {
-      "version": "6.1.1",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
-      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
+    "node_modules/init-package-json/node_modules/normalize-package-data": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.0.tgz",
+      "integrity": "sha512-UL7ELRVxYBHBgYEtZCXjxuD5vPxnmvMGq0jp/dGPKKrN7tfsBh2IY7TlJ15WWwdjRWD3RJbnsygUurTK3xkPkg==",
       "inBundle": true,
       "dependencies": {
-        "lru-cache": "^7.5.1"
+        "hosted-git-info": "^7.0.0",
+        "is-core-module": "^2.8.1",
+        "semver": "^7.3.5",
+        "validate-npm-package-license": "^3.0.4"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/init-package-json/node_modules/lru-cache": {
-      "version": "7.18.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
-      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
-      "inBundle": true,
-      "engines": {
-        "node": ">=12"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/init-package-json/node_modules/npm-package-arg": {
-      "version": "10.1.0",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
-      "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
+    "node_modules/init-package-json/node_modules/read-package-json": {
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-7.0.0.tgz",
+      "integrity": "sha512-uL4Z10OKV4p6vbdvIXB+OzhInYtIozl/VxUBPgNkBuUi2DeRonnuspmaVAMcrkmfjKGNmRndyQAbE7/AmzGwFg==",
       "inBundle": true,
       "dependencies": {
-        "hosted-git-info": "^6.0.0",
-        "proc-log": "^3.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^5.0.0"
+        "glob": "^10.2.2",
+        "json-parse-even-better-errors": "^3.0.0",
+        "normalize-package-data": "^6.0.0",
+        "npm-normalize-package-bin": "^3.0.0"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/internal-slot": {
@@ -9910,7 +9904,6 @@
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-5.0.0.tgz",
       "integrity": "sha512-h9iPVIfrVZ9wVYQnxFgtw1ugSvGEMOlyPWWtm8BMJhnwyEL/FLbYbTY3V3PpjI/BUK67n9PEWDu6eHzu1fB15Q==",
-      "inBundle": true,
       "dependencies": {
         "hosted-git-info": "^6.0.0",
         "is-core-module": "^2.8.1",
@@ -9925,7 +9918,6 @@
       "version": "6.1.1",
       "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
       "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
-      "inBundle": true,
       "dependencies": {
         "lru-cache": "^7.5.1"
       },
@@ -9937,7 +9929,6 @@
       "version": "7.18.3",
       "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
       "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
-      "inBundle": true,
       "engines": {
         "node": ">=12"
       }
@@ -11277,7 +11268,6 @@
       "version": "6.0.4",
       "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-6.0.4.tgz",
       "integrity": "sha512-AEtWXYfopBj2z5N5PbkAOeNHRPUg5q+Nen7QLxV8M2zJq1ym6/lCz3fYNTCXe19puu2d06jfHhrP7v/S2PtMMw==",
-      "inBundle": true,
       "dependencies": {
         "glob": "^10.2.2",
         "json-parse-even-better-errors": "^3.0.0",
diff --git a/package.json b/package.json
index f13feb344bbfa..06672c07f8fdb 100644
--- a/package.json
+++ b/package.json
@@ -73,7 +73,7 @@
     "graceful-fs": "^4.2.11",
     "hosted-git-info": "^7.0.0",
     "ini": "^4.1.1",
-    "init-package-json": "^5.0.0",
+    "init-package-json": "^6.0.0",
     "is-cidr": "^4.0.2",
     "json-parse-even-better-errors": "^3.0.0",
     "libnpmaccess": "^7.0.2",

From c13a01e07544d5bb1de2afdf66b55896df64e228 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 13:54:18 -0700
Subject: [PATCH 39/68] deps: @npmcli/git@5.0.1

---
 node_modules/.gitignore                       |    8 -
 .../git/node_modules/hosted-git-info/LICENSE  |   13 -
 .../hosted-git-info/lib/from-url.js           |  122 --
 .../node_modules/hosted-git-info/lib/hosts.js |  228 ---
 .../node_modules/hosted-git-info/lib/index.js |  179 ---
 .../hosted-git-info/lib/parse-url.js          |   78 --
 .../node_modules/lru-cache/LICENSE            |   15 -
 .../node_modules/lru-cache/index.js           | 1227 -----------------
 .../node_modules/lru-cache/index.mjs          | 1227 -----------------
 .../node_modules/lru-cache/package.json       |   96 --
 .../node_modules/hosted-git-info/package.json |   59 -
 .../git/node_modules/npm-package-arg/LICENSE  |   15 -
 .../node_modules/npm-package-arg/lib/npa.js   |  431 ------
 .../node_modules/npm-package-arg/package.json |   59 -
 .../node_modules/npm-pick-manifest/LICENSE.md |   16 -
 .../npm-pick-manifest/lib/index.js            |  218 ---
 .../npm-pick-manifest/package.json            |   57 -
 node_modules/@npmcli/git/package.json         |    6 +-
 package-lock.json                             |   63 +-
 package.json                                  |    2 +-
 workspaces/libnpmversion/package.json         |    2 +-
 21 files changed, 11 insertions(+), 4110 deletions(-)
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/LICENSE
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/lib/from-url.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/lib/hosts.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/lib/index.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/lib/parse-url.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/index.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/index.mjs
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/package.json
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/package.json
 delete mode 100644 node_modules/@npmcli/git/node_modules/npm-package-arg/LICENSE
 delete mode 100644 node_modules/@npmcli/git/node_modules/npm-package-arg/lib/npa.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/npm-package-arg/package.json
 delete mode 100644 node_modules/@npmcli/git/node_modules/npm-pick-manifest/LICENSE.md
 delete mode 100644 node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 106eec7635e0c..6d8161877fc36 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -22,14 +22,6 @@
 !/@npmcli/disparity-colors
 !/@npmcli/fs
 !/@npmcli/git
-!/@npmcli/git/node_modules/
-/@npmcli/git/node_modules/*
-!/@npmcli/git/node_modules/hosted-git-info
-!/@npmcli/git/node_modules/hosted-git-info/node_modules/
-/@npmcli/git/node_modules/hosted-git-info/node_modules/*
-!/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache
-!/@npmcli/git/node_modules/npm-package-arg
-!/@npmcli/git/node_modules/npm-pick-manifest
 !/@npmcli/installed-package-contents
 !/@npmcli/map-workspaces
 !/@npmcli/metavuln-calculator
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/LICENSE b/node_modules/@npmcli/git/node_modules/hosted-git-info/LICENSE
deleted file mode 100644
index 45055763dc838..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright (c) 2015, Rebecca Turner
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/from-url.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/from-url.js
deleted file mode 100644
index efc1247d59d12..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/from-url.js
+++ /dev/null
@@ -1,122 +0,0 @@
-'use strict'
-
-const parseUrl = require('./parse-url')
-
-// look for github shorthand inputs, such as npm/cli
-const isGitHubShorthand = (arg) => {
-  // it cannot contain whitespace before the first #
-  // it cannot start with a / because that's probably an absolute file path
-  // but it must include a slash since repos are username/repository
-  // it cannot start with a . because that's probably a relative file path
-  // it cannot start with an @ because that's a scoped package if it passes the other tests
-  // it cannot contain a : before a # because that tells us that there's a protocol
-  // a second / may not exist before a #
-  const firstHash = arg.indexOf('#')
-  const firstSlash = arg.indexOf('/')
-  const secondSlash = arg.indexOf('/', firstSlash + 1)
-  const firstColon = arg.indexOf(':')
-  const firstSpace = /\s/.exec(arg)
-  const firstAt = arg.indexOf('@')
-
-  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
-  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
-  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
-  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
-  const hasSlash = firstSlash > 0
-  // if a # is found, what we really want to know is that the character
-  // immediately before # is not a /
-  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
-  const doesNotStartWithDot = !arg.startsWith('.')
-
-  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
-    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
-    secondSlashOnlyAfterHash
-}
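-
-// For illustration:
-//   isGitHubShorthand('npm/cli')        => true
-//   isGitHubShorthand('npm/cli#v9.0.0') => true
-//   isGitHubShorthand('./npm/cli')      => false (relative path)
-//   isGitHubShorthand('@npm/cli')       => false (scoped package name)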
-
-module.exports = (giturl, opts, { gitHosts, protocols }) => {
-  if (!giturl) {
-    return
-  }
-
-  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
-  const parsed = parseUrl(correctedUrl, protocols)
-  if (!parsed) {
-    return
-  }
-
-  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
-  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
-    ? parsed.hostname.slice(4)
-    : parsed.hostname]
-  const gitHostName = gitHostShortcut || gitHostDomain
-  if (!gitHostName) {
-    return
-  }
-
-  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
-  let auth = null
-  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
-    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
-  }
-
-  let committish = null
-  let user = null
-  let project = null
-  let defaultRepresentation = null
-
-  try {
-    if (gitHostShortcut) {
-      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
-      const firstAt = pathname.indexOf('@')
-      // we ignore auth for shortcuts, so just trim it out
-      if (firstAt > -1) {
-        pathname = pathname.slice(firstAt + 1)
-      }
-
-      const lastSlash = pathname.lastIndexOf('/')
-      if (lastSlash > -1) {
-        user = decodeURIComponent(pathname.slice(0, lastSlash))
-        // we want nulls only, never empty strings
-        if (!user) {
-          user = null
-        }
-        project = decodeURIComponent(pathname.slice(lastSlash + 1))
-      } else {
-        project = decodeURIComponent(pathname)
-      }
-
-      if (project.endsWith('.git')) {
-        project = project.slice(0, -4)
-      }
-
-      if (parsed.hash) {
-        committish = decodeURIComponent(parsed.hash.slice(1))
-      }
-
-      defaultRepresentation = 'shortcut'
-    } else {
-      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
-        return
-      }
-
-      const segments = gitHostInfo.extract(parsed)
-      if (!segments) {
-        return
-      }
-
-      user = segments.user && decodeURIComponent(segments.user)
-      project = decodeURIComponent(segments.project)
-      committish = decodeURIComponent(segments.committish)
-      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
-    }
-  } catch (err) {
-    /* istanbul ignore else */
-    if (err instanceof URIError) {
-      return
-    } else {
-      throw err
-    }
-  }
-
-  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
-}
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/hosts.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/hosts.js
deleted file mode 100644
index 013712b7842c8..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/hosts.js
+++ /dev/null
@@ -1,228 +0,0 @@
-/* eslint-disable max-len */
-
-'use strict'
-
-const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
-const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
-const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-')
-
-const defaults = {
-  sshtemplate: ({ domain, user, project, committish }) =>
-    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, user, project, committish }) =>
-    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
-  browsetemplate: ({ domain, user, project, committish, treepath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  docstemplate: ({ domain, user, project, treepath, committish }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
-  shortcuttemplate: ({ type, user, project, committish }) =>
-    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ user, project, committish }) =>
-    `${user}/${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, user, project }) =>
-    `https://${domain}/${user}/${project}/issues`,
-  hashformat: formatHashFragment,
-}
-
-const hosts = {}
-hosts.github = {
-  // First two are insecure and generally shouldn't be used any more, but
-  // they are still supported.
-  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'github.com',
-  treepath: 'tree',
-  blobpath: 'blob',
-  editpath: 'edit',
-  filetemplate: ({ auth, user, project, committish, path }) =>
-    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
-  gittemplate: ({ auth, domain, user, project, committish }) =>
-    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, type, committish] = url.pathname.split('/', 5)
-    if (type && type !== 'tree') {
-      return
-    }
-
-    if (!type) {
-      committish = url.hash.slice(1)
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish }
-  },
-}
-
-hosts.bitbucket = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'bitbucket.org',
-  treepath: 'src',
-  blobpath: 'src',
-  editpath: '?mode=edit',
-  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (['get'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gitlab = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gitlab.com',
-  treepath: 'tree',
-  blobpath: 'tree',
-  editpath: '-/edit',
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    const path = url.pathname.slice(1)
-    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
-      return
-    }
-
-    const segments = path.split('/')
-    let project = segments.pop()
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    const user = segments.join('/')
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gist = {
-  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gist.github.com',
-  editpath: 'edit',
-  sshtemplate: ({ domain, project, committish }) =>
-    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, project, committish }) =>
-    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
-  browsetemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  docstemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  httpstemplate: ({ domain, project, committish }) =>
-    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ user, project, committish, path }) =>
-    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
-  shortcuttemplate: ({ type, project, committish }) =>
-    `${type}:${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ project, committish }) =>
-    `${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, project }) =>
-    `https://${domain}/${project}`,
-  gittemplate: ({ domain, project, committish }) =>
-    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ project, committish }) =>
-    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (aux === 'raw') {
-      return
-    }
-
-    if (!project) {
-      if (!user) {
-        return
-      }
-
-      project = user
-      user = null
-    }
-
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-  hashformat: function (fragment) {
-    return fragment && 'file-' + formatHashFragment(fragment)
-  },
-}
-
-hosts.sourcehut = {
-  protocols: ['git+ssh:', 'https:'],
-  domain: 'git.sr.ht',
-  treepath: 'tree',
-  blobpath: 'tree',
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
-  httpstemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
-  bugstemplate: ({ user, project }) =>
-    `https://todo.sr.ht/${user}/${project}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-
-    // tarball url
-    if (['archive'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  hosts[name] = Object.assign({}, defaults, host)
-}
-
-module.exports = hosts
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/index.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/index.js
deleted file mode 100644
index a7339c217e9a3..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/index.js
+++ /dev/null
@@ -1,179 +0,0 @@
-'use strict'
-
-const LRU = require('lru-cache')
-const hosts = require('./hosts.js')
-const fromUrl = require('./from-url.js')
-const parseUrl = require('./parse-url.js')
-
-const cache = new LRU({ max: 1000 })
-
-class GitHost {
-  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
-    Object.assign(this, GitHost.#gitHosts[type], {
-      type,
-      user,
-      auth,
-      project,
-      committish,
-      default: defaultRepresentation,
-      opts,
-    })
-  }
-
-  static #gitHosts = { byShortcut: {}, byDomain: {} }
-  static #protocols = {
-    'git+ssh:': { name: 'sshurl' },
-    'ssh:': { name: 'sshurl' },
-    'git+https:': { name: 'https', auth: true },
-    'git:': { auth: true },
-    'http:': { auth: true },
-    'https:': { auth: true },
-    'git+http:': { auth: true },
-  }
-
-  static addHost (name, host) {
-    GitHost.#gitHosts[name] = host
-    GitHost.#gitHosts.byDomain[host.domain] = name
-    GitHost.#gitHosts.byShortcut[`${name}:`] = name
-    GitHost.#protocols[`${name}:`] = { name }
-  }
-
-  static fromUrl (giturl, opts) {
-    if (typeof giturl !== 'string') {
-      return
-    }
-
-    const key = giturl + JSON.stringify(opts || {})
-
-    if (!cache.has(key)) {
-      const hostArgs = fromUrl(giturl, opts, {
-        gitHosts: GitHost.#gitHosts,
-        protocols: GitHost.#protocols,
-      })
-      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
-    }
-
-    return cache.get(key)
-  }
-
-  static parseUrl (url) {
-    return parseUrl(url)
-  }
-
-  #fill (template, opts) {
-    if (typeof template !== 'function') {
-      return null
-    }
-
-    const options = { ...this, ...this.opts, ...opts }
-
-    // the path should always be set so we don't end up with 'undefined' in urls
-    if (!options.path) {
-      options.path = ''
-    }
-
-    // template functions will insert the leading slash themselves
-    if (options.path.startsWith('/')) {
-      options.path = options.path.slice(1)
-    }
-
-    if (options.noCommittish) {
-      options.committish = null
-    }
-
-    const result = template(options)
-    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
-  }
-
-  hash () {
-    return this.committish ? `#${this.committish}` : ''
-  }
-
-  ssh (opts) {
-    return this.#fill(this.sshtemplate, opts)
-  }
-
-  sshurl (opts) {
-    return this.#fill(this.sshurltemplate, opts)
-  }
-
-  browse (path, ...args) {
-    // not a string, treat path as opts
-    if (typeof path !== 'string') {
-      return this.#fill(this.browsetemplate, path)
-    }
-
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browsetreetemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  // If the path is known to be a file, then browseFile should be used. For some hosts
-  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
-  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
-  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
-  // does not redirect to a different commit.
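-  // For illustration, with a github-hosted project and no committish:
-  //   browse('lib/index.js')     => https://github.com/<user>/<project>/tree/HEAD/lib/index.js
-  //   browseFile('lib/index.js') => https://github.com/<user>/<project>/blob/HEAD/lib/index.js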
-  browseFile (path, ...args) {
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browseblobtemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  docs (opts) {
-    return this.#fill(this.docstemplate, opts)
-  }
-
-  bugs (opts) {
-    return this.#fill(this.bugstemplate, opts)
-  }
-
-  https (opts) {
-    return this.#fill(this.httpstemplate, opts)
-  }
-
-  git (opts) {
-    return this.#fill(this.gittemplate, opts)
-  }
-
-  shortcut (opts) {
-    return this.#fill(this.shortcuttemplate, opts)
-  }
-
-  path (opts) {
-    return this.#fill(this.pathtemplate, opts)
-  }
-
-  tarball (opts) {
-    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
-  }
-
-  file (path, opts) {
-    return this.#fill(this.filetemplate, { ...opts, path })
-  }
-
-  edit (path, opts) {
-    return this.#fill(this.edittemplate, { ...opts, path })
-  }
-
-  getDefaultRepresentation () {
-    return this.default
-  }
-
-  toString (opts) {
-    if (this.default && typeof this[this.default] === 'function') {
-      return this[this.default](opts)
-    }
-
-    return this.sshurl(opts)
-  }
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  GitHost.addHost(name, host)
-}
-
-module.exports = GitHost
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/parse-url.js
deleted file mode 100644
index 7d5489c008ab4..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/parse-url.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const url = require('url')
-
-const lastIndexOfBefore = (str, char, beforeChar) => {
-  const startPosition = str.indexOf(beforeChar)
-  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
-}
-
-const safeUrl = (u) => {
-  try {
-    return new url.URL(u)
-  } catch {
-    // this fn should never throw
-  }
-}
-
-// accepts input like git:github.com:user/repo and inserts the // after the first :
-const correctProtocol = (arg, protocols) => {
-  const firstColon = arg.indexOf(':')
-  const proto = arg.slice(0, firstColon + 1)
-  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
-    return arg
-  }
-
-  const firstAt = arg.indexOf('@')
-  if (firstAt > -1) {
-    if (firstAt > firstColon) {
-      return `git+ssh://${arg}`
-    } else {
-      return arg
-    }
-  }
-
-  const doubleSlash = arg.indexOf('//')
-  if (doubleSlash === firstColon + 1) {
-    return arg
-  }
-
-  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
-}
-
-// attempt to correct an scp style url so that it will parse with `new URL()`
-const correctUrl = (giturl) => {
-  // ignore @ signs that come after the first hash, since that denotes the start
-  // of a committish which can contain @ characters
-  const firstAt = lastIndexOfBefore(giturl, '@', '#')
-  // ignore colons that come after the hash since that could include colons such as:
-  // git@github.com:user/package-2#semver:^1.0.0
-  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
-
-  if (lastColonBeforeHash > firstAt) {
-    // the last : comes after the first @ (or there is no @)
-    // like it would in:
-    // proto://hostname.com:user/repo
-    // username@hostname.com:user/repo
-    // :password@hostname.com:user/repo
-    // username:password@hostname.com:user/repo
-    // proto://username@hostname.com:user/repo
-    // proto://:password@hostname.com:user/repo
-    // proto://username:password@hostname.com:user/repo
-    // then we replace the last : with a / to create a valid path
-    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
-  }
-
-  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
-    // we have no : at all
-    // as it would be in:
-    // username@hostname.com/user/repo
-    // then we prepend a protocol
-    giturl = `git+ssh://${giturl}`
-  }
-
-  return giturl
-}
-
-module.exports = (giturl, protocols) => {
-  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
-  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
-}
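// A standalone sketch of the scp-style correction above: an address like
// 'git@github.com:user/repo#v1.0.0' is not a valid WHATWG URL, so the last
// ':' before the '#' becomes '/' and a protocol is prepended when missing:
const scp = 'git@github.com:user/repo#v1.0.0'
const stop = scp.indexOf('#')
const colon = scp.lastIndexOf(':', stop > -1 ? stop : Infinity)
const fixed = `git+ssh://${scp.slice(0, colon)}/${scp.slice(colon + 1)}`
const parsed = new URL(fixed)
console.log(parsed.pathname, parsed.hash) // '/user/repo' '#v1.0.0'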
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE b/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/index.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/index.js
deleted file mode 100644
index 48e99fe5e5a70..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/index.js
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
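// A small sketch of the shared contract the native and polyfilled classes
// above both satisfy (the native path assumes Node's AbortController, where
// abort(reason) is available in modern releases):
const demoController = new AC()
demoController.signal.addEventListener('abort', () =>
  console.log('aborted:', demoController.signal.reason.message))
demoController.abort(new Error('demo')) // logs 'aborted: demo'
console.log(demoController.signal.aborted) // true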
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
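// A quick sketch of the size-class selection above: the smallest index type
// that can address `max` slots backs the next/prev link arrays:
console.log(getUintArray(256) === Uint8Array)    // true: indexes fit in 2^8
console.log(getUintArray(65536) === Uint16Array) // true: indexes fit in 2^16
console.log(getUintArray(1e6) === Uint32Array)   // true: indexes fit in 2^32
console.log(getUintArray(2 ** 40) === ZeroArray) // true: zero-filled plain Array
console.log(getUintArray(0))                     // null: not a positive integer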
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.start + status.ttl - status.now
-      }
-    }
-
-    // debounce calls to perf.now() to 1s so we're not hitting
-    // that costly call repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
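  // Usage note (illustrative sketch): with ttlResolution = 1000, staleness
  // checks within the same second reuse one performance.now() sample:
  //   const c = new LRUCache({ max: 10, ttl: 5000, ttlResolution: 1000 })
  //   c.set('k', 'v'); c.has('k'); c.has('k') // second has() reuses cachedNow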
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation return invalid (expect positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
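  // Illustrative note: the tracking implementations above replace these stubs
  // only when the cache is constructed with a size bound, e.g.
  //   new LRUCache({ maxSize: 5000, sizeCalculation: (v) => v.length })
  // Without maxSize/maxEntrySize, passing size or sizeCalculation to set()
  // throws, as the stub requireSize() above enforces.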
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
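  // Round-trip sketch (illustrative): dump() emits [key, entry] pairs with
  // portable start timestamps, so a cache can be rebuilt in another process:
  //   const snapshot = cache.dump()
  //   const clone = new LRUCache({ max: cache.max })
  //   clone.load(snapshot) // TTL clocks are re-anchored via entry.start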
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
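  // Status sketch (illustrative): callers may pass a plain object to observe
  // what set() decided:
  //   const status = {}
  //   cache.set('a', 1, { status }) // status.set === 'add' on first insert
  //   cache.set('a', 2, { status }) // status.set === 'replace', oldValue === 1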
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
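  // Contrast with get() (illustrative): peek() neither promotes the entry in
  // the recency list nor expires it, so it is side-effect free:
  //   cache.peek('k') // value or undefined; recency order is untouched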
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
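  // fetchMethod sketch (illustrative, with a hypothetical endpoint, using the
  // (key, staleValue, { signal }) signature passed by backgroundFetch above):
  //   const c = new LRUCache({
  //     max: 100, ttl: 60_000, allowStale: true,
  //     fetchMethod: async (key, staleValue, { signal }) => {
  //       const res = await fetch(`https://example.test/${key}`, { signal })
  //       return res.json()
  //     },
  //   })
  //   const v = await c.fetch('item') // status.fetch: hit/inflight/stale/refresh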
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet;
-        // it's not stale, which means this isn't a stale-while-fetching case.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
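  // Worked trace (illustrative) of the pointer surgery above, with recency
  // list A(head) -> B -> C(tail) and moveToTail(B):
  //   connect(A, C) // A is prev[B], C is next[B]: unlink B, leaving A -> C
  //   connect(C, B) // C was the tail: append B after it
  //   tail = B      // final recency order: A -> C -> B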
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-module.exports = LRUCache
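// An end-to-end sketch of the class above (illustrative; the option names
// follow the constructor destructuring at the top of this file):
const demoCache = new LRUCache({ max: 2, ttl: 1000, allowStale: true })
demoCache.set('a', 1)
demoCache.set('b', 2)
demoCache.get('a')    // touch 'a' so it becomes most recent
demoCache.set('c', 3) // evicts 'b', the least recently used entry
console.log([...demoCache.keys()]) // ['c', 'a'] (most to least recent)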
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/index.mjs b/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/index.mjs
deleted file mode 100644
index 4a0b4813ec515..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/index.mjs
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.start + status.ttl - status.now
-      }
-    }
-
-    // debounce calls to perf.now() to 1s so we're not hitting
-    // that costly call repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation return invalid (expect positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
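
The dump()/load() pair above round-trips entries through a wall-clock
timestamp so TTLs survive across processes whose performance.now() origins
differ. A minimal sketch against the lru-cache v7 API (keys and values here
are illustrative):

    const LRUCache = require('lru-cache')

    const a = new LRUCache({ max: 10, ttl: 60 * 1000 })
    a.set('answer', 42)

    // dump() emits [key, entry] pairs; entry.start is rebased onto
    // Date.now() so it stays meaningful outside this process
    const snapshot = a.dump()

    // load() clears the target cache, then re-derives each entry's start
    // against this process's clock before re-inserting it
    const b = new LRUCache({ max: 10, ttl: 60 * 1000 })
    b.load(snapshot)
    console.log(b.get('answer')) // 42, remaining TTL preserved
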
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
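
One subtlety in set() above: an item whose computed size exceeds
maxEntrySize is rejected outright (and any existing entry for that key
deleted) rather than evicting half the cache to make room. A small sketch,
assuming the lru-cache v7 API:

    const LRUCache = require('lru-cache')

    const cache = new LRUCache({
      maxSize: 100,
      maxEntrySize: 10,
      sizeCalculation: s => s.length, // size = string length, for this demo
    })

    cache.set('small', 'ok')         // fits: size 2 <= maxEntrySize
    cache.set('big', 'x'.repeat(50)) // too large: treated as a miss, not stored

    console.log(cache.has('small'))  // true
    console.log(cache.has('big'))    // false
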
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
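
isBackgroundFetch() tells the cache's own in-flight promises apart from
thenables a user may have stored as values by duck-typing on two sentinel
own-properties stamped onto every fetch promise. The same trick in
isolation (a standalone sketch; field names mirror the code above):

    // tag a promise so it can later be distinguished from arbitrary values
    const tagged = Promise.resolve('pending value')
    tagged.__staleWhileFetching = undefined // stale value served meanwhile
    tagged.__returned = null // becomes the promise itself once handed out

    const looksLikeBackgroundFetch = p =>
      p &&
      typeof p === 'object' &&
      typeof p.then === 'function' &&
      Object.prototype.hasOwnProperty.call(p, '__staleWhileFetching') &&
      Object.prototype.hasOwnProperty.call(p, '__returned') &&
      (p.__returned === p || p.__returned === null)

    console.log(looksLikeBackgroundFetch(tagged))            // true
    console.log(looksLikeBackgroundFetch(Promise.resolve())) // false
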
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
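
Taken together, fetch() above implements stale-while-revalidate: a stale
hit is served immediately while fetchMethod refreshes the entry in the
background. A minimal usage sketch (the counter-based fetchMethod is a
stand-in for a real lookup, not part of the cache):

    const LRUCache = require('lru-cache')

    let version = 0
    const cache = new LRUCache({
      max: 100,
      ttl: 50,          // entries go stale quickly, for the demo
      allowStale: true, // stale values may be returned while refreshing
      fetchMethod: async (key, staleValue, { signal }) => {
        // signal fires if the entry is deleted/evicted mid-flight
        return `${key}:v${++version}`
      },
    })

    async function demo () {
      console.log(await cache.fetch('pkg')) // miss: waits -> 'pkg:v1'
      await new Promise(r => setTimeout(r, 60)) // let the entry go stale
      console.log(await cache.fetch('pkg')) // stale hit: 'pkg:v1' at once,
                                            // 'pkg:v2' fetched in background
    }
    demo()
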
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet
-        // it's not stale, which means this isn't a staleWhileRefetching.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
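
The status option threaded through get() above is a plain object supplied
by the caller; the cache mutates it to report what happened, which is
handy for instrumentation. Sketch (field names as set by the code above):

    const LRUCache = require('lru-cache')

    const cache = new LRUCache({ max: 10, ttl: 1000 })
    cache.set('k', 'v')

    const status = {}
    cache.get('k', { status })
    console.log(status.get)          // 'hit'
    console.log(status.remainingTTL) // ms until this entry goes stale
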
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
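
moveToTail() above is classic pointer surgery: the recency list is a
doubly linked list stored as two parallel index arrays (prev/next) instead
of node objects, avoiding a per-entry allocation. A stripped-down
standalone illustration of the same structure (not the cache's own code):

    const max = 4
    const next = new Array(max).fill(0)
    const prev = new Array(max).fill(0)
    let head = 0       // least recently used
    let tail = max - 1 // most recently used

    const connect = (p, n) => { prev[n] = p; next[p] = n }

    // build the initial list 0 <-> 1 <-> 2 <-> 3
    for (let i = 0; i < max - 1; i++) connect(i, i + 1)

    const moveToTail = index => {
      if (index === tail) return // already most recent
      if (index === head) head = next[index]
      else connect(prev[index], next[index]) // splice out of the middle
      connect(tail, index) // re-link after the old tail
      tail = index
    }

    moveToTail(1)
    console.log({ head, tail }) // { head: 0, tail: 1 } - order: 0, 2, 3, 1
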
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-export default LRUCache
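
One behavior of the deleted class worth noting: delete(), evict(), and
clear() all abort an in-flight background fetch for the affected key via
the AbortController attached to its promise, so a fetchMethod that honors
its signal can stop work early. Sketch, assuming the lru-cache v7 API:

    const LRUCache = require('lru-cache')

    const cache = new LRUCache({
      max: 10,
      fetchMethod: (key, stale, { signal }) =>
        new Promise((resolve, reject) => {
          const t = setTimeout(() => resolve('done'), 1000)
          // a well-behaved fetchMethod stops when the cache aborts it
          signal.addEventListener('abort', () => {
            clearTimeout(t)
            reject(signal.reason)
          })
        }),
    })

    const p = cache.fetch('k').catch(er => console.log(er.message)) // 'deleted'
    cache.delete('k') // aborts the pending fetch with new Error('deleted')
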
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/package.json b/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/package.json
deleted file mode 100644
index 9684991727e7a..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,96 +0,0 @@
-{
-  "name": "lru-cache",
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "7.18.3",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "node ./scripts/transpile-to-esm.js",
-    "size": "size-limit",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc ./index.d.ts"
-  },
-  "type": "commonjs",
-  "main": "./index.js",
-  "module": "./index.mjs",
-  "types": "./index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./index.d.ts",
-        "default": "./index.mjs"
-      },
-      "require": {
-        "types": "./index.d.ts",
-        "default": "./index.js"
-      }
-    },
-    "./package.json": "./package.json"
-  },
-  "repository": "git://github.com/isaacs/node-lru-cache.git",
-  "devDependencies": {
-    "@size-limit/preset-small-lib": "^7.0.8",
-    "@types/node": "^17.0.31",
-    "@types/tap": "^15.0.6",
-    "benchmark": "^2.1.4",
-    "c8": "^7.11.2",
-    "clock-mock": "^1.0.6",
-    "eslint-config-prettier": "^8.5.0",
-    "prettier": "^2.6.2",
-    "size-limit": "^7.0.8",
-    "tap": "^16.3.4",
-    "ts-node": "^10.7.0",
-    "tslib": "^2.4.0",
-    "typedoc": "^0.23.24",
-    "typescript": "^4.6.4"
-  },
-  "license": "ISC",
-  "files": [
-    "index.js",
-    "index.mjs",
-    "index.d.ts"
-  ],
-  "engines": {
-    "node": ">=12"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--include=index.js"
-    ],
-    "node-arg": [
-      "--expose-gc",
-      "--require",
-      "ts-node/register"
-    ],
-    "ts": false
-  },
-  "size-limit": [
-    {
-      "path": "./index.js"
-    }
-  ]
-}
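
The exports map in this deleted package.json is the standard dual CJS/ESM
arrangement: the same specifier resolves to ./index.js under require() and
./index.mjs under import, with shared ./index.d.ts types. Consumer sketch:

    // CommonJS: resolved through the "require" condition to ./index.js
    const LRUCache = require('lru-cache')

    // ESM: resolved through the "import" condition to ./index.mjs
    //   import LRUCache from 'lru-cache'
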
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/package.json b/node_modules/@npmcli/git/node_modules/hosted-git-info/package.json
deleted file mode 100644
index 612259948afe7..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/package.json
+++ /dev/null
@@ -1,59 +0,0 @@
-{
-  "name": "hosted-git-info",
-  "version": "6.1.1",
-  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
-  "main": "./lib/index.js",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/hosted-git-info.git"
-  },
-  "keywords": [
-    "git",
-    "github",
-    "bitbucket",
-    "gitlab"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/hosted-git-info/issues"
-  },
-  "homepage": "https://github.com/npm/hosted-git-info",
-  "scripts": {
-    "posttest": "npm run lint",
-    "snap": "tap",
-    "test": "tap",
-    "test:coverage": "tap --coverage-report=html",
-    "lint": "eslint \"**/*.js\"",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run lint -- --fix",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "dependencies": {
-    "lru-cache": "^7.5.1"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.7.1",
-    "tap": "^16.0.1"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "color": 1,
-    "coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.7.1"
-  }
-}
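
hosted-git-info, whose nested copy is removed above, is what
npm-package-arg leans on below to recognize GitHub/GitLab/Bitbucket
specifiers. A usage sketch against its v6 API:

    const hostedGitInfo = require('hosted-git-info')

    const info = hostedGitInfo.fromUrl('git@github.com:npm/cli.git')
    console.log(info.type)    // 'github'
    console.log(info.user)    // 'npm'
    console.log(info.project) // 'cli'
    console.log(info.https()) // 'git+https://github.com/npm/cli.git'
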
diff --git a/node_modules/@npmcli/git/node_modules/npm-package-arg/LICENSE b/node_modules/@npmcli/git/node_modules/npm-package-arg/LICENSE
deleted file mode 100644
index 19cec97b18468..0000000000000
--- a/node_modules/@npmcli/git/node_modules/npm-package-arg/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/git/node_modules/npm-package-arg/lib/npa.js b/node_modules/@npmcli/git/node_modules/npm-package-arg/lib/npa.js
deleted file mode 100644
index 36bd18cd9f9a6..0000000000000
--- a/node_modules/@npmcli/git/node_modules/npm-package-arg/lib/npa.js
+++ /dev/null
@@ -1,431 +0,0 @@
-'use strict'
-module.exports = npa
-module.exports.resolve = resolve
-module.exports.toPurl = toPurl
-module.exports.Result = Result
-
-const url = require('url')
-const HostedGit = require('hosted-git-info')
-const semver = require('semver')
-const path = global.FAKE_WINDOWS ? require('path').win32 : require('path')
-const validatePackageName = require('validate-npm-package-name')
-const { homedir } = require('os')
-const log = require('proc-log')
-
-const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
-const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
-const isURL = /^(?:git[+])?[a-z]+:/i
-const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
-const isFilename = /[.](?:tgz|tar.gz|tar)$/i
-
-function npa (arg, where) {
-  let name
-  let spec
-  if (typeof arg === 'object') {
-    if (arg instanceof Result && (!where || where === arg.where)) {
-      return arg
-    } else if (arg.name && arg.rawSpec) {
-      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
-    } else {
-      return npa(arg.raw, where || arg.where)
-    }
-  }
-  const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@')
-  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
-  if (isURL.test(arg)) {
-    spec = arg
-  } else if (isGit.test(arg)) {
-    spec = `git+ssh://${arg}`
-  } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) {
-    spec = arg
-  } else if (nameEndsAt > 0) {
-    name = namePart
-    spec = arg.slice(nameEndsAt + 1) || '*'
-  } else {
-    const valid = validatePackageName(arg)
-    if (valid.validForOldPackages) {
-      name = arg
-      spec = '*'
-    } else {
-      spec = arg
-    }
-  }
-  return resolve(name, spec, where, arg)
-}
-
-const isFilespec = isWindows ? /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
-
-function resolve (name, spec, where, arg) {
-  const res = new Result({
-    raw: arg,
-    name: name,
-    rawSpec: spec,
-    fromArgument: arg != null,
-  })
-
-  if (name) {
-    res.setName(name)
-  }
-
-  if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) {
-    return fromFile(res, where)
-  } else if (spec && /^npm:/i.test(spec)) {
-    return fromAlias(res, where)
-  }
-
-  const hosted = HostedGit.fromUrl(spec, {
-    noGitPlus: true,
-    noCommittish: true,
-  })
-  if (hosted) {
-    return fromHostedGit(res, hosted)
-  } else if (spec && isURL.test(spec)) {
-    return fromURL(res)
-  } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) {
-    return fromFile(res, where)
-  } else {
-    return fromRegistry(res)
-  }
-}
-
-const defaultRegistry = 'https://registry.npmjs.org'
-
-function toPurl (arg, reg = defaultRegistry) {
-  const res = npa(arg)
-
-  if (res.type !== 'version') {
-    throw invalidPurlType(res.type, res.raw)
-  }
-
-  // URI-encode leading @ of scoped packages
-  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
-  if (reg !== defaultRegistry) {
-    purl += '?repository_url=' + reg
-  }
-
-  return purl
-}
-
-function invalidPackageName (name, valid, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
-  err.code = 'EINVALIDPACKAGENAME'
-  return err
-}
-
-function invalidTagName (name, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
-  err.code = 'EINVALIDTAGNAME'
-  return err
-}
-
-function invalidPurlType (type, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
-  err.code = 'EINVALIDPURLTYPE'
-  return err
-}
-
-function Result (opts) {
-  this.type = opts.type
-  this.registry = opts.registry
-  this.where = opts.where
-  if (opts.raw == null) {
-    this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec
-  } else {
-    this.raw = opts.raw
-  }
-
-  this.name = undefined
-  this.escapedName = undefined
-  this.scope = undefined
-  this.rawSpec = opts.rawSpec || ''
-  this.saveSpec = opts.saveSpec
-  this.fetchSpec = opts.fetchSpec
-  if (opts.name) {
-    this.setName(opts.name)
-  }
-  this.gitRange = opts.gitRange
-  this.gitCommittish = opts.gitCommittish
-  this.gitSubdir = opts.gitSubdir
-  this.hosted = opts.hosted
-}
-
-Result.prototype.setName = function (name) {
-  const valid = validatePackageName(name)
-  if (!valid.validForOldPackages) {
-    throw invalidPackageName(name, valid, this.raw)
-  }
-
-  this.name = name
-  this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
-  // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
-  this.escapedName = name.replace('/', '%2f')
-  return this
-}
-
-Result.prototype.toString = function () {
-  const full = []
-  if (this.name != null && this.name !== '') {
-    full.push(this.name)
-  }
-  const spec = this.saveSpec || this.fetchSpec || this.rawSpec
-  if (spec != null && spec !== '') {
-    full.push(spec)
-  }
-  return full.length ? full.join('@') : this.raw
-}
-
-Result.prototype.toJSON = function () {
-  const result = Object.assign({}, this)
-  delete result.hosted
-  return result
-}
-
-function setGitCommittish (res, committish) {
-  if (!committish) {
-    res.gitCommittish = null
-    return res
-  }
-
-  // for each :: separated item:
-  for (const part of committish.split('::')) {
-    // if the item has no : then it is a commit-ish
-    if (!part.includes(':')) {
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a committish')
-      }
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a second committish')
-      }
-      res.gitCommittish = part
-      continue
-    }
-    // split on name:value
-    const [name, value] = part.split(':')
-    // if name is semver do semver lookup of ref or tag
-    if (name === 'semver') {
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a semver range')
-      }
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a second semver range')
-      }
-      res.gitRange = decodeURIComponent(value)
-      continue
-    }
-    if (name === 'path') {
-      if (res.gitSubdir) {
-        throw new Error('cannot override existing path with a second path')
-      }
-      res.gitSubdir = `/${value}`
-      continue
-    }
-    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
-  }
-
-  return res
-}
-
-function fromFile (res, where) {
-  if (!where) {
-    where = process.cwd()
-  }
-  res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory'
-  res.where = where
-
-  // always put the '/' on where when resolving urls, or else
-  // file:foo from /path/to/bar goes to /path/to/foo, when we want
-  // it to be /path/to/bar/foo
-
-  let specUrl
-  let resolvedUrl
-  const prefix = (!/^file:/.test(res.rawSpec) ? 'file:' : '')
-  const rawWithPrefix = prefix + res.rawSpec
-  let rawNoPrefix = rawWithPrefix.replace(/^file:/, '')
-  try {
-    resolvedUrl = new url.URL(rawWithPrefix, `file://${path.resolve(where)}/`)
-    specUrl = new url.URL(rawWithPrefix)
-  } catch (originalError) {
-    const er = new Error('Invalid file: URL, must comply with RFC 8909')
-    throw Object.assign(er, {
-      raw: res.rawSpec,
-      spec: res,
-      where,
-      originalError,
-    })
-  }
-
-  // environment switch for testing
-  if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') {
-    // XXX backwards compatibility lack of compliance with 8909
-    // Remove when we want a breaking change to come into RFC compliance.
-    if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
-      const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
-      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
-      specUrl = new url.URL(rawSpec)
-      rawNoPrefix = rawSpec.replace(/^file:/, '')
-    }
-    // turn file:/../foo into file:../foo
-    // for 1, 2 or 3 leading slashes since we attempted
-    // in the previous step to make it a file protocol url with a leading slash
-    if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
-      const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
-      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
-      specUrl = new url.URL(rawSpec)
-      rawNoPrefix = rawSpec.replace(/^file:/, '')
-    }
-    // XXX end 8909 violation backwards compatibility section
-  }
-
-  // file:foo - relative url to ./foo
-  // file:/foo - absolute path /foo
-  // file:///foo - absolute path to /foo, no authority host
-  // file://localhost/foo - absolute path to /foo, on localhost
-  // file://foo - absolute path to / on foo host (error!)
-  if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
-    const msg = `Invalid file: URL, must be absolute if // present`
-    throw Object.assign(new Error(msg), {
-      raw: res.rawSpec,
-      parsed: resolvedUrl,
-    })
-  }
-
-  // turn /C:/blah into just C:/blah on windows
-  let specPath = decodeURIComponent(specUrl.pathname)
-  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
-  if (isWindows) {
-    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
-    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
-  }
-
-  // replace ~ with homedir, but keep the ~ in the saveSpec
-  // otherwise, make it relative to where param
-  if (/^\/~(\/|$)/.test(specPath)) {
-    res.saveSpec = `file:${specPath.substr(1)}`
-    resolvedPath = path.resolve(homedir(), specPath.substr(3))
-  } else if (!path.isAbsolute(rawNoPrefix)) {
-    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
-  } else {
-    res.saveSpec = `file:${path.resolve(resolvedPath)}`
-  }
-
-  res.fetchSpec = path.resolve(where, resolvedPath)
-  return res
-}
-
-function fromHostedGit (res, hosted) {
-  res.type = 'git'
-  res.hosted = hosted
-  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
-  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
-  return setGitCommittish(res, hosted.committish)
-}
-
-function unsupportedURLType (protocol, spec) {
-  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
-  err.code = 'EUNSUPPORTEDPROTOCOL'
-  return err
-}
-
-function matchGitScp (spec) {
-  // git ssh specifiers are overloaded to also use scp-style git
-  // specifiers, so we have to parse those out and treat them special.
-  // They are NOT true URIs, so we can't hand them to `url.parse`.
-  //
-  // This regex looks for things that look like:
-  // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
-  //
-  // ...and various combinations. The username in the beginning is *required*.
-  const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
-  return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && {
-    fetchSpec: matched[1],
-    gitCommittish: matched[2] == null ? null : matched[2],
-  }
-}
-
-function fromURL (res) {
-  // eslint-disable-next-line node/no-deprecated-api
-  const urlparse = url.parse(res.rawSpec)
-  res.saveSpec = res.rawSpec
-  // check the protocol, and then see if it's git or not
-  switch (urlparse.protocol) {
-    case 'git:':
-    case 'git+http:':
-    case 'git+https:':
-    case 'git+rsync:':
-    case 'git+ftp:':
-    case 'git+file:':
-    case 'git+ssh:': {
-      res.type = 'git'
-      const match = urlparse.protocol === 'git+ssh:' ? matchGitScp(res.rawSpec)
-        : null
-      if (match) {
-        setGitCommittish(res, match.gitCommittish)
-        res.fetchSpec = match.fetchSpec
-      } else {
-        setGitCommittish(res, urlparse.hash != null ? urlparse.hash.slice(1) : '')
-        urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '')
-        if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) {
-          // keep the drive letter : on windows file paths
-          urlparse.host += ':'
-          urlparse.hostname += ':'
-        }
-        delete urlparse.hash
-        res.fetchSpec = url.format(urlparse)
-      }
-      break
-    }
-    case 'http:':
-    case 'https:':
-      res.type = 'remote'
-      res.fetchSpec = res.saveSpec
-      break
-
-    default:
-      throw unsupportedURLType(urlparse.protocol, res.rawSpec)
-  }
-
-  return res
-}
-
-function fromAlias (res, where) {
-  const subSpec = npa(res.rawSpec.substr(4), where)
-  if (subSpec.type === 'alias') {
-    throw new Error('nested aliases not supported')
-  }
-
-  if (!subSpec.registry) {
-    throw new Error('aliases only work for registry deps')
-  }
-
-  res.subSpec = subSpec
-  res.registry = true
-  res.type = 'alias'
-  res.saveSpec = null
-  res.fetchSpec = null
-  return res
-}
-
-function fromRegistry (res) {
-  res.registry = true
-  const spec = res.rawSpec.trim()
-  // no save spec for registry components as we save based on the fetched
-  // version, not on the argument so this can't compute that.
-  res.saveSpec = null
-  res.fetchSpec = spec
-  const version = semver.valid(spec, true)
-  const range = semver.validRange(spec, true)
-  if (version) {
-    res.type = 'version'
-  } else if (range) {
-    res.type = 'range'
-  } else {
-    if (encodeURIComponent(spec) !== spec) {
-      throw invalidTagName(spec, res.raw)
-    }
-    res.type = 'tag'
-  }
-  return res
-}
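
For reference, here is how the deleted npa module above classifies a few
common argument shapes (types per the branches in resolve() and
fromRegistry() earlier in this file):

    const npa = require('npm-package-arg')

    console.log(npa('foo@^1.2.3').type)       // 'range'  (registry dep)
    console.log(npa('foo@1.2.3').type)        // 'version'
    console.log(npa('foo@latest').type)       // 'tag'
    console.log(npa('github:npm/cli').type)   // 'git' (via hosted-git-info)
    console.log(npa('file:../some-dir').type) // 'directory'
    console.log(npa('./pkg.tgz').type)        // 'file'
    console.log(npa('npm:bar@^2.0.0').type)   // 'alias' (subSpec is real dep)
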
diff --git a/node_modules/@npmcli/git/node_modules/npm-package-arg/package.json b/node_modules/@npmcli/git/node_modules/npm-package-arg/package.json
deleted file mode 100644
index bb9e71b258a93..0000000000000
--- a/node_modules/@npmcli/git/node_modules/npm-package-arg/package.json
+++ /dev/null
@@ -1,59 +0,0 @@
-{
-  "name": "npm-package-arg",
-  "version": "10.1.0",
-  "description": "Parse the things that can be arguments to `npm install`",
-  "main": "./lib/npa.js",
-  "directories": {
-    "test": "test"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "dependencies": {
-    "hosted-git-info": "^6.0.0",
-    "proc-log": "^3.0.0",
-    "semver": "^7.3.5",
-    "validate-npm-package-name": "^5.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.10.0",
-    "tap": "^16.0.1"
-  },
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "npmclilint": "npmcli-lint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/npm-package-arg.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/npm-package-arg/issues"
-  },
-  "homepage": "https://github.com/npm/npm-package-arg",
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "branches": 97,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.10.0"
-  }
-}
diff --git a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/LICENSE.md b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/LICENSE.md
deleted file mode 100644
index 8d28acf866d93..0000000000000
--- a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js
deleted file mode 100644
index 8dbd2721c8996..0000000000000
--- a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js
+++ /dev/null
@@ -1,218 +0,0 @@
-'use strict'
-
-const npa = require('npm-package-arg')
-const semver = require('semver')
-const { checkEngine } = require('npm-install-checks')
-const normalizeBin = require('npm-normalize-package-bin')
-
-const engineOk = (manifest, npmVersion, nodeVersion) => {
-  try {
-    checkEngine(manifest, npmVersion, nodeVersion)
-    return true
-  } catch (_) {
-    return false
-  }
-}
-
-const isBefore = (verTimes, ver, time) =>
-  !verTimes || !verTimes[ver] || Date.parse(verTimes[ver]) <= time
-
-const avoidSemverOpt = { includePrerelease: true, loose: true }
-const shouldAvoid = (ver, avoid) =>
-  avoid && semver.satisfies(ver, avoid, avoidSemverOpt)
-
-const decorateAvoid = (result, avoid) =>
-  result && shouldAvoid(result.version, avoid)
-    ? { ...result, _shouldAvoid: true }
-    : result
-
-const pickManifest = (packument, wanted, opts) => {
-  const {
-    defaultTag = 'latest',
-    before = null,
-    nodeVersion = process.version,
-    npmVersion = null,
-    includeStaged = false,
-    avoid = null,
-    avoidStrict = false,
-  } = opts
-
-  const { name, time: verTimes } = packument
-  const versions = packument.versions || {}
-
-  if (avoidStrict) {
-    const looseOpts = {
-      ...opts,
-      avoidStrict: false,
-    }
-
-    const result = pickManifest(packument, wanted, looseOpts)
-    if (!result || !result._shouldAvoid) {
-      return result
-    }
-
-    const caret = pickManifest(packument, `^${result.version}`, looseOpts)
-    if (!caret || !caret._shouldAvoid) {
-      return {
-        ...caret,
-        _outsideDependencyRange: true,
-        _isSemVerMajor: false,
-      }
-    }
-
-    const star = pickManifest(packument, '*', looseOpts)
-    if (!star || !star._shouldAvoid) {
-      return {
-        ...star,
-        _outsideDependencyRange: true,
-        _isSemVerMajor: true,
-      }
-    }
-
-    throw Object.assign(new Error(`No avoidable versions for ${name}`), {
-      code: 'ETARGET',
-      name,
-      wanted,
-      avoid,
-      before,
-      versions: Object.keys(versions),
-    })
-  }
-
-  const staged = (includeStaged && packument.stagedVersions &&
-    packument.stagedVersions.versions) || {}
-  const restricted = (packument.policyRestrictions &&
-    packument.policyRestrictions.versions) || {}
-
-  const time = before && verTimes ? +(new Date(before)) : Infinity
-  const spec = npa.resolve(name, wanted || defaultTag)
-  const type = spec.type
-  const distTags = packument['dist-tags'] || {}
-
-  if (type !== 'tag' && type !== 'version' && type !== 'range') {
-    throw new Error('Only tag, version, and range are supported')
-  }
-
-  // if the type is 'tag', and not just the implicit default, then it must
-  // be that exactly, or nothing else will do.
-  if (wanted && type === 'tag') {
-    const ver = distTags[wanted]
-    // if the version in the dist-tags is before the before date, then
-    // we use that.  Otherwise, we get the highest precedence version
-    // prior to the dist-tag.
-    if (isBefore(verTimes, ver, time)) {
-      return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
-    } else {
-      return pickManifest(packument, `<=${ver}`, opts)
-    }
-  }
-
-  // similarly, if a specific version, then only that version will do
-  if (wanted && type === 'version') {
-    const ver = semver.clean(wanted, { loose: true })
-    const mani = versions[ver] || staged[ver] || restricted[ver]
-    return isBefore(verTimes, ver, time) ? decorateAvoid(mani, avoid) : null
-  }
-
-  // ok, sort based on our heuristics, and pick the best fit
-  const range = type === 'range' ? wanted : '*'
-
-  // if the range is *, then we prefer the 'latest' if available
-  // but skip this if it should be avoided, in that case we have
-  // to try a little harder.
-  const defaultVer = distTags[defaultTag]
-  if (defaultVer &&
-      (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
-      !shouldAvoid(defaultVer, avoid)) {
-    const mani = versions[defaultVer]
-    if (mani && isBefore(verTimes, defaultVer, time)) {
-      return mani
-    }
-  }
-
-  // ok, actually have to sort the list and take the winner
-  const allEntries = Object.entries(versions)
-    .concat(Object.entries(staged))
-    .concat(Object.entries(restricted))
-    .filter(([ver, mani]) => isBefore(verTimes, ver, time))
-
-  if (!allEntries.length) {
-    throw Object.assign(new Error(`No versions available for ${name}`), {
-      code: 'ENOVERSIONS',
-      name,
-      type,
-      wanted,
-      before,
-      versions: Object.keys(versions),
-    })
-  }
-
-  const sortSemverOpt = { loose: true }
-  const entries = allEntries.filter(([ver, mani]) =>
-    semver.satisfies(ver, range, { loose: true }))
-    .sort((a, b) => {
-      const [vera, mania] = a
-      const [verb, manib] = b
-      const notavoida = !shouldAvoid(vera, avoid)
-      const notavoidb = !shouldAvoid(verb, avoid)
-      const notrestra = !restricted[vera]
-      const notrestrb = !restricted[verb]
-      const notstagea = !staged[vera]
-      const notstageb = !staged[verb]
-      const notdepra = !mania.deprecated
-      const notdeprb = !manib.deprecated
-      const enginea = engineOk(mania, npmVersion, nodeVersion)
-      const engineb = engineOk(manib, npmVersion, nodeVersion)
-      // sort by:
-      // - not an avoided version
-      // - not restricted
-      // - not staged
-      // - not deprecated and engine ok
-      // - engine ok
-      // - not deprecated
-      // - semver
-      return (notavoidb - notavoida) ||
-        (notrestrb - notrestra) ||
-        (notstageb - notstagea) ||
-        ((notdeprb && engineb) - (notdepra && enginea)) ||
-        (engineb - enginea) ||
-        (notdeprb - notdepra) ||
-        semver.rcompare(vera, verb, sortSemverOpt)
-    })
-
-  return decorateAvoid(entries[0] && entries[0][1], avoid)
-}
-
-module.exports = (packument, wanted, opts = {}) => {
-  const mani = pickManifest(packument, wanted, opts)
-  const picked = mani && normalizeBin(mani)
-  const policyRestrictions = packument.policyRestrictions
-  const restricted = (policyRestrictions && policyRestrictions.versions) || {}
-
-  if (picked && !restricted[picked.version]) {
-    return picked
-  }
-
-  const { before = null, defaultTag = 'latest' } = opts
-  const bstr = before ? new Date(before).toLocaleString() : ''
-  const { name } = packument
-  const pckg = `${name}@${wanted}` +
-    (before ? ` with a date before ${bstr}` : '')
-
-  const isForbidden = picked && !!restricted[picked.version]
-  const polMsg = isForbidden ? policyRestrictions.message : ''
-
-  const msg = !isForbidden ? `No matching version found for ${pckg}.`
-    : `Could not download ${pckg} due to policy violations:\n${polMsg}`
-
-  const code = isForbidden ? 'E403' : 'ETARGET'
-  throw Object.assign(new Error(msg), {
-    code,
-    type: npa.resolve(packument.name, wanted).type,
-    wanted,
-    versions: Object.keys(packument.versions ?? {}),
-    name,
-    distTags: packument['dist-tags'],
-    defaultTag,
-  })
-}
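
A usage sketch of the deleted npm-pick-manifest entry point above (the
packument is a hand-rolled stand-in for a registry metadata document):

    const pickManifest = require('npm-pick-manifest')

    const packument = {
      name: 'example',
      'dist-tags': { latest: '1.1.0' },
      versions: {
        '1.0.0': { name: 'example', version: '1.0.0' },
        '1.1.0': { name: 'example', version: '1.1.0' },
      },
    }

    console.log(pickManifest(packument, '^1.0.0').version) // '1.1.0'
    console.log(pickManifest(packument, '1.0.0').version)  // '1.0.0'
    try {
      pickManifest(packument, '^3.0.0')
    } catch (er) {
      console.log(er.code) // 'ETARGET' - no matching version
    }
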
diff --git a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json
deleted file mode 100644
index feff81f5b2fee..0000000000000
--- a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json
+++ /dev/null
@@ -1,57 +0,0 @@
-{
-  "name": "npm-pick-manifest",
-  "version": "8.0.2",
-  "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
-  "main": "./lib",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "coverage": "tap",
-    "lint": "eslint \"**/*.js\"",
-    "test": "tap",
-    "posttest": "npm run lint",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run lint -- --fix",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/npm-pick-manifest.git"
-  },
-  "keywords": [
-    "npm",
-    "semver",
-    "package manager"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "dependencies": {
-    "npm-install-checks": "^6.0.0",
-    "npm-normalize-package-bin": "^3.0.0",
-    "npm-package-arg": "^10.0.0",
-    "semver": "^7.3.5"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.0.1"
-  },
-  "tap": {
-    "check-coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.18.0",
-    "publish": true
-  }
-}
diff --git a/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/git/package.json
index a7e7f09d12c47..3852c358ae4ef 100644
--- a/node_modules/@npmcli/git/package.json
+++ b/node_modules/@npmcli/git/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/git",
-  "version": "5.0.0",
+  "version": "5.0.1",
   "main": "lib/index.js",
   "files": [
     "bin/",
@@ -32,14 +32,14 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "npm-package-arg": "^10.0.0",
+    "npm-package-arg": "^11.0.0",
     "slash": "^3.0.0",
     "tap": "^16.0.1"
   },
   "dependencies": {
     "@npmcli/promise-spawn": "^6.0.0",
     "lru-cache": "^10.0.1",
-    "npm-pick-manifest": "^8.0.0",
+    "npm-pick-manifest": "^9.0.0",
     "proc-log": "^3.0.0",
     "promise-inflight": "^1.0.1",
     "promise-retry": "^2.0.1",
diff --git a/package-lock.json b/package-lock.json
index 5335acdd5ce59..45cb733e784b3 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -162,7 +162,7 @@
       "devDependencies": {
         "@npmcli/docs": "^1.0.0",
         "@npmcli/eslint-config": "^4.0.2",
-        "@npmcli/git": "^5.0.0",
+        "@npmcli/git": "^5.0.1",
         "@npmcli/mock-globals": "^1.0.0",
         "@npmcli/mock-registry": "^1.0.0",
         "@npmcli/template-oss": "4.18.0",
@@ -2410,14 +2410,14 @@
       }
     },
     "node_modules/@npmcli/git": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.0.tgz",
-      "integrity": "sha512-KzDPpE5oGu2x3ZHUMacrIPqmvgV48TBqNJzNQTszkOqNwtIjlLoZ+4Gxa268EgPE6UcEzunmZdyY9hLoNClXhQ==",
+      "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.1.tgz",
+      "integrity": "sha512-9zUEqmRMZU5bmqWVu83wFVHH9kwLEQeMuDUDSYsBK/L4qbBl8Shdoc5EWfANzAdy5kFuPbBn7ToXTakbVdlCZg==",
       "inBundle": true,
       "dependencies": {
         "@npmcli/promise-spawn": "^6.0.0",
         "lru-cache": "^10.0.1",
-        "npm-pick-manifest": "^8.0.0",
+        "npm-pick-manifest": "^9.0.0",
         "proc-log": "^3.0.0",
         "promise-inflight": "^1.0.1",
         "promise-retry": "^2.0.1",
@@ -2428,57 +2428,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/git/node_modules/hosted-git-info": {
-      "version": "6.1.1",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
-      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
-      "inBundle": true,
-      "dependencies": {
-        "lru-cache": "^7.5.1"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/git/node_modules/hosted-git-info/node_modules/lru-cache": {
-      "version": "7.18.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
-      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
-      "inBundle": true,
-      "engines": {
-        "node": ">=12"
-      }
-    },
-    "node_modules/@npmcli/git/node_modules/npm-package-arg": {
-      "version": "10.1.0",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
-      "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
-      "inBundle": true,
-      "dependencies": {
-        "hosted-git-info": "^6.0.0",
-        "proc-log": "^3.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^5.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/git/node_modules/npm-pick-manifest": {
-      "version": "8.0.2",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz",
-      "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==",
-      "inBundle": true,
-      "dependencies": {
-        "npm-install-checks": "^6.0.0",
-        "npm-normalize-package-bin": "^3.0.0",
-        "npm-package-arg": "^10.0.0",
-        "semver": "^7.3.5"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/@npmcli/installed-package-contents": {
       "version": "2.0.2",
       "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-2.0.2.tgz",
@@ -16667,7 +16616,7 @@
       "version": "4.0.2",
       "license": "ISC",
       "dependencies": {
-        "@npmcli/git": "^5.0.0",
+        "@npmcli/git": "^5.0.1",
         "@npmcli/run-script": "^6.0.0",
         "json-parse-even-better-errors": "^3.0.0",
         "proc-log": "^3.0.0",
diff --git a/package.json b/package.json
index 06672c07f8fdb..5c3cedfa0c64a 100644
--- a/package.json
+++ b/package.json
@@ -193,7 +193,7 @@
   "devDependencies": {
     "@npmcli/docs": "^1.0.0",
     "@npmcli/eslint-config": "^4.0.2",
-    "@npmcli/git": "^5.0.0",
+    "@npmcli/git": "^5.0.1",
     "@npmcli/mock-globals": "^1.0.0",
     "@npmcli/mock-registry": "^1.0.0",
     "@npmcli/template-oss": "4.18.0",
diff --git a/workspaces/libnpmversion/package.json b/workspaces/libnpmversion/package.json
index 2eadbc079a142..19e4972632a88 100644
--- a/workspaces/libnpmversion/package.json
+++ b/workspaces/libnpmversion/package.json
@@ -37,7 +37,7 @@
     "tap": "^16.3.4"
   },
   "dependencies": {
-    "@npmcli/git": "^5.0.0",
+    "@npmcli/git": "^5.0.1",
     "@npmcli/run-script": "^6.0.0",
     "json-parse-even-better-errors": "^3.0.0",
     "proc-log": "^3.0.0",

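
The dependency changes in this patch adjust caret ranges in two distinct
ways: @npmcli/git ^5.0.0 -> ^5.0.1 raises the floor within the same major,
while npm-pick-manifest ^8.0.0 -> ^9.0.0 moves to a major the old range
excluded entirely. A quick check with the semver package these modules use:

    const semver = require('semver')

    // patch-level floor raise: ^5.0.0 -> ^5.0.1
    console.log(semver.satisfies('5.0.0', '^5.0.1')) // false - floor raised
    console.log(semver.satisfies('5.0.1', '^5.0.0')) // true  - old range allowed it

    // major bump: ^8.0.0 and ^9.0.0 are disjoint
    console.log(semver.satisfies('9.0.0', '^8.0.0')) // false
    console.log(semver.satisfies('8.0.2', '^9.0.0')) // false
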
From 6ba2fd3ee19ac444c35643509ea74628eb2b218a Mon Sep 17 00:00:00 2001
From: Luke Karrys <luke@lukekarrys.com>
Date: Tue, 15 Aug 2023 13:54:50 -0700
Subject: [PATCH 40/68] deps: @npmcli/metavuln-calculator@7.0.0

---
 node_modules/.gitignore                       |   17 -
 .../node_modules/@npmcli/git/LICENSE          |   15 -
 .../node_modules/@npmcli/git/lib/clone.js     |  172 ---
 .../node_modules/@npmcli/git/lib/errors.js    |   36 -
 .../node_modules/@npmcli/git/lib/find.js      |   15 -
 .../node_modules/@npmcli/git/lib/index.js     |    9 -
 .../node_modules/@npmcli/git/lib/is-clean.js  |    6 -
 .../node_modules/@npmcli/git/lib/is.js        |    6 -
 .../@npmcli/git/lib/lines-to-revs.js          |  147 --
 .../@npmcli/git/lib/make-error.js             |   33 -
 .../node_modules/@npmcli/git/lib/opts.js      |   12 -
 .../node_modules/@npmcli/git/lib/revs.js      |   28 -
 .../node_modules/@npmcli/git/lib/spawn.js     |   44 -
 .../node_modules/@npmcli/git/lib/utils.js     |    3 -
 .../node_modules/@npmcli/git/lib/which.js     |   18 -
 .../node_modules/@npmcli/git/package.json     |   57 -
 .../node_modules/cacache/LICENSE.md           |   16 -
 .../node_modules/cacache/lib/content/path.js  |   29 -
 .../node_modules/cacache/lib/content/read.js  |  166 ---
 .../node_modules/cacache/lib/content/rm.js    |   18 -
 .../node_modules/cacache/lib/content/write.js |  205 ---
 .../node_modules/cacache/lib/entry-index.js   |  330 -----
 .../node_modules/cacache/lib/get.js           |  170 ---
 .../node_modules/cacache/lib/index.js         |   42 -
 .../node_modules/cacache/lib/memoization.js   |   72 -
 .../node_modules/cacache/lib/put.js           |   80 --
 .../node_modules/cacache/lib/rm.js            |   31 -
 .../node_modules/cacache/lib/util/glob.js     |    7 -
 .../cacache/lib/util/hash-to-segments.js      |    7 -
 .../node_modules/cacache/lib/util/tmp.js      |   26 -
 .../node_modules/cacache/lib/verify.js        |  257 ----
 .../node_modules/cacache/package.json         |   82 --
 .../node_modules/hosted-git-info/LICENSE      |   13 -
 .../hosted-git-info/lib/from-url.js           |  122 --
 .../node_modules/hosted-git-info/lib/hosts.js |  228 ---
 .../node_modules/hosted-git-info/lib/index.js |  179 ---
 .../hosted-git-info/lib/parse-url.js          |   78 --
 .../node_modules/hosted-git-info/package.json |   59 -
 .../node_modules/lru-cache/LICENSE            |   15 -
 .../node_modules/lru-cache/index.js           | 1227 -----------------
 .../node_modules/lru-cache/index.mjs          | 1227 -----------------
 .../node_modules/lru-cache/package.json       |   96 --
 .../node_modules/npm-package-arg/LICENSE      |   15 -
 .../node_modules/npm-package-arg/lib/npa.js   |  431 ------
 .../node_modules/npm-package-arg/package.json |   59 -
 .../node_modules/npm-pick-manifest/LICENSE.md |   16 -
 .../npm-pick-manifest/lib/index.js            |  218 ---
 .../npm-pick-manifest/package.json            |   57 -
 .../node_modules/pacote/LICENSE               |   15 -
 .../node_modules/pacote/lib/bin.js            |  158 ---
 .../node_modules/pacote/lib/dir.js            |  108 --
 .../node_modules/pacote/lib/fetcher.js        |  505 -------
 .../node_modules/pacote/lib/file.js           |   96 --
 .../node_modules/pacote/lib/git.js            |  327 -----
 .../node_modules/pacote/lib/index.js          |   23 -
 .../node_modules/pacote/lib/registry.js       |  344 -----
 .../node_modules/pacote/lib/remote.js         |   91 --
 .../pacote/lib/util/add-git-sha.js            |   15 -
 .../node_modules/pacote/lib/util/cache-dir.js |   15 -
 .../pacote/lib/util/is-package-bin.js         |   25 -
 .../node_modules/pacote/lib/util/npm.js       |   14 -
 .../pacote/lib/util/tar-create-options.js     |   31 -
 .../pacote/lib/util/trailing-slashes.js       |   10 -
 .../node_modules/pacote/package.json          |   85 --
 .../@npmcli/metavuln-calculator/package.json  |   10 +-
 node_modules/normalize-package-data/LICENSE   |   15 -
 .../lib/extract_description.js                |   24 -
 .../normalize-package-data/lib/fixer.js       |  475 -------
 .../lib/make_warning.js                       |   22 -
 .../normalize-package-data/lib/normalize.js   |   48 -
 .../normalize-package-data/lib/safe_format.js |   11 -
 .../normalize-package-data/lib/typos.json     |   25 -
 .../lib/warning_messages.json                 |   30 -
 .../node_modules/hosted-git-info/LICENSE      |   13 -
 .../hosted-git-info/lib/from-url.js           |  122 --
 .../node_modules/hosted-git-info/lib/hosts.js |  228 ---
 .../node_modules/hosted-git-info/lib/index.js |  179 ---
 .../hosted-git-info/lib/parse-url.js          |   78 --
 .../node_modules/hosted-git-info/package.json |   59 -
 .../node_modules/lru-cache/LICENSE            |   15 -
 .../node_modules/lru-cache/index.js           | 1227 -----------------
 .../node_modules/lru-cache/index.mjs          | 1227 -----------------
 .../node_modules/lru-cache/package.json       |   96 --
 .../normalize-package-data/package.json       |   55 -
 node_modules/read-package-json/LICENSE        |   15 -
 .../read-package-json/lib/read-json.js        |  589 --------
 node_modules/read-package-json/package.json   |   59 -
 package-lock.json                             |  149 +-
 workspaces/arborist/package.json              |    2 +-
 89 files changed, 16 insertions(+), 12815 deletions(-)
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/LICENSE
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/clone.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/errors.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/find.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is-clean.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/lines-to-revs.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/make-error.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/opts.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/revs.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/spawn.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/utils.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/which.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/LICENSE
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/from-url.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/hosts.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/parse-url.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.mjs
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/LICENSE
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/lib/npa.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/LICENSE.md
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/lib/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE
 delete mode 100755 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json
 delete mode 100644 node_modules/normalize-package-data/LICENSE
 delete mode 100644 node_modules/normalize-package-data/lib/extract_description.js
 delete mode 100644 node_modules/normalize-package-data/lib/fixer.js
 delete mode 100644 node_modules/normalize-package-data/lib/make_warning.js
 delete mode 100644 node_modules/normalize-package-data/lib/normalize.js
 delete mode 100644 node_modules/normalize-package-data/lib/safe_format.js
 delete mode 100644 node_modules/normalize-package-data/lib/typos.json
 delete mode 100644 node_modules/normalize-package-data/lib/warning_messages.json
 delete mode 100644 node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE
 delete mode 100644 node_modules/normalize-package-data/node_modules/hosted-git-info/lib/from-url.js
 delete mode 100644 node_modules/normalize-package-data/node_modules/hosted-git-info/lib/hosts.js
 delete mode 100644 node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js
 delete mode 100644 node_modules/normalize-package-data/node_modules/hosted-git-info/lib/parse-url.js
 delete mode 100644 node_modules/normalize-package-data/node_modules/hosted-git-info/package.json
 delete mode 100644 node_modules/normalize-package-data/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/normalize-package-data/node_modules/lru-cache/index.js
 delete mode 100644 node_modules/normalize-package-data/node_modules/lru-cache/index.mjs
 delete mode 100644 node_modules/normalize-package-data/node_modules/lru-cache/package.json
 delete mode 100644 node_modules/normalize-package-data/package.json
 delete mode 100644 node_modules/read-package-json/LICENSE
 delete mode 100644 node_modules/read-package-json/lib/read-json.js
 delete mode 100644 node_modules/read-package-json/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 6d8161877fc36..17bd4ad5a2682 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -25,17 +25,6 @@
 !/@npmcli/installed-package-contents
 !/@npmcli/map-workspaces
 !/@npmcli/metavuln-calculator
-!/@npmcli/metavuln-calculator/node_modules/
-/@npmcli/metavuln-calculator/node_modules/*
-!/@npmcli/metavuln-calculator/node_modules/@npmcli/
-/@npmcli/metavuln-calculator/node_modules/@npmcli/*
-!/@npmcli/metavuln-calculator/node_modules/@npmcli/git
-!/@npmcli/metavuln-calculator/node_modules/cacache
-!/@npmcli/metavuln-calculator/node_modules/hosted-git-info
-!/@npmcli/metavuln-calculator/node_modules/lru-cache
-!/@npmcli/metavuln-calculator/node_modules/npm-package-arg
-!/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest
-!/@npmcli/metavuln-calculator/node_modules/pacote
 !/@npmcli/name-from-folder
 !/@npmcli/node-gyp
 !/@npmcli/package-json
@@ -212,11 +201,6 @@
 !/node-gyp/node_modules/signal-exit
 !/node-gyp/node_modules/which
 !/nopt
-!/normalize-package-data
-!/normalize-package-data/node_modules/
-/normalize-package-data/node_modules/*
-!/normalize-package-data/node_modules/hosted-git-info
-!/normalize-package-data/node_modules/lru-cache
 !/npm-audit-report
 !/npm-bundled
 !/npm-install-checks
@@ -266,7 +250,6 @@
 !/qrcode-terminal
 !/read-cmd-shim
 !/read-package-json-fast
-!/read-package-json
 !/read
 !/readable-stream
 !/retry
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/LICENSE
deleted file mode 100644
index 8f90f96f4c6c5..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
-OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
-DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
-ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
-SOFTWARE.
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/clone.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/clone.js
deleted file mode 100644
index e25a4d1426821..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/clone.js
+++ /dev/null
@@ -1,172 +0,0 @@
-// The goal here is to minimize both git workload and
-// the number of refs we download over the network.
-//
-// Every method ends up with the checked out working dir
-// at the specified ref, and resolves with the git sha.
-
-// Only certain whitelisted hosts get shallow cloning.
-// Many hosts (including GHE) don't always support it.
-// A failed shallow fetch takes a LOT longer than a full
-// fetch in most cases, so we skip it entirely.
-// Set opts.gitShallow = true/false to force this behavior
-// one way or the other.
-const shallowHosts = new Set([
-  'github.com',
-  'gist.github.com',
-  'gitlab.com',
-  'bitbucket.com',
-  'bitbucket.org',
-])
-// we have to use url.parse until we add the same shim that hosted-git-info has
-// to handle scp:// urls
-const { parse } = require('url') // eslint-disable-line node/no-deprecated-api
-const path = require('path')
-
-const getRevs = require('./revs.js')
-const spawn = require('./spawn.js')
-const { isWindows } = require('./utils.js')
-
-const pickManifest = require('npm-pick-manifest')
-const fs = require('fs/promises')
-
-module.exports = (repo, ref = 'HEAD', target = null, opts = {}) =>
-  getRevs(repo, opts).then(revs => clone(
-    repo,
-    revs,
-    ref,
-    resolveRef(revs, ref, opts),
-    target || defaultTarget(repo, opts.cwd),
-    opts
-  ))
-
-const maybeShallow = (repo, opts) => {
-  if (opts.gitShallow === false || opts.gitShallow) {
-    return opts.gitShallow
-  }
-  return shallowHosts.has(parse(repo).host)
-}
-
-const defaultTarget = (repo, /* istanbul ignore next */ cwd = process.cwd()) =>
-  path.resolve(cwd, path.basename(repo.replace(/[/\\]?\.git$/, '')))
-
-const clone = (repo, revs, ref, revDoc, target, opts) => {
-  if (!revDoc) {
-    return unresolved(repo, ref, target, opts)
-  }
-  if (revDoc.sha === revs.refs.HEAD.sha) {
-    return plain(repo, revDoc, target, opts)
-  }
-  if (revDoc.type === 'tag' || revDoc.type === 'branch') {
-    return branch(repo, revDoc, target, opts)
-  }
-  return other(repo, revDoc, target, opts)
-}
-
-const resolveRef = (revs, ref, opts) => {
-  const { spec = {} } = opts
-  ref = spec.gitCommittish || ref
-  /* istanbul ignore next - will fail anyway, can't pull */
-  if (!revs) {
-    return null
-  }
-  if (spec.gitRange) {
-    return pickManifest(revs, spec.gitRange, opts)
-  }
-  if (!ref) {
-    return revs.refs.HEAD
-  }
-  if (revs.refs[ref]) {
-    return revs.refs[ref]
-  }
-  if (revs.shas[ref]) {
-    return revs.refs[revs.shas[ref][0]]
-  }
-  return null
-}
-
-// pull request or some other kind of advertised ref
-const other = (repo, revDoc, target, opts) => {
-  const shallow = maybeShallow(repo, opts)
-
-  const fetchOrigin = ['fetch', 'origin', revDoc.rawRef]
-    .concat(shallow ? ['--depth=1'] : [])
-
-  const git = (args) => spawn(args, { ...opts, cwd: target })
-  return fs.mkdir(target, { recursive: true })
-    .then(() => git(['init']))
-    .then(() => isWindows(opts)
-      ? git(['config', '--local', '--add', 'core.longpaths', 'true'])
-      : null)
-    .then(() => git(['remote', 'add', 'origin', repo]))
-    .then(() => git(fetchOrigin))
-    .then(() => git(['checkout', revDoc.sha]))
-    .then(() => updateSubmodules(target, opts))
-    .then(() => revDoc.sha)
-}
-
-// tag or branches.  use -b
-const branch = (repo, revDoc, target, opts) => {
-  const args = [
-    'clone',
-    '-b',
-    revDoc.ref,
-    repo,
-    target,
-    '--recurse-submodules',
-  ]
-  if (maybeShallow(repo, opts)) {
-    args.push('--depth=1')
-  }
-  if (isWindows(opts)) {
-    args.push('--config', 'core.longpaths=true')
-  }
-  return spawn(args, opts).then(() => revDoc.sha)
-}
-
-// just the head.  clone it
-const plain = (repo, revDoc, target, opts) => {
-  const args = [
-    'clone',
-    repo,
-    target,
-    '--recurse-submodules',
-  ]
-  if (maybeShallow(repo, opts)) {
-    args.push('--depth=1')
-  }
-  if (isWindows(opts)) {
-    args.push('--config', 'core.longpaths=true')
-  }
-  return spawn(args, opts).then(() => revDoc.sha)
-}
-
-const updateSubmodules = async (target, opts) => {
-  const hasSubmodules = await fs.stat(`${target}/.gitmodules`)
-    .then(() => true)
-    .catch(() => false)
-  if (!hasSubmodules) {
-    return null
-  }
-  return spawn([
-    'submodule',
-    'update',
-    '-q',
-    '--init',
-    '--recursive',
-  ], { ...opts, cwd: target })
-}
-
-const unresolved = (repo, ref, target, opts) => {
-  // can't do this one shallowly, because the ref isn't advertised
-  // but we can avoid checking out the working dir twice, at least
-  const lp = isWindows(opts) ? ['--config', 'core.longpaths=true'] : []
-  const cloneArgs = ['clone', '--mirror', '-q', repo, target + '/.git']
-  const git = (args) => spawn(args, { ...opts, cwd: target })
-  return fs.mkdir(target, { recursive: true })
-    .then(() => git(cloneArgs.concat(lp)))
-    .then(() => git(['init']))
-    .then(() => git(['checkout', ref]))
-    .then(() => updateSubmodules(target, opts))
-    .then(() => git(['rev-parse', '--revs-only', 'HEAD']))
-    .then(({ stdout }) => stdout.trim())
-}
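
The clone.js removed above picks one of four strategies (plain, branch, other,
unresolved) based on how the requested ref resolves. A hedged usage sketch of
the entry point it exported; the repo URL and target directory are
illustrative, not taken from this patch:

    const git = require('@npmcli/git')

    const main = async () => {
      // resolves with the sha checked out into the working directory
      const sha = await git.clone(
        'https://github.com/npm/cli.git', // host on the shallow-clone list
        'HEAD',                           // ref to resolve (HEAD is the default)
        '/tmp/npm-cli-checkout',          // target dir (illustrative)
        { gitShallow: true }              // force --depth=1 regardless of host
      )
      console.log('checked out', sha)
    }

    main().catch(console.error)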
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/errors.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/errors.js
deleted file mode 100644
index 7aeac4762866f..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/errors.js
+++ /dev/null
@@ -1,36 +0,0 @@
-
-const maxRetry = 3
-
-class GitError extends Error {
-  shouldRetry () {
-    return false
-  }
-}
-
-class GitConnectionError extends GitError {
-  constructor (message) {
-    super('A git connection error occurred')
-  }
-
-  shouldRetry (number) {
-    return number < maxRetry
-  }
-}
-
-class GitPathspecError extends GitError {
-  constructor (message) {
-    super('The git reference could not be found')
-  }
-}
-
-class GitUnknownError extends GitError {
-  constructor (message) {
-    super('An unknown git error occurred')
-  }
-}
-
-module.exports = {
-  GitConnectionError,
-  GitPathspecError,
-  GitUnknownError,
-}
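
The whole retry policy lives in these classes: only GitConnectionError
overrides shouldRetry, capped at maxRetry (3) attempts. A quick probe of that
behavior as exported through the package's errors entry:

    const { errors } = require('@npmcli/git')
    const { GitConnectionError, GitPathspecError } = errors

    const conn = new GitConnectionError()
    console.log(conn.shouldRetry(1)) // true  (1 < 3)
    console.log(conn.shouldRetry(3)) // false (attempts exhausted)

    // other GitError subclasses inherit the non-retrying default
    console.log(new GitPathspecError().shouldRetry(1)) // false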
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/find.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/find.js
deleted file mode 100644
index 34bd310b88e5d..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/find.js
+++ /dev/null
@@ -1,15 +0,0 @@
-const is = require('./is.js')
-const { dirname } = require('path')
-
-module.exports = async ({ cwd = process.cwd(), root } = {}) => {
-  while (true) {
-    if (await is({ cwd })) {
-      return cwd
-    }
-    const next = dirname(cwd)
-    if (cwd === root || cwd === next) {
-      return null
-    }
-    cwd = next
-  }
-}
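
find() walks parent directories until is() spots a .git entry, stopping at the
filesystem root or an explicit opts.root. Hedged usage:

    const { find } = require('@npmcli/git')

    // resolves with the directory containing .git, or null if none is found
    find({ cwd: process.cwd() })
      .then(root => console.log(root ?? 'not inside a git repo'))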
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/index.js
deleted file mode 100644
index 10a65f782e6da..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/index.js
+++ /dev/null
@@ -1,9 +0,0 @@
-module.exports = {
-  clone: require('./clone.js'),
-  revs: require('./revs.js'),
-  spawn: require('./spawn.js'),
-  is: require('./is.js'),
-  find: require('./find.js'),
-  isClean: require('./is-clean.js'),
-  errors: require('./errors.js'),
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is-clean.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is-clean.js
deleted file mode 100644
index 182373be94193..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is-clean.js
+++ /dev/null
@@ -1,6 +0,0 @@
-const spawn = require('./spawn.js')
-
-module.exports = (opts = {}) =>
-  spawn(['status', '--porcelain=v1', '-uno'], opts)
-    .then(res => !res.stdout.trim().split(/\r?\n+/)
-      .map(l => l.trim()).filter(l => l).length)
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is.js
deleted file mode 100644
index e2542f2157727..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is.js
+++ /dev/null
@@ -1,6 +0,0 @@
-// not an airtight indicator, but a good gut-check to even bother trying
-const { promisify } = require('util')
-const fs = require('fs')
-const stat = promisify(fs.stat)
-module.exports = ({ cwd = process.cwd() } = {}) =>
-  stat(cwd + '/.git').then(() => true, () => false)
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/lines-to-revs.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/lines-to-revs.js
deleted file mode 100644
index 6bd7e7a4c1531..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/lines-to-revs.js
+++ /dev/null
@@ -1,147 +0,0 @@
-// turn an array of lines from `git ls-remote` into a thing
-// vaguely resembling a packument, where docs are a resolved ref
-
-const semver = require('semver')
-
-module.exports = lines => finish(lines.reduce(linesToRevsReducer, {
-  versions: {},
-  'dist-tags': {},
-  refs: {},
-  shas: {},
-}))
-
-const finish = revs => distTags(shaList(peelTags(revs)))
-
-// We can check out shallow clones on specific SHAs if we have a ref
-const shaList = revs => {
-  Object.keys(revs.refs).forEach(ref => {
-    const doc = revs.refs[ref]
-    if (!revs.shas[doc.sha]) {
-      revs.shas[doc.sha] = [ref]
-    } else {
-      revs.shas[doc.sha].push(ref)
-    }
-  })
-  return revs
-}
-
-// Replace any tags with their ^{} counterparts, if those exist
-const peelTags = revs => {
-  Object.keys(revs.refs).filter(ref => ref.endsWith('^{}')).forEach(ref => {
-    const peeled = revs.refs[ref]
-    const unpeeled = revs.refs[ref.replace(/\^\{\}$/, '')]
-    if (unpeeled) {
-      unpeeled.sha = peeled.sha
-      delete revs.refs[ref]
-    }
-  })
-  return revs
-}
-
-const distTags = revs => {
-  // not entirely sure what situations would result in an
-  // ichabod repo, but best to be careful in Sleepy Hollow anyway
-  const HEAD = revs.refs.HEAD || /* istanbul ignore next */ {}
-  const versions = Object.keys(revs.versions)
-  versions.forEach(v => {
-    // simulate a dist-tags with latest pointing at the
-    // 'latest' branch if one exists and is a version,
-    // or HEAD if not.
-    const ver = revs.versions[v]
-    if (revs.refs.latest && ver.sha === revs.refs.latest.sha) {
-      revs['dist-tags'].latest = v
-    } else if (ver.sha === HEAD.sha) {
-      revs['dist-tags'].HEAD = v
-      if (!revs.refs.latest) {
-        revs['dist-tags'].latest = v
-      }
-    }
-  })
-  return revs
-}
-
-const refType = ref => {
-  if (ref.startsWith('refs/tags/')) {
-    return 'tag'
-  }
-  if (ref.startsWith('refs/heads/')) {
-    return 'branch'
-  }
-  if (ref.startsWith('refs/pull/')) {
-    return 'pull'
-  }
-  if (ref === 'HEAD') {
-    return 'head'
-  }
-  // Could be anything, ignore for now
-  /* istanbul ignore next */
-  return 'other'
-}
-
-// return the doc, or null if we should ignore it.
-const lineToRevDoc = line => {
-  const split = line.trim().split(/\s+/, 2)
-  if (split.length < 2) {
-    return null
-  }
-
-  const sha = split[0].trim()
-  const rawRef = split[1].trim()
-  const type = refType(rawRef)
-
-  if (type === 'tag') {
-    // refs/tags/foo^{} is the 'peeled tag', ie the commit
-    // that is tagged by refs/tags/foo; they resolve to the same
-    // content, just different objects in git's data structure.
-    // But, we care about the thing the tag POINTS to, not the tag
-    // object itself, so we only look at the peeled tag refs, and
-    // ignore the pointer.
-    // For now, though, we have to save both, because some tags
-    // don't have peels, if they were not annotated.
-    const ref = rawRef.slice('refs/tags/'.length)
-    return { sha, ref, rawRef, type }
-  }
-
-  if (type === 'branch') {
-    const ref = rawRef.slice('refs/heads/'.length)
-    return { sha, ref, rawRef, type }
-  }
-
-  if (type === 'pull') {
-    // NB: merged pull requests installable with #pull/123/merge
-    // for the merged pr, or #pull/123 for the PR head
-    const ref = rawRef.slice('refs/'.length).replace(/\/head$/, '')
-    return { sha, ref, rawRef, type }
-  }
-
-  if (type === 'head') {
-    const ref = 'HEAD'
-    return { sha, ref, rawRef, type }
-  }
-
-  // at this point, all we can do is leave the ref un-munged
-  return { sha, ref: rawRef, rawRef, type }
-}
-
-const linesToRevsReducer = (revs, line) => {
-  const doc = lineToRevDoc(line)
-
-  if (!doc) {
-    return revs
-  }
-
-  revs.refs[doc.ref] = doc
-  revs.refs[doc.rawRef] = doc
-
-  if (doc.type === 'tag') {
-    // try to pull a semver value out of tags like `release-v1.2.3`
-    // which is a pretty common pattern.
-    const match = !doc.ref.endsWith('^{}') &&
-      doc.ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/)
-    if (match && semver.valid(match[1], true)) {
-      revs.versions[semver.clean(match[1], true)] = doc
-    }
-  }
-
-  return revs
-}
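
To make the packument-ish shape concrete, here is the reducer above run over a
few hand-written ls-remote style lines (the shas are invented; note this deep
require targets an internal file, not a documented entry point):

    const linesToRevs = require('@npmcli/git/lib/lines-to-revs.js')

    const revs = linesToRevs([
      'aaa111\tHEAD',
      'aaa111\trefs/heads/main',
      'bbb222\trefs/tags/v1.2.3',
      'aaa111\trefs/tags/v1.2.3^{}', // peeled tag: the tagged commit itself
    ])

    console.log(revs.refs['v1.2.3'].sha)     // 'aaa111' -- the peel won
    console.log(revs['dist-tags'])           // { HEAD: '1.2.3', latest: '1.2.3' }
    console.log(revs.shas.aaa111.length > 1) // true: several refs, one sha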
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/make-error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/make-error.js
deleted file mode 100644
index 7540ec7c8b9f7..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/make-error.js
+++ /dev/null
@@ -1,33 +0,0 @@
-const {
-  GitConnectionError,
-  GitPathspecError,
-  GitUnknownError,
-} = require('./errors.js')
-
-const connectionErrorRe = new RegExp([
-  'remote error: Internal Server Error',
-  'The remote end hung up unexpectedly',
-  'Connection timed out',
-  'Operation timed out',
-  'Failed to connect to .* Timed out',
-  'Connection reset by peer',
-  'SSL_ERROR_SYSCALL',
-  'The requested URL returned error: 503',
-].join('|'))
-
-const missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/
-
-function makeError (er) {
-  const message = er.stderr
-  let gitEr
-  if (connectionErrorRe.test(message)) {
-    gitEr = new GitConnectionError(message)
-  } else if (missingPathspecRe.test(message)) {
-    gitEr = new GitPathspecError(message)
-  } else {
-    gitEr = new GitUnknownError(message)
-  }
-  return Object.assign(gitEr, er)
-}
-
-module.exports = makeError
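
Classification here is purely regex-over-stderr; anything unmatched falls
through to GitUnknownError. A quick probe of the two matchers, reproduced from
the file above:

    const connectionErrorRe = new RegExp([
      'The remote end hung up unexpectedly',
      'Connection timed out',
    ].join('|'))
    const missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/

    console.log(connectionErrorRe.test(
      'fatal: The remote end hung up unexpectedly')) // true -> retryable
    console.log(missingPathspecRe.test(
      "error: pathspec 'no-such-ref' did not match any file(s) known to git"
    )) // true -> GitPathspecError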
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/opts.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/opts.js
deleted file mode 100644
index 3119af16e0cf1..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/opts.js
+++ /dev/null
@@ -1,12 +0,0 @@
-// Values we want to set if they're not already defined by the end user
-// This defaults to accepting new ssh host key fingerprints
-const gitEnv = {
-  GIT_ASKPASS: 'echo',
-  GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new',
-}
-module.exports = (opts = {}) => ({
-  stdioString: true,
-  ...opts,
-  shell: false,
-  env: opts.env || { ...gitEnv, ...process.env },
-})
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/revs.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/revs.js
deleted file mode 100644
index ee72370d5b7ec..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/revs.js
+++ /dev/null
@@ -1,28 +0,0 @@
-const pinflight = require('promise-inflight')
-const spawn = require('./spawn.js')
-const LRU = require('lru-cache')
-
-const revsCache = new LRU({
-  max: 100,
-  ttl: 5 * 60 * 1000,
-})
-
-const linesToRevs = require('./lines-to-revs.js')
-
-module.exports = async (repo, opts = {}) => {
-  if (!opts.noGitRevCache) {
-    const cached = revsCache.get(repo)
-    if (cached) {
-      return cached
-    }
-  }
-
-  return pinflight(`ls-remote:${repo}`, () =>
-    spawn(['ls-remote', repo], opts)
-      .then(({ stdout }) => linesToRevs(stdout.trim().split('\n')))
-      .then(revs => {
-        revsCache.set(repo, revs)
-        return revs
-      })
-  )
-}
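
revs.js layers an in-memory TTL cache under promise-inflight, so concurrent
callers share a single ls-remote per repo and repeat lookups are free for five
minutes. The cache half in isolation, using the lru-cache v7 API this package
pins below:

    const LRU = require('lru-cache') // v7: the class is the default export

    const revsCache = new LRU({ max: 100, ttl: 5 * 60 * 1000 })

    revsCache.set('https://example.com/repo.git', { refs: {} })
    console.log(revsCache.has('https://example.com/repo.git')) // true
    // once the 5 minute ttl lapses, get() returns undefined again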
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/spawn.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/spawn.js
deleted file mode 100644
index 7098d7b872942..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/spawn.js
+++ /dev/null
@@ -1,44 +0,0 @@
-const spawn = require('@npmcli/promise-spawn')
-const promiseRetry = require('promise-retry')
-const log = require('proc-log')
-const makeError = require('./make-error.js')
-const whichGit = require('./which.js')
-const makeOpts = require('./opts.js')
-
-module.exports = (gitArgs, opts = {}) => {
-  const gitPath = whichGit(opts)
-
-  if (gitPath instanceof Error) {
-    return Promise.reject(gitPath)
-  }
-
-  // undocumented option, mostly only here for tests
-  const args = opts.allowReplace || gitArgs[0] === '--no-replace-objects'
-    ? gitArgs
-    : ['--no-replace-objects', ...gitArgs]
-
-  let retryOpts = opts.retry
-  if (retryOpts === null || retryOpts === undefined) {
-    retryOpts = {
-      retries: opts.fetchRetries || 2,
-      factor: opts.fetchRetryFactor || 10,
-      maxTimeout: opts.fetchRetryMaxtimeout || 60000,
-      minTimeout: opts.fetchRetryMintimeout || 1000,
-    }
-  }
-  return promiseRetry((retryFn, number) => {
-    if (number !== 1) {
-      log.silly('git', `Retrying git command: ${
-        args.join(' ')} attempt # ${number}`)
-    }
-
-    return spawn(gitPath, args, makeOpts(opts))
-      .catch(er => {
-        const gitError = makeError(er)
-        if (!gitError.shouldRetry(number)) {
-          throw gitError
-        }
-        retryFn(gitError)
-      })
-  }, retryOpts)
-}
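
Every git invocation funnels through promise-retry with npm's fetch-retry
defaults (2 retries, backoff factor 10). A minimal sketch of that loop in
isolation, with tiny timeouts so it finishes quickly:

    const promiseRetry = require('promise-retry')

    let attempts = 0
    promiseRetry((retry, number) => {
      attempts = number
      // stand-in for a git spawn whose error shouldRetry() accepts
      return Promise.reject(new Error('flaky')).catch(retry)
    }, { retries: 2, minTimeout: 10, maxTimeout: 20 })
      .catch(() => console.log('gave up after', attempts, 'attempts')) // 3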
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/utils.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/utils.js
deleted file mode 100644
index fcd9578a19597..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/utils.js
+++ /dev/null
@@ -1,3 +0,0 @@
-const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32'
-
-exports.isWindows = isWindows
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/which.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/which.js
deleted file mode 100644
index dc2a1ad212166..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/which.js
+++ /dev/null
@@ -1,18 +0,0 @@
-const which = require('which')
-
-let gitPath
-try {
-  gitPath = which.sync('git')
-} catch {
-  // ignore errors
-}
-
-module.exports = (opts = {}) => {
-  if (opts.git) {
-    return opts.git
-  }
-  if (!gitPath || opts.git === false) {
-    return Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' })
-  }
-  return gitPath
-}
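
Note the unusual contract: rather than throwing, the memoized lookup returns an
Error value that spawn() converts into a rejection. Hedged usage (again a deep
require into an internal file):

    const whichGit = require('@npmcli/git/lib/which.js')

    const gitPath = whichGit({})
    if (gitPath instanceof Error) {
      console.error(gitPath.code) // 'ENOGIT' when git is not on $PATH
    } else {
      console.log('spawning', gitPath)
    }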
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/package.json
deleted file mode 100644
index eeba1c0415788..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/package.json
+++ /dev/null
@@ -1,57 +0,0 @@
-{
-  "name": "@npmcli/git",
-  "version": "4.1.0",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "description": "a util for spawning git from npm CLI contexts",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/git.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "scripts": {
-    "lint": "eslint \"**/*.js\"",
-    "snap": "tap",
-    "test": "tap",
-    "posttest": "npm run lint",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run lint -- --fix",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "tap": {
-    "timeout": 600,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.15.1",
-    "npm-package-arg": "^10.0.0",
-    "slash": "^3.0.0",
-    "tap": "^16.0.1"
-  },
-  "dependencies": {
-    "@npmcli/promise-spawn": "^6.0.0",
-    "lru-cache": "^7.4.4",
-    "npm-pick-manifest": "^8.0.0",
-    "proc-log": "^3.0.0",
-    "promise-inflight": "^1.0.1",
-    "promise-retry": "^2.0.1",
-    "semver": "^7.3.5",
-    "which": "^3.0.0"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.15.1",
-    "publish": true
-  }
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md
deleted file mode 100644
index 8d28acf866d93..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js
deleted file mode 100644
index ad5a76a4f73f2..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js
+++ /dev/null
@@ -1,29 +0,0 @@
-'use strict'
-
-const contentVer = require('../../package.json')['cache-version'].content
-const hashToSegments = require('../util/hash-to-segments')
-const path = require('path')
-const ssri = require('ssri')
-
-// Current format of content file path:
-//
-// sha512-BaSE64Hex= ->
-// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
-//
-module.exports = contentPath
-
-function contentPath (cache, integrity) {
-  const sri = ssri.parse(integrity, { single: true })
-  // contentPath is the *strongest* algo given
-  return path.join(
-    contentDir(cache),
-    sri.algorithm,
-    ...hashToSegments(sri.hexDigest())
-  )
-}
-
-module.exports.contentDir = contentDir
-
-function contentDir (cache) {
-  return path.join(cache, `content-v${contentVer}`)
-}
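
Concretely, the hex digest of the strongest algorithm is split into two
2-character directory segments plus the remainder; the inline slicing below
mirrors the hash-to-segments util this file imports:

    const ssri = require('ssri')
    const path = require('path')

    const sri = ssri.parse(ssri.fromData('hello world'), { single: true })
    const hex = sri.hexDigest()

    console.log(path.join(
      'content-v2', sri.algorithm,
      hex.slice(0, 2), hex.slice(2, 4), hex.slice(4)
    ))
    // content-v2/sha512/30/9e/cc48...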
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js
deleted file mode 100644
index f41b539df65dc..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js
+++ /dev/null
@@ -1,166 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const fsm = require('fs-minipass')
-const ssri = require('ssri')
-const contentPath = require('./path')
-const Pipeline = require('minipass-pipeline')
-
-module.exports = read
-
-const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
-async function read (cache, integrity, opts = {}) {
-  const { size } = opts
-  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
-    // get size
-    const stat = await fs.stat(cpath)
-    return { stat, cpath, sri }
-  })
-  if (typeof size === 'number' && stat.size !== size) {
-    throw sizeError(size, stat.size)
-  }
-
-  if (stat.size > MAX_SINGLE_READ_SIZE) {
-    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
-  }
-
-  const data = await fs.readFile(cpath, { encoding: null })
-  if (!ssri.checkData(data, sri)) {
-    throw integrityError(sri, cpath)
-  }
-
-  return data
-}
-
-const readPipeline = (cpath, size, sri, stream) => {
-  stream.push(
-    new fsm.ReadStream(cpath, {
-      size,
-      readSize: MAX_SINGLE_READ_SIZE,
-    }),
-    ssri.integrityStream({
-      integrity: sri,
-      size,
-    })
-  )
-  return stream
-}
-
-module.exports.stream = readStream
-module.exports.readStream = readStream
-
-function readStream (cache, integrity, opts = {}) {
-  const { size } = opts
-  const stream = new Pipeline()
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
-      // just stat to ensure it exists
-      const stat = await fs.stat(cpath)
-      return { stat, cpath, sri }
-    })
-    if (typeof size === 'number' && size !== stat.size) {
-      return stream.emit('error', sizeError(size, stat.size))
-    }
-
-    return readPipeline(cpath, stat.size, sri, stream)
-  }).catch(err => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.copy = copy
-
-function copy (cache, integrity, dest) {
-  return withContentSri(cache, integrity, (cpath, sri) => {
-    return fs.copyFile(cpath, dest)
-  })
-}
-
-module.exports.hasContent = hasContent
-
-async function hasContent (cache, integrity) {
-  if (!integrity) {
-    return false
-  }
-
-  try {
-    return await withContentSri(cache, integrity, async (cpath, sri) => {
-      const stat = await fs.stat(cpath)
-      return { size: stat.size, sri, stat }
-    })
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return false
-    }
-
-    if (err.code === 'EPERM') {
-      /* istanbul ignore else */
-      if (process.platform !== 'win32') {
-        throw err
-      } else {
-        return false
-      }
-    }
-  }
-}
-
-async function withContentSri (cache, integrity, fn) {
-  const sri = ssri.parse(integrity)
-  // If `integrity` has multiple entries, pick the first digest
-  // with available local data.
-  const algo = sri.pickAlgorithm()
-  const digests = sri[algo]
-
-  if (digests.length <= 1) {
-    const cpath = contentPath(cache, digests[0])
-    return fn(cpath, digests[0])
-  } else {
-    // Can't use race here because a generic error can happen before
-    // an ENOENT error, and can happen before a valid result
-    const results = await Promise.all(digests.map(async (meta) => {
-      try {
-        return await withContentSri(cache, meta, fn)
-      } catch (err) {
-        if (err.code === 'ENOENT') {
-          return Object.assign(
-            new Error('No matching content found for ' + sri.toString()),
-            { code: 'ENOENT' }
-          )
-        }
-        return err
-      }
-    }))
-    // Return the first non error if it is found
-    const result = results.find((r) => !(r instanceof Error))
-    if (result) {
-      return result
-    }
-
-    // Throw the No matching content found error
-    const enoentError = results.find((r) => r.code === 'ENOENT')
-    if (enoentError) {
-      throw enoentError
-    }
-
-    // Throw generic error
-    throw results.find((r) => r instanceof Error)
-  }
-}
-
-function sizeError (expected, found) {
-  /* eslint-disable-next-line max-len */
-  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
-  err.expected = expected
-  err.found = found
-  err.code = 'EBADSIZE'
-  return err
-}
-
-function integrityError (sri, path) {
-  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
-  err.code = 'EINTEGRITY'
-  err.sri = sri
-  err.path = path
-  return err
-}
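
Both the buffered and streaming paths above delegate verification to ssri; the
core check in isolation (the data is illustrative):

    const ssri = require('ssri')

    const data = Buffer.from('cached tarball bytes')
    const integrity = ssri.fromData(data).toString()

    console.log(Boolean(ssri.checkData(data, integrity))) // true
    console.log(Boolean(
      ssri.checkData(Buffer.from('tampered'), integrity))) // false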
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js
deleted file mode 100644
index ce58d679e4cb2..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js
+++ /dev/null
@@ -1,18 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const contentPath = require('./path')
-const { hasContent } = require('./read')
-
-module.exports = rm
-
-async function rm (cache, integrity) {
-  const content = await hasContent(cache, integrity)
-  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
-  if (content && content.sri) {
-    await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
-    return true
-  } else {
-    return false
-  }
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js
deleted file mode 100644
index 7146146581287..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js
+++ /dev/null
@@ -1,205 +0,0 @@
-'use strict'
-
-const events = require('events')
-
-const contentPath = require('./path')
-const fs = require('fs/promises')
-const { moveFile } = require('@npmcli/fs')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-const Flush = require('minipass-flush')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-const fsm = require('fs-minipass')
-
-module.exports = write
-
-// Cache of move operations in process so we don't duplicate
-const moveOperations = new Map()
-
-async function write (cache, data, opts = {}) {
-  const { algorithms, size, integrity } = opts
-
-  if (typeof size === 'number' && data.length !== size) {
-    throw sizeError(size, data.length)
-  }
-
-  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
-  if (integrity && !ssri.checkData(data, integrity, opts)) {
-    throw checksumError(integrity, sri)
-  }
-
-  for (const algo in sri) {
-    const tmp = await makeTmp(cache, opts)
-    const hash = sri[algo].toString()
-    try {
-      await fs.writeFile(tmp.target, data, { flag: 'wx' })
-      await moveToDestination(tmp, cache, hash, opts)
-    } finally {
-      if (!tmp.moved) {
-        await fs.rm(tmp.target, { recursive: true, force: true })
-      }
-    }
-  }
-  return { integrity: sri, size: data.length }
-}
-
-module.exports.stream = writeStream
-
-// writes proxied to the 'inputStream' that is passed to the Promise
-// 'end' is deferred until content is handled.
-class CacacheWriteStream extends Flush {
-  constructor (cache, opts) {
-    super()
-    this.opts = opts
-    this.cache = cache
-    this.inputStream = new Minipass()
-    this.inputStream.on('error', er => this.emit('error', er))
-    this.inputStream.on('drain', () => this.emit('drain'))
-    this.handleContentP = null
-  }
-
-  write (chunk, encoding, cb) {
-    if (!this.handleContentP) {
-      this.handleContentP = handleContent(
-        this.inputStream,
-        this.cache,
-        this.opts
-      )
-    }
-    return this.inputStream.write(chunk, encoding, cb)
-  }
-
-  flush (cb) {
-    this.inputStream.end(() => {
-      if (!this.handleContentP) {
-        const e = new Error('Cache input stream was empty')
-        e.code = 'ENODATA'
-        // empty streams are probably emitting end right away.
-        // defer this one tick by rejecting a promise on it.
-        return Promise.reject(e).catch(cb)
-      }
-      // eslint-disable-next-line promise/catch-or-return
-      this.handleContentP.then(
-        (res) => {
-          res.integrity && this.emit('integrity', res.integrity)
-          // eslint-disable-next-line promise/always-return
-          res.size !== null && this.emit('size', res.size)
-          cb()
-        },
-        (er) => cb(er)
-      )
-    })
-  }
-}
-
-function writeStream (cache, opts = {}) {
-  return new CacacheWriteStream(cache, opts)
-}
-
-async function handleContent (inputStream, cache, opts) {
-  const tmp = await makeTmp(cache, opts)
-  try {
-    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
-    await moveToDestination(
-      tmp,
-      cache,
-      res.integrity,
-      opts
-    )
-    return res
-  } finally {
-    if (!tmp.moved) {
-      await fs.rm(tmp.target, { recursive: true, force: true })
-    }
-  }
-}
-
-async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
-  const outStream = new fsm.WriteStream(tmpTarget, {
-    flags: 'wx',
-  })
-
-  if (opts.integrityEmitter) {
-    // we need to create these all simultaneously since they can fire in any order
-    const [integrity, size] = await Promise.all([
-      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
-      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
-      new Pipeline(inputStream, outStream).promise(),
-    ])
-    return { integrity, size }
-  }
-
-  let integrity
-  let size
-  const hashStream = ssri.integrityStream({
-    integrity: opts.integrity,
-    algorithms: opts.algorithms,
-    size: opts.size,
-  })
-  hashStream.on('integrity', i => {
-    integrity = i
-  })
-  hashStream.on('size', s => {
-    size = s
-  })
-
-  const pipeline = new Pipeline(inputStream, hashStream, outStream)
-  await pipeline.promise()
-  return { integrity, size }
-}
-
-async function makeTmp (cache, opts) {
-  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
-  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
-  return {
-    target: tmpTarget,
-    moved: false,
-  }
-}
-
-async function moveToDestination (tmp, cache, sri, opts) {
-  const destination = contentPath(cache, sri)
-  const destDir = path.dirname(destination)
-  if (moveOperations.has(destination)) {
-    return moveOperations.get(destination)
-  }
-  moveOperations.set(
-    destination,
-    fs.mkdir(destDir, { recursive: true })
-      .then(async () => {
-        await moveFile(tmp.target, destination, { overwrite: false })
-        tmp.moved = true
-        return tmp.moved
-      })
-      .catch(err => {
-        if (!err.message.startsWith('The destination file exists')) {
-          throw Object.assign(err, { code: 'EEXIST' })
-        }
-      }).finally(() => {
-        moveOperations.delete(destination)
-      })
-
-  )
-  return moveOperations.get(destination)
-}
-
-function sizeError (expected, found) {
-  /* eslint-disable-next-line max-len */
-  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
-  err.expected = expected
-  err.found = found
-  err.code = 'EBADSIZE'
-  return err
-}
-
-function checksumError (expected, found) {
-  const err = new Error(`Integrity check failed:
-  Wanted: ${expected}
-   Found: ${found}`)
-  err.code = 'EINTEGRITY'
-  err.expected = expected
-  err.found = found
-  return err
-}
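
write.js and read.js are the internals behind cacache's documented put/get API.
A hedged end-to-end round trip (the cache path is illustrative):

    const cacache = require('cacache')

    const run = async () => {
      // put resolves with the computed integrity string
      const integrity = await cacache.put('/tmp/my-cache', 'my-key', 'hello')
      const { data, size } = await cacache.get('/tmp/my-cache', 'my-key')
      console.log(data.toString(), size, integrity) // 'hello' 5 sha512-...
    }

    run().catch(console.error)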
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js
deleted file mode 100644
index 722a37af5ce15..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js
+++ /dev/null
@@ -1,330 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const {
-  appendFile,
-  mkdir,
-  readFile,
-  readdir,
-  rm,
-  writeFile,
-} = require('fs/promises')
-const { Minipass } = require('minipass')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-
-const contentPath = require('./content/path')
-const hashToSegments = require('./util/hash-to-segments')
-const indexV = require('../package.json')['cache-version'].index
-const { moveFile } = require('@npmcli/fs')
-
-module.exports.NotFoundError = class NotFoundError extends Error {
-  constructor (cache, key) {
-    super(`No cache entry for ${key} found in ${cache}`)
-    this.code = 'ENOENT'
-    this.cache = cache
-    this.key = key
-  }
-}
-
-module.exports.compact = compact
-
-async function compact (cache, key, matchFn, opts = {}) {
-  const bucket = bucketPath(cache, key)
-  const entries = await bucketEntries(bucket)
-  const newEntries = []
-  // we loop backwards because the bottom-most result is the newest
-  // since we add new entries with appendFile
-  for (let i = entries.length - 1; i >= 0; --i) {
-    const entry = entries[i]
-    // a null integrity could mean either a delete was appended
-    // or the user has simply stored an index that does not map
-    // to any content. we determine if the user wants to keep the
-    // null integrity based on the validateEntry function passed in options.
-    // if the integrity is null and no validateEntry is provided, we break
-    // as we consider the null integrity to be a deletion of everything
-    // that came before it.
-    if (entry.integrity === null && !opts.validateEntry) {
-      break
-    }
-
-    // if this entry is valid, and it is either the first entry or
-    // the newEntries array doesn't already include an entry that
-    // matches this one based on the provided matchFn, then we add
-    // it to the beginning of our list
-    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
-      (newEntries.length === 0 ||
-        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
-      newEntries.unshift(entry)
-    }
-  }
-
-  const newIndex = '\n' + newEntries.map((entry) => {
-    const stringified = JSON.stringify(entry)
-    const hash = hashEntry(stringified)
-    return `${hash}\t${stringified}`
-  }).join('\n')
-
-  const setup = async () => {
-    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
-    await mkdir(path.dirname(target), { recursive: true })
-    return {
-      target,
-      moved: false,
-    }
-  }
-
-  const teardown = async (tmp) => {
-    if (!tmp.moved) {
-      return rm(tmp.target, { recursive: true, force: true })
-    }
-  }
-
-  const write = async (tmp) => {
-    await writeFile(tmp.target, newIndex, { flag: 'wx' })
-    await mkdir(path.dirname(bucket), { recursive: true })
-    // we use @npmcli/move-file directly here because we
-    // want to overwrite the existing file
-    await moveFile(tmp.target, bucket)
-    tmp.moved = true
-  }
-
-  // write the file atomically
-  const tmp = await setup()
-  try {
-    await write(tmp)
-  } finally {
-    await teardown(tmp)
-  }
-
-  // we reverse the list we generated such that the newest
-  // entries come first in order to make looping through them easier
-  // the true passed to formatEntry tells it to keep null
-  // integrity values, if they made it this far it's because
-  // validateEntry returned true, and as such we should return it
-  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
-}
-
-module.exports.insert = insert
-
-async function insert (cache, key, integrity, opts = {}) {
-  const { metadata, size, time } = opts
-  const bucket = bucketPath(cache, key)
-  const entry = {
-    key,
-    integrity: integrity && ssri.stringify(integrity),
-    time: time || Date.now(),
-    size,
-    metadata,
-  }
-  try {
-    await mkdir(path.dirname(bucket), { recursive: true })
-    const stringified = JSON.stringify(entry)
-    // NOTE - Cleverness ahoy!
-    //
-    // This works because it's tremendously unlikely for an entry to corrupt
-    // another while still preserving the string length of the JSON in
-    // question. So, we just slap the length in there and verify it on read.
-    //
-    // Thanks to @isaacs for the whiteboarding session that ended up with
-    // this.
-    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return undefined
-    }
-
-    throw err
-  }
-  return formatEntry(cache, entry)
-}
-
-module.exports.find = find
-
-async function find (cache, key) {
-  const bucket = bucketPath(cache, key)
-  try {
-    const entries = await bucketEntries(bucket)
-    return entries.reduce((latest, next) => {
-      if (next && next.key === key) {
-        return formatEntry(cache, next)
-      } else {
-        return latest
-      }
-    }, null)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return null
-    } else {
-      throw err
-    }
-  }
-}
-
-module.exports.delete = del
-
-function del (cache, key, opts = {}) {
-  if (!opts.removeFully) {
-    return insert(cache, key, null, opts)
-  }
-
-  const bucket = bucketPath(cache, key)
-  return rm(bucket, { recursive: true, force: true })
-}
-
-module.exports.lsStream = lsStream
-
-function lsStream (cache) {
-  const indexDir = bucketDir(cache)
-  const stream = new Minipass({ objectMode: true })
-
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const buckets = await readdirOrEmpty(indexDir)
-    await Promise.all(buckets.map(async (bucket) => {
-      const bucketPath = path.join(indexDir, bucket)
-      const subbuckets = await readdirOrEmpty(bucketPath)
-      await Promise.all(subbuckets.map(async (subbucket) => {
-        const subbucketPath = path.join(bucketPath, subbucket)
-
-        // "/cachename//./*"
-        const subbucketEntries = await readdirOrEmpty(subbucketPath)
-        await Promise.all(subbucketEntries.map(async (entry) => {
-          const entryPath = path.join(subbucketPath, entry)
-          try {
-            const entries = await bucketEntries(entryPath)
-            // using a Map here prevents duplicate keys from showing up
-            // twice, I guess?
-            const reduced = entries.reduce((acc, entry) => {
-              acc.set(entry.key, entry)
-              return acc
-            }, new Map())
-            // reduced is a map of key => entry
-            for (const entry of reduced.values()) {
-              const formatted = formatEntry(cache, entry)
-              if (formatted) {
-                stream.write(formatted)
-              }
-            }
-          } catch (err) {
-            if (err.code === 'ENOENT') {
-              return undefined
-            }
-            throw err
-          }
-        }))
-      }))
-    }))
-    stream.end()
-    return stream
-  }).catch(err => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.ls = ls
-
-async function ls (cache) {
-  const entries = await lsStream(cache).collect()
-  return entries.reduce((acc, xs) => {
-    acc[xs.key] = xs
-    return acc
-  }, {})
-}
-
-module.exports.bucketEntries = bucketEntries
-
-async function bucketEntries (bucket, filter) {
-  const data = await readFile(bucket, 'utf8')
-  return _bucketEntries(data, filter)
-}
-
-function _bucketEntries (data, filter) {
-  const entries = []
-  data.split('\n').forEach((entry) => {
-    if (!entry) {
-      return
-    }
-
-    const pieces = entry.split('\t')
-    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
-      // Hash is no good! Corruption or malice? Doesn't matter!
-      // EJECT EJECT
-      return
-    }
-    let obj
-    try {
-      obj = JSON.parse(pieces[1])
-    } catch (_) {
-      // invalid JSON is ignored; the entry is simply skipped below
-    }
-    // coverage disabled here, no need to test with an entry that parses to something falsy
-    // istanbul ignore else
-    if (obj) {
-      entries.push(obj)
-    }
-  })
-  return entries
-}
-
-module.exports.bucketDir = bucketDir
-
-function bucketDir (cache) {
-  return path.join(cache, `index-v${indexV}`)
-}
-
-module.exports.bucketPath = bucketPath
-
-function bucketPath (cache, key) {
-  const hashed = hashKey(key)
-  return path.join.apply(
-    path,
-    [bucketDir(cache)].concat(hashToSegments(hashed))
-  )
-}
-
-module.exports.hashKey = hashKey
-
-function hashKey (key) {
-  return hash(key, 'sha256')
-}
-
-module.exports.hashEntry = hashEntry
-
-function hashEntry (str) {
-  return hash(str, 'sha1')
-}
-
-function hash (str, digest) {
-  return crypto
-    .createHash(digest)
-    .update(str)
-    .digest('hex')
-}
-
-function formatEntry (cache, entry, keepAll) {
-  // Treat null digests as deletions. They'll shadow any previous entries.
-  if (!entry.integrity && !keepAll) {
-    return null
-  }
-
-  return {
-    key: entry.key,
-    integrity: entry.integrity,
-    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
-    size: entry.size,
-    time: entry.time,
-    metadata: entry.metadata,
-  }
-}
-
-function readdirOrEmpty (dir) {
-  return readdir(dir).catch((err) => {
-    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
-      return []
-    }
-
-    throw err
-  })
-}
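For reference, the append-only bucket format that insert() and _bucketEntries()
implement above can be exercised in isolation. A minimal standalone sketch,
using only node's built-in crypto:

    'use strict'
    const crypto = require('crypto')

    const hashEntry = (str) => crypto.createHash('sha1').update(str).digest('hex')

    // write side: each appended line is "<sha1 of the JSON>\t<JSON>"
    const stringified = JSON.stringify({ key: 'my-key', integrity: 'sha512-abc', time: Date.now() })
    const line = `\n${hashEntry(stringified)}\t${stringified}`

    // read side: re-hash the payload and compare; a torn or corrupted append
    // fails the check and the reader silently skips that line
    const [digest, json] = line.trim().split('\t')
    console.log(hashEntry(json) === digest) // true for an intact line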
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js
deleted file mode 100644
index 80ec206c7ecaa..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js
+++ /dev/null
@@ -1,170 +0,0 @@
-'use strict'
-
-const Collect = require('minipass-collect')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const read = require('./content/read')
-
-async function getData (cache, key, opts = {}) {
-  const { integrity, memoize, size } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return {
-      metadata: memoized.entry.metadata,
-      data: memoized.data,
-      integrity: memoized.entry.integrity,
-      size: memoized.entry.size,
-    }
-  }
-
-  const entry = await index.find(cache, key, opts)
-  if (!entry) {
-    throw new index.NotFoundError(cache, key)
-  }
-  const data = await read(cache, entry.integrity, { integrity, size })
-  if (memoize) {
-    memo.put(cache, entry, data, opts)
-  }
-
-  return {
-    data,
-    metadata: entry.metadata,
-    size: entry.size,
-    integrity: entry.integrity,
-  }
-}
-module.exports = getData
-
-async function getDataByDigest (cache, key, opts = {}) {
-  const { integrity, memoize, size } = opts
-  const memoized = memo.get.byDigest(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return memoized
-  }
-
-  const res = await read(cache, key, { integrity, size })
-  if (memoize) {
-    memo.put.byDigest(cache, key, res, opts)
-  }
-  return res
-}
-module.exports.byDigest = getDataByDigest
-
-const getMemoizedStream = (memoized) => {
-  const stream = new Minipass()
-  stream.on('newListener', function (ev, cb) {
-    ev === 'metadata' && cb(memoized.entry.metadata)
-    ev === 'integrity' && cb(memoized.entry.integrity)
-    ev === 'size' && cb(memoized.entry.size)
-  })
-  stream.end(memoized.data)
-  return stream
-}
-
-function getStream (cache, key, opts = {}) {
-  const { memoize, size } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return getMemoizedStream(memoized)
-  }
-
-  const stream = new Pipeline()
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const entry = await index.find(cache, key)
-    if (!entry) {
-      throw new index.NotFoundError(cache, key)
-    }
-
-    stream.emit('metadata', entry.metadata)
-    stream.emit('integrity', entry.integrity)
-    stream.emit('size', entry.size)
-    stream.on('newListener', function (ev, cb) {
-      ev === 'metadata' && cb(entry.metadata)
-      ev === 'integrity' && cb(entry.integrity)
-      ev === 'size' && cb(entry.size)
-    })
-
-    const src = read.readStream(
-      cache,
-      entry.integrity,
-      { ...opts, size: typeof size !== 'number' ? entry.size : size }
-    )
-
-    if (memoize) {
-      const memoStream = new Collect.PassThrough()
-      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
-      stream.unshift(memoStream)
-    }
-    stream.unshift(src)
-    return stream
-  }).catch((err) => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.stream = getStream
-
-function getStreamDigest (cache, integrity, opts = {}) {
-  const { memoize } = opts
-  const memoized = memo.get.byDigest(cache, integrity, opts)
-  if (memoized && memoize !== false) {
-    const stream = new Minipass()
-    stream.end(memoized)
-    return stream
-  } else {
-    const stream = read.readStream(cache, integrity, opts)
-    if (!memoize) {
-      return stream
-    }
-
-    const memoStream = new Collect.PassThrough()
-    memoStream.on('collect', data => memo.put.byDigest(
-      cache,
-      integrity,
-      data,
-      opts
-    ))
-    return new Pipeline(stream, memoStream)
-  }
-}
-
-module.exports.stream.byDigest = getStreamDigest
-
-function info (cache, key, opts = {}) {
-  const { memoize } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return Promise.resolve(memoized.entry)
-  } else {
-    return index.find(cache, key)
-  }
-}
-module.exports.info = info
-
-async function copy (cache, key, dest, opts = {}) {
-  const entry = await index.find(cache, key, opts)
-  if (!entry) {
-    throw new index.NotFoundError(cache, key)
-  }
-  await read.copy(cache, entry.integrity, dest, opts)
-  return {
-    metadata: entry.metadata,
-    size: entry.size,
-    integrity: entry.integrity,
-  }
-}
-
-module.exports.copy = copy
-
-async function copyByDigest (cache, key, dest, opts = {}) {
-  await read.copy(cache, key, dest, opts)
-  return key
-}
-
-module.exports.copy.byDigest = copyByDigest
-
-module.exports.hasContent = read.hasContent
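The functions above back cacache's public get API. A minimal usage sketch,
assuming an entry was previously stored under 'my-key' (the cache path is
hypothetical):

    const cacache = require('cacache')

    async function main () {
      // buffer mode: getData() checks the memo cache, then index + content
      const { data, integrity } = await cacache.get('/tmp/my-cache', 'my-key')
      console.log(integrity, data.length)

      // stream mode: getStream() delivers entry details via events
      cacache.get.stream('/tmp/my-cache', 'my-key')
        .on('metadata', (m) => console.log('metadata', m))
        .pipe(process.stdout)
    }

    main().catch(console.error)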
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js
deleted file mode 100644
index c9b0da5f3a271..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js
+++ /dev/null
@@ -1,42 +0,0 @@
-'use strict'
-
-const get = require('./get.js')
-const put = require('./put.js')
-const rm = require('./rm.js')
-const verify = require('./verify.js')
-const { clearMemoized } = require('./memoization.js')
-const tmp = require('./util/tmp.js')
-const index = require('./entry-index.js')
-
-module.exports.index = {}
-module.exports.index.compact = index.compact
-module.exports.index.insert = index.insert
-
-module.exports.ls = index.ls
-module.exports.ls.stream = index.lsStream
-
-module.exports.get = get
-module.exports.get.byDigest = get.byDigest
-module.exports.get.stream = get.stream
-module.exports.get.stream.byDigest = get.stream.byDigest
-module.exports.get.copy = get.copy
-module.exports.get.copy.byDigest = get.copy.byDigest
-module.exports.get.info = get.info
-module.exports.get.hasContent = get.hasContent
-
-module.exports.put = put
-module.exports.put.stream = put.stream
-
-module.exports.rm = rm.entry
-module.exports.rm.all = rm.all
-module.exports.rm.entry = module.exports.rm
-module.exports.rm.content = rm.content
-
-module.exports.clearMemoized = clearMemoized
-
-module.exports.tmp = {}
-module.exports.tmp.mkdir = tmp.mkdir
-module.exports.tmp.withTmp = tmp.withTmp
-
-module.exports.verify = verify
-module.exports.verify.lastRun = verify.lastRun
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js
deleted file mode 100644
index 0ff604a479c9c..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js
+++ /dev/null
@@ -1,72 +0,0 @@
-'use strict'
-
-const LRU = require('lru-cache')
-
-const MEMOIZED = new LRU({
-  max: 500,
-  maxSize: 50 * 1024 * 1024, // 50MB
-  ttl: 3 * 60 * 1000, // 3 minutes
-  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
-})
-
-module.exports.clearMemoized = clearMemoized
-
-function clearMemoized () {
-  const old = {}
-  MEMOIZED.forEach((v, k) => {
-    old[k] = v
-  })
-  MEMOIZED.clear()
-  return old
-}
-
-module.exports.put = put
-
-function put (cache, entry, data, opts) {
-  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
-  putDigest(cache, entry.integrity, data, opts)
-}
-
-module.exports.put.byDigest = putDigest
-
-function putDigest (cache, integrity, data, opts) {
-  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
-}
-
-module.exports.get = get
-
-function get (cache, key, opts) {
-  return pickMem(opts).get(`key:${cache}:${key}`)
-}
-
-module.exports.get.byDigest = getDigest
-
-function getDigest (cache, integrity, opts) {
-  return pickMem(opts).get(`digest:${cache}:${integrity}`)
-}
-
-class ObjProxy {
-  constructor (obj) {
-    this.obj = obj
-  }
-
-  get (key) {
-    return this.obj[key]
-  }
-
-  set (key, val) {
-    this.obj[key] = val
-  }
-}
-
-function pickMem (opts) {
-  if (!opts || !opts.memoize) {
-    return MEMOIZED
-  } else if (opts.memoize.get && opts.memoize.set) {
-    return opts.memoize
-  } else if (typeof opts.memoize === 'object') {
-    return new ObjProxy(opts.memoize)
-  } else {
-    return MEMOIZED
-  }
-}
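pickMem() above means opts.memoize can carry a caller-supplied store: anything
with get/set methods is used directly, any other object is wrapped in ObjProxy,
and everything else falls back to the shared LRU. A sketch (the cache path and
key are hypothetical):

    const memo = require('cacache/lib/memoization')

    const store = new Map() // has get/set, so pickMem() uses it as-is
    const entry = { key: 'k', integrity: 'sha512-abc' }
    memo.put('/cache', entry, Buffer.from('hi'), { memoize: store })

    console.log(store.get('key:/cache:k'))             // { entry, data }
    console.log(store.get('digest:/cache:sha512-abc')) // <Buffer 68 69>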
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js
deleted file mode 100644
index 9fc932d5f6dec..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js
+++ /dev/null
@@ -1,80 +0,0 @@
-'use strict'
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const write = require('./content/write')
-const Flush = require('minipass-flush')
-const { PassThrough } = require('minipass-collect')
-const Pipeline = require('minipass-pipeline')
-
-const putOpts = (opts) => ({
-  algorithms: ['sha512'],
-  ...opts,
-})
-
-module.exports = putData
-
-async function putData (cache, key, data, opts = {}) {
-  const { memoize } = opts
-  opts = putOpts(opts)
-  const res = await write(cache, data, opts)
-  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
-  if (memoize) {
-    memo.put(cache, entry, data, opts)
-  }
-
-  return res.integrity
-}
-
-module.exports.stream = putStream
-
-function putStream (cache, key, opts = {}) {
-  const { memoize } = opts
-  opts = putOpts(opts)
-  let integrity
-  let size
-  let error
-
-  let memoData
-  const pipeline = new Pipeline()
-  // first item in the pipeline is the memoizer, because we need
-  // that to end first and get the collected data.
-  if (memoize) {
-    const memoizer = new PassThrough().on('collect', data => {
-      memoData = data
-    })
-    pipeline.push(memoizer)
-  }
-
-  // contentStream is a write-only stream, not a passthrough;
-  // no data comes out of it.
-  const contentStream = write.stream(cache, opts)
-    .on('integrity', (int) => {
-      integrity = int
-    })
-    .on('size', (s) => {
-      size = s
-    })
-    .on('error', (err) => {
-      error = err
-    })
-
-  pipeline.push(contentStream)
-
-  // last but not least, we write the index and emit hash and size,
-  // and memoize if we're doing that
-  pipeline.push(new Flush({
-    async flush () {
-      if (!error) {
-        const entry = await index.insert(cache, key, integrity, { ...opts, size })
-        if (memoize && memoData) {
-          memo.put(cache, entry, memoData, opts)
-        }
-        pipeline.emit('integrity', integrity)
-        pipeline.emit('size', size)
-      }
-    },
-  }))
-
-  return pipeline
-}
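Because contentStream above never emits data, putStream() acts as a pure sink;
integrity and size arrive as events once the final Flush step has written the
index. A short usage sketch (file and cache paths are hypothetical):

    const cacache = require('cacache')
    const fs = require('fs')

    fs.createReadStream('some-file.tgz').pipe(
      cacache.put.stream('/tmp/my-cache', 'my-key')
        .on('integrity', (i) => console.log('stored as', i))
        .on('size', (s) => console.log('bytes written', s))
    )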
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js
deleted file mode 100644
index a94760c7cf243..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js
+++ /dev/null
@@ -1,31 +0,0 @@
-'use strict'
-
-const { rm } = require('fs/promises')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const memo = require('./memoization')
-const path = require('path')
-const rmContent = require('./content/rm')
-
-module.exports = entry
-module.exports.entry = entry
-
-function entry (cache, key, opts) {
-  memo.clearMemoized()
-  return index.delete(cache, key, opts)
-}
-
-module.exports.content = content
-
-function content (cache, integrity) {
-  memo.clearMemoized()
-  return rmContent(cache, integrity)
-}
-
-module.exports.all = all
-
-async function all (cache) {
-  memo.clearMemoized()
-  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
-  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js
deleted file mode 100644
index 8500c1c16a429..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-const { glob } = require('glob')
-const path = require('path')
-
-const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
-module.exports = (path, options) => glob(globify(path), options)
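globify() exists because glob treats backslashes in patterns as escape
characters, so Windows-style paths must be rewritten with forward slashes
before globbing. A sketch of the conversion:

    const path = require('path')
    const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)

    console.log(globify('C:\\cache\\content-v2\\**')) // 'C:/cache/content-v2/**'
    console.log(globify('/cache/content-v2/**'))      // POSIX input is unchanged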
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js
deleted file mode 100644
index 445599b503808..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-module.exports = hashToSegments
-
-function hashToSegments (hash) {
-  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js
deleted file mode 100644
index 0bf5302136ebe..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js
+++ /dev/null
@@ -1,26 +0,0 @@
-'use strict'
-
-const { withTempDir } = require('@npmcli/fs')
-const fs = require('fs/promises')
-const path = require('path')
-
-module.exports.mkdir = mktmpdir
-
-async function mktmpdir (cache, opts = {}) {
-  const { tmpPrefix } = opts
-  const tmpDir = path.join(cache, 'tmp')
-  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
-  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
-  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
-  return fs.mkdtemp(target, { owner: 'inherit' })
-}
-
-module.exports.withTmp = withTmp
-
-function withTmp (cache, opts, cb) {
-  if (!cb) {
-    cb = opts
-    opts = {}
-  }
-  return withTempDir(path.join(cache, 'tmp'), cb, opts)
-}
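The trailing-separator note in mktmpdir() above matters because fs.mkdtemp
appends its six random characters to the prefix string verbatim. A sketch,
POSIX paths shown:

    const path = require('path')

    // path.join drops the trailing separator when the prefix is empty,
    // so mkdtemp would create a sibling such as '/cache/tmpXXXXXX'
    console.log(path.join('/cache/tmp', ''))  // '/cache/tmp'

    // manual concatenation keeps the separator, so the temp dir is
    // created inside '/cache/tmp/' instead
    console.log('/cache/tmp' + path.sep)      // '/cache/tmp/'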
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js
deleted file mode 100644
index 62e85c946490f..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js
+++ /dev/null
@@ -1,257 +0,0 @@
-'use strict'
-
-const {
-  mkdir,
-  readFile,
-  rm,
-  stat,
-  truncate,
-  writeFile,
-} = require('fs/promises')
-const pMap = require('p-map')
-const contentPath = require('./content/path')
-const fsm = require('fs-minipass')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const path = require('path')
-const ssri = require('ssri')
-
-const hasOwnProperty = (obj, key) =>
-  Object.prototype.hasOwnProperty.call(obj, key)
-
-const verifyOpts = (opts) => ({
-  concurrency: 20,
-  log: { silly () {} },
-  ...opts,
-})
-
-module.exports = verify
-
-async function verify (cache, opts) {
-  opts = verifyOpts(opts)
-  opts.log.silly('verify', 'verifying cache at', cache)
-
-  const steps = [
-    markStartTime,
-    fixPerms,
-    garbageCollect,
-    rebuildIndex,
-    cleanTmp,
-    writeVerifile,
-    markEndTime,
-  ]
-
-  const stats = {}
-  for (const step of steps) {
-    const label = step.name
-    const start = new Date()
-    const s = await step(cache, opts)
-    if (s) {
-      Object.keys(s).forEach((k) => {
-        stats[k] = s[k]
-      })
-    }
-    const end = new Date()
-    if (!stats.runTime) {
-      stats.runTime = {}
-    }
-    stats.runTime[label] = end - start
-  }
-  stats.runTime.total = stats.endTime - stats.startTime
-  opts.log.silly(
-    'verify',
-    'verification finished for',
-    cache,
-    'in',
-    `${stats.runTime.total}ms`
-  )
-  return stats
-}
-
-async function markStartTime (cache, opts) {
-  return { startTime: new Date() }
-}
-
-async function markEndTime (cache, opts) {
-  return { endTime: new Date() }
-}
-
-async function fixPerms (cache, opts) {
-  opts.log.silly('verify', 'fixing cache permissions')
-  await mkdir(cache, { recursive: true })
-  return null
-}
-
-// Implements a naive mark-and-sweep tracing garbage collector.
-//
-// The algorithm is basically as follows:
-// 1. Read (and filter) all index entries ("pointers")
-// 2. Mark each integrity value as "live"
-// 3. Read entire filesystem tree in `content-vX/` dir
-// 4. If content is live, verify its checksum and delete it if it fails
-// 5. If content is not marked as live, rm it.
-//
-async function garbageCollect (cache, opts) {
-  opts.log.silly('verify', 'garbage collecting content')
-  const indexStream = index.lsStream(cache)
-  const liveContent = new Set()
-  indexStream.on('data', (entry) => {
-    if (opts.filter && !opts.filter(entry)) {
-      return
-    }
-
-    // integrity is stringified, re-parse it so we can get each hash
-    const integrity = ssri.parse(entry.integrity)
-    for (const algo in integrity) {
-      liveContent.add(integrity[algo].toString())
-    }
-  })
-  await new Promise((resolve, reject) => {
-    indexStream.on('end', resolve).on('error', reject)
-  })
-  const contentDir = contentPath.contentDir(cache)
-  const files = await glob(path.join(contentDir, '**'), {
-    follow: false,
-    nodir: true,
-    nosort: true,
-  })
-  const stats = {
-    verifiedContent: 0,
-    reclaimedCount: 0,
-    reclaimedSize: 0,
-    badContentCount: 0,
-    keptSize: 0,
-  }
-  await pMap(
-    files,
-    async (f) => {
-      const split = f.split(/[/\\]/)
-      const digest = split.slice(split.length - 3).join('')
-      const algo = split[split.length - 4]
-      const integrity = ssri.fromHex(digest, algo)
-      if (liveContent.has(integrity.toString())) {
-        const info = await verifyContent(f, integrity)
-        if (!info.valid) {
-          stats.reclaimedCount++
-          stats.badContentCount++
-          stats.reclaimedSize += info.size
-        } else {
-          stats.verifiedContent++
-          stats.keptSize += info.size
-        }
-      } else {
-        // No entries refer to this content. We can delete.
-        stats.reclaimedCount++
-        const s = await stat(f)
-        await rm(f, { recursive: true, force: true })
-        stats.reclaimedSize += s.size
-      }
-      return stats
-    },
-    { concurrency: opts.concurrency }
-  )
-  return stats
-}
-
-async function verifyContent (filepath, sri) {
-  const contentInfo = {}
-  try {
-    const { size } = await stat(filepath)
-    contentInfo.size = size
-    contentInfo.valid = true
-    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return { size: 0, valid: false }
-    }
-    if (err.code !== 'EINTEGRITY') {
-      throw err
-    }
-
-    await rm(filepath, { recursive: true, force: true })
-    contentInfo.valid = false
-  }
-  return contentInfo
-}
-
-async function rebuildIndex (cache, opts) {
-  opts.log.silly('verify', 'rebuilding index')
-  const entries = await index.ls(cache)
-  const stats = {
-    missingContent: 0,
-    rejectedEntries: 0,
-    totalEntries: 0,
-  }
-  const buckets = {}
-  for (const k in entries) {
-    /* istanbul ignore else */
-    if (hasOwnProperty(entries, k)) {
-      const hashed = index.hashKey(k)
-      const entry = entries[k]
-      const excluded = opts.filter && !opts.filter(entry)
-      excluded && stats.rejectedEntries++
-      if (buckets[hashed] && !excluded) {
-        buckets[hashed].push(entry)
-      } else if (buckets[hashed] && excluded) {
-        // skip
-      } else if (excluded) {
-        buckets[hashed] = []
-        buckets[hashed]._path = index.bucketPath(cache, k)
-      } else {
-        buckets[hashed] = [entry]
-        buckets[hashed]._path = index.bucketPath(cache, k)
-      }
-    }
-  }
-  await pMap(
-    Object.keys(buckets),
-    (key) => {
-      return rebuildBucket(cache, buckets[key], stats, opts)
-    },
-    { concurrency: opts.concurrency }
-  )
-  return stats
-}
-
-async function rebuildBucket (cache, bucket, stats, opts) {
-  await truncate(bucket._path)
-  // This needs to be serialized because cacache explicitly
-  // lets very racy bucket conflicts clobber each other.
-  for (const entry of bucket) {
-    const content = contentPath(cache, entry.integrity)
-    try {
-      await stat(content)
-      await index.insert(cache, entry.key, entry.integrity, {
-        metadata: entry.metadata,
-        size: entry.size,
-        time: entry.time,
-      })
-      stats.totalEntries++
-    } catch (err) {
-      if (err.code === 'ENOENT') {
-        stats.rejectedEntries++
-        stats.missingContent++
-      } else {
-        throw err
-      }
-    }
-  }
-}
-
-function cleanTmp (cache, opts) {
-  opts.log.silly('verify', 'cleaning tmp directory')
-  return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
-}
-
-async function writeVerifile (cache, opts) {
-  const verifile = path.join(cache, '_lastverified')
-  opts.log.silly('verify', 'writing verifile to ' + verifile)
-  return writeFile(verifile, `${Date.now()}`)
-}
-
-module.exports.lastRun = lastRun
-
-async function lastRun (cache) {
-  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
-  return new Date(+data)
-}
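The path-to-integrity recovery in garbageCollect() above relies on the
content-vX layout, where hash-to-segments splits the hex digest 2/2/rest.
A minimal sketch of that recovery (hypothetical cache path; ssri is the same
dependency used above):

    const ssri = require('ssri')

    const f = '/cache/content-v2/sha512/de/ad/beef0123'
    const split = f.split(/[/\\]/)
    const digest = split.slice(split.length - 3).join('') // 'deadbeef0123'
    const algo = split[split.length - 4]                  // 'sha512'

    // yields the same integrity string the index entries carry, so
    // membership in the "live" set can be tested
    console.log(ssri.fromHex(digest, algo).toString())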
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json
deleted file mode 100644
index ab58cb8b7c50f..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
-  "name": "cacache",
-  "version": "17.1.4",
-  "cache-version": {
-    "content": "2",
-    "index": "5"
-  },
-  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "coverage": "tap",
-    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
-    "lint": "eslint \"**/*.js\"",
-    "npmclilint": "npmcli-lint",
-    "lintfix": "npm run lint -- --fix",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "posttest": "npm run lint",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/cacache.git"
-  },
-  "keywords": [
-    "cache",
-    "caching",
-    "content-addressable",
-    "sri",
-    "sri hash",
-    "subresource integrity",
-    "cache",
-    "storage",
-    "store",
-    "file store",
-    "filesystem",
-    "disk cache",
-    "disk storage"
-  ],
-  "license": "ISC",
-  "dependencies": {
-    "@npmcli/fs": "^3.1.0",
-    "fs-minipass": "^3.0.0",
-    "glob": "^10.2.2",
-    "lru-cache": "^7.7.1",
-    "minipass": "^7.0.3",
-    "minipass-collect": "^1.0.2",
-    "minipass-flush": "^1.0.5",
-    "minipass-pipeline": "^1.2.4",
-    "p-map": "^4.0.0",
-    "ssri": "^10.0.0",
-    "tar": "^6.1.11",
-    "unique-filename": "^3.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.0.0"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "windowsCI": false,
-    "version": "4.18.0",
-    "publish": "true"
-  },
-  "author": "GitHub Inc.",
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/LICENSE
deleted file mode 100644
index 45055763dc838..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright (c) 2015, Rebecca Turner
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/from-url.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/from-url.js
deleted file mode 100644
index efc1247d59d12..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/from-url.js
+++ /dev/null
@@ -1,122 +0,0 @@
-'use strict'
-
-const parseUrl = require('./parse-url')
-
-// look for github shorthand inputs, such as npm/cli
-const isGitHubShorthand = (arg) => {
-  // it cannot contain whitespace before the first #
-  // it cannot start with a / because that's probably an absolute file path
-  // but it must include a slash since repos are username/repository
-  // it cannot start with a . because that's probably a relative file path
-  // it cannot start with an @ because that's a scoped package if it passes the other tests
-  // it cannot contain a : before a # because that tells us that there's a protocol
-  // a second / may not exist before a #
-  const firstHash = arg.indexOf('#')
-  const firstSlash = arg.indexOf('/')
-  const secondSlash = arg.indexOf('/', firstSlash + 1)
-  const firstColon = arg.indexOf(':')
-  const firstSpace = /\s/.exec(arg)
-  const firstAt = arg.indexOf('@')
-
-  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
-  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
-  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
-  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
-  const hasSlash = firstSlash > 0
-  // if a # is found, what we really want to know is that the character
-  // immediately before # is not a /
-  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
-  const doesNotStartWithDot = !arg.startsWith('.')
-
-  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
-    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
-    secondSlashOnlyAfterHash
-}
-
-module.exports = (giturl, opts, { gitHosts, protocols }) => {
-  if (!giturl) {
-    return
-  }
-
-  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
-  const parsed = parseUrl(correctedUrl, protocols)
-  if (!parsed) {
-    return
-  }
-
-  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
-  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
-    ? parsed.hostname.slice(4)
-    : parsed.hostname]
-  const gitHostName = gitHostShortcut || gitHostDomain
-  if (!gitHostName) {
-    return
-  }
-
-  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
-  let auth = null
-  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
-    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
-  }
-
-  let committish = null
-  let user = null
-  let project = null
-  let defaultRepresentation = null
-
-  try {
-    if (gitHostShortcut) {
-      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
-      const firstAt = pathname.indexOf('@')
-      // we ignore auth for shortcuts, so just trim it out
-      if (firstAt > -1) {
-        pathname = pathname.slice(firstAt + 1)
-      }
-
-      const lastSlash = pathname.lastIndexOf('/')
-      if (lastSlash > -1) {
-        user = decodeURIComponent(pathname.slice(0, lastSlash))
-        // we want nulls only, never empty strings
-        if (!user) {
-          user = null
-        }
-        project = decodeURIComponent(pathname.slice(lastSlash + 1))
-      } else {
-        project = decodeURIComponent(pathname)
-      }
-
-      if (project.endsWith('.git')) {
-        project = project.slice(0, -4)
-      }
-
-      if (parsed.hash) {
-        committish = decodeURIComponent(parsed.hash.slice(1))
-      }
-
-      defaultRepresentation = 'shortcut'
-    } else {
-      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
-        return
-      }
-
-      const segments = gitHostInfo.extract(parsed)
-      if (!segments) {
-        return
-      }
-
-      user = segments.user && decodeURIComponent(segments.user)
-      project = decodeURIComponent(segments.project)
-      committish = decodeURIComponent(segments.committish)
-      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
-    }
-  } catch (err) {
-    /* istanbul ignore else */
-    if (err instanceof URIError) {
-      return
-    } else {
-      throw err
-    }
-  }
-
-  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
-}
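The shorthand rules above are easiest to see with concrete inputs. A sketch
using the module's public entry point, which routes through this file:

    const hgi = require('hosted-git-info')

    // one '/', and nothing suspicious before the '#': accepted as shorthand
    console.log(hgi.fromUrl('npm/cli').shortcut())         // 'github:npm/cli'
    console.log(hgi.fromUrl('npm/cli#v10.0.0').committish) // 'v10.0.0'

    // rejected by the checks above
    console.log(hgi.fromUrl('./npm/cli'))  // undefined (leading '.')
    console.log(hgi.fromUrl('@scope/pkg')) // undefined (leading '@')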
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/hosts.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/hosts.js
deleted file mode 100644
index 013712b7842c8..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/hosts.js
+++ /dev/null
@@ -1,228 +0,0 @@
-/* eslint-disable max-len */
-
-'use strict'
-
-const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
-const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
-const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-')
-
-const defaults = {
-  sshtemplate: ({ domain, user, project, committish }) =>
-    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, user, project, committish }) =>
-    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
-  browsetemplate: ({ domain, user, project, committish, treepath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  docstemplate: ({ domain, user, project, treepath, committish }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
-  shortcuttemplate: ({ type, user, project, committish }) =>
-    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ user, project, committish }) =>
-    `${user}/${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, user, project }) =>
-    `https://${domain}/${user}/${project}/issues`,
-  hashformat: formatHashFragment,
-}
-
-const hosts = {}
-hosts.github = {
-  // First two are insecure and generally shouldn't be used any more, but
-  // they are still supported.
-  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'github.com',
-  treepath: 'tree',
-  blobpath: 'blob',
-  editpath: 'edit',
-  filetemplate: ({ auth, user, project, committish, path }) =>
-    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
-  gittemplate: ({ auth, domain, user, project, committish }) =>
-    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, type, committish] = url.pathname.split('/', 5)
-    if (type && type !== 'tree') {
-      return
-    }
-
-    if (!type) {
-      committish = url.hash.slice(1)
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish }
-  },
-}
-
-hosts.bitbucket = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'bitbucket.org',
-  treepath: 'src',
-  blobpath: 'src',
-  editpath: '?mode=edit',
-  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (['get'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gitlab = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gitlab.com',
-  treepath: 'tree',
-  blobpath: 'tree',
-  editpath: '-/edit',
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    const path = url.pathname.slice(1)
-    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
-      return
-    }
-
-    const segments = path.split('/')
-    let project = segments.pop()
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    const user = segments.join('/')
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gist = {
-  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gist.github.com',
-  editpath: 'edit',
-  sshtemplate: ({ domain, project, committish }) =>
-    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, project, committish }) =>
-    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
-  browsetemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  docstemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  httpstemplate: ({ domain, project, committish }) =>
-    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ user, project, committish, path }) =>
-    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
-  shortcuttemplate: ({ type, project, committish }) =>
-    `${type}:${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ project, committish }) =>
-    `${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, project }) =>
-    `https://${domain}/${project}`,
-  gittemplate: ({ domain, project, committish }) =>
-    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ project, committish }) =>
-    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (aux === 'raw') {
-      return
-    }
-
-    if (!project) {
-      if (!user) {
-        return
-      }
-
-      project = user
-      user = null
-    }
-
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-  hashformat: function (fragment) {
-    return fragment && 'file-' + formatHashFragment(fragment)
-  },
-}
-
-hosts.sourcehut = {
-  protocols: ['git+ssh:', 'https:'],
-  domain: 'git.sr.ht',
-  treepath: 'tree',
-  blobpath: 'tree',
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
-  httpstemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
-  bugstemplate: ({ user, project }) =>
-    `https://todo.sr.ht/${user}/${project}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-
-    // tarball url
-    if (['archive'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  hosts[name] = Object.assign({}, defaults, host)
-}
-
-module.exports = hosts
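All of the templates above lean on maybeJoin() to make optional fragments
disappear: if any argument is falsy, the whole joined group is dropped, so
`${maybeJoin('#', committish)}` renders a '#' suffix only when a committish
exists. A sketch:

    const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''

    console.log(maybeJoin('#', 'v1.0.0')) // '#v1.0.0'
    console.log(maybeJoin('#', ''))       // '' (the whole group vanishes)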
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/index.js
deleted file mode 100644
index a7339c217e9a3..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/index.js
+++ /dev/null
@@ -1,179 +0,0 @@
-'use strict'
-
-const LRU = require('lru-cache')
-const hosts = require('./hosts.js')
-const fromUrl = require('./from-url.js')
-const parseUrl = require('./parse-url.js')
-
-const cache = new LRU({ max: 1000 })
-
-class GitHost {
-  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
-    Object.assign(this, GitHost.#gitHosts[type], {
-      type,
-      user,
-      auth,
-      project,
-      committish,
-      default: defaultRepresentation,
-      opts,
-    })
-  }
-
-  static #gitHosts = { byShortcut: {}, byDomain: {} }
-  static #protocols = {
-    'git+ssh:': { name: 'sshurl' },
-    'ssh:': { name: 'sshurl' },
-    'git+https:': { name: 'https', auth: true },
-    'git:': { auth: true },
-    'http:': { auth: true },
-    'https:': { auth: true },
-    'git+http:': { auth: true },
-  }
-
-  static addHost (name, host) {
-    GitHost.#gitHosts[name] = host
-    GitHost.#gitHosts.byDomain[host.domain] = name
-    GitHost.#gitHosts.byShortcut[`${name}:`] = name
-    GitHost.#protocols[`${name}:`] = { name }
-  }
-
-  static fromUrl (giturl, opts) {
-    if (typeof giturl !== 'string') {
-      return
-    }
-
-    const key = giturl + JSON.stringify(opts || {})
-
-    if (!cache.has(key)) {
-      const hostArgs = fromUrl(giturl, opts, {
-        gitHosts: GitHost.#gitHosts,
-        protocols: GitHost.#protocols,
-      })
-      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
-    }
-
-    return cache.get(key)
-  }
-
-  static parseUrl (url) {
-    return parseUrl(url)
-  }
-
-  #fill (template, opts) {
-    if (typeof template !== 'function') {
-      return null
-    }
-
-    const options = { ...this, ...this.opts, ...opts }
-
-    // the path should always be set so we don't end up with 'undefined' in urls
-    if (!options.path) {
-      options.path = ''
-    }
-
-    // template functions will insert the leading slash themselves
-    if (options.path.startsWith('/')) {
-      options.path = options.path.slice(1)
-    }
-
-    if (options.noCommittish) {
-      options.committish = null
-    }
-
-    const result = template(options)
-    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
-  }
-
-  hash () {
-    return this.committish ? `#${this.committish}` : ''
-  }
-
-  ssh (opts) {
-    return this.#fill(this.sshtemplate, opts)
-  }
-
-  sshurl (opts) {
-    return this.#fill(this.sshurltemplate, opts)
-  }
-
-  browse (path, ...args) {
-    // not a string, treat path as opts
-    if (typeof path !== 'string') {
-      return this.#fill(this.browsetemplate, path)
-    }
-
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browsetreetemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  // If the path is known to be a file, then browseFile should be used. For some hosts
-  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
-  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
-  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
-  // does not redirect to a different commit.
-  browseFile (path, ...args) {
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browseblobtemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  docs (opts) {
-    return this.#fill(this.docstemplate, opts)
-  }
-
-  bugs (opts) {
-    return this.#fill(this.bugstemplate, opts)
-  }
-
-  https (opts) {
-    return this.#fill(this.httpstemplate, opts)
-  }
-
-  git (opts) {
-    return this.#fill(this.gittemplate, opts)
-  }
-
-  shortcut (opts) {
-    return this.#fill(this.shortcuttemplate, opts)
-  }
-
-  path (opts) {
-    return this.#fill(this.pathtemplate, opts)
-  }
-
-  tarball (opts) {
-    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
-  }
-
-  file (path, opts) {
-    return this.#fill(this.filetemplate, { ...opts, path })
-  }
-
-  edit (path, opts) {
-    return this.#fill(this.edittemplate, { ...opts, path })
-  }
-
-  getDefaultRepresentation () {
-    return this.default
-  }
-
-  toString (opts) {
-    if (this.default && typeof this[this.default] === 'function') {
-      return this[this.default](opts)
-    }
-
-    return this.sshurl(opts)
-  }
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  GitHost.addHost(name, host)
-}
-
-module.exports = GitHost
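Putting the pieces together: GitHost.fromUrl() parses once per url+opts key,
caches the result in the LRU, and the instance methods fill the host
templates. A short usage sketch:

    const GitHost = require('hosted-git-info')

    const info = GitHost.fromUrl('git+ssh://git@github.com/npm/cli.git#v10.0.0')
    console.log(info.https())   // 'git+https://github.com/npm/cli.git#v10.0.0'
    console.log(info.tarball()) // 'https://codeload.github.com/npm/cli/tar.gz/v10.0.0'
    console.log(info.browse())  // 'https://github.com/npm/cli/tree/v10.0.0'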
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/parse-url.js
deleted file mode 100644
index 7d5489c008ab4..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/parse-url.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const url = require('url')
-
-const lastIndexOfBefore = (str, char, beforeChar) => {
-  const startPosition = str.indexOf(beforeChar)
-  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
-}
-
-const safeUrl = (u) => {
-  try {
-    return new url.URL(u)
-  } catch {
-    // this fn should never throw
-  }
-}
-
-// accepts input like git:github.com:user/repo and inserts the // after the first :
-const correctProtocol = (arg, protocols) => {
-  const firstColon = arg.indexOf(':')
-  const proto = arg.slice(0, firstColon + 1)
-  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
-    return arg
-  }
-
-  const firstAt = arg.indexOf('@')
-  if (firstAt > -1) {
-    if (firstAt > firstColon) {
-      return `git+ssh://${arg}`
-    } else {
-      return arg
-    }
-  }
-
-  const doubleSlash = arg.indexOf('//')
-  if (doubleSlash === firstColon + 1) {
-    return arg
-  }
-
-  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
-}
-
-// attempt to correct an scp-style url so that it will parse with `new URL()`
-const correctUrl = (giturl) => {
-  // ignore @ signs that come after the first hash, since that denotes the
-  // start of a committish, which can contain @ characters
-  const firstAt = lastIndexOfBefore(giturl, '@', '#')
-  // ignore colons that come after the hash since that could include colons such as:
-  // git@github.com:user/package-2#semver:^1.0.0
-  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
-
-  if (lastColonBeforeHash > firstAt) {
-    // the last : comes after the first @ (or there is no @)
-    // like it would in:
-    // proto://hostname.com:user/repo
-    // username@hostname.com:user/repo
-    // :password@hostname.com:user/repo
-    // username:password@hostname.com:user/repo
-    // proto://username@hostname.com:user/repo
-    // proto://:password@hostname.com:user/repo
-    // proto://username:password@hostname.com:user/repo
-    // then we replace the last : with a / to create a valid path
-    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
-  }
-
-  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
-    // we have no : at all
-    // as it would be in:
-    // username@hostname.com/user/repo
-    // then we prepend a protocol
-    giturl = `git+ssh://${giturl}`
-  }
-
-  return giturl
-}
-
-module.exports = (giturl, protocols) => {
-  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
-  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
-}
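The scp-style correction above is what lets `new URL()` accept addresses like
git@host:user/repo. Traced by hand (a sketch; colons after the '#' belong to
the committish and are left alone):

    // 'git@github.com:npm/cli#semver:^10'
    //   last ':' before '#' becomes '/':
    //     'git@github.com/npm/cli#semver:^10'
    //   no ':' left before '#' and no '//', so 'git+ssh://' is prepended:
    //     'git+ssh://git@github.com/npm/cli#semver:^10'
    console.log(new URL('git+ssh://git@github.com/npm/cli#semver:^10').host) // 'github.com'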
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/package.json
deleted file mode 100644
index 612259948afe7..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/package.json
+++ /dev/null
@@ -1,59 +0,0 @@
-{
-  "name": "hosted-git-info",
-  "version": "6.1.1",
-  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
-  "main": "./lib/index.js",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/hosted-git-info.git"
-  },
-  "keywords": [
-    "git",
-    "github",
-    "bitbucket",
-    "gitlab"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/hosted-git-info/issues"
-  },
-  "homepage": "https://github.com/npm/hosted-git-info",
-  "scripts": {
-    "posttest": "npm run lint",
-    "snap": "tap",
-    "test": "tap",
-    "test:coverage": "tap --coverage-report=html",
-    "lint": "eslint \"**/*.js\"",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run lint -- --fix",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "dependencies": {
-    "lru-cache": "^7.5.1"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.7.1",
-    "tap": "^16.0.1"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "color": 1,
-    "coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.7.1"
-  }
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.js
deleted file mode 100644
index 48e99fe5e5a70..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.js
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.now + status.ttl - status.start
-      }
-    }
-
-    // debounce calls to perf.now() to 1s so we're not hitting
-    // that costly call repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation return invalid (expect positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet
-        // it's not stale, which means this isn't a staleWhileRefetching.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-module.exports = LRUCache
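
The file deleted above implements the lru-cache v7 API that the npm CLI's dependency chain consumes. A minimal sketch of typical use of that API, assuming lru-cache v7 semantics as implemented in the deleted source; lookupPackument is a hypothetical loader used only for illustration and is not part of this patch:

    // Sketch of the lru-cache v7 API (constructor options and fetch())
    const LRUCache = require('lru-cache')

    const cache = new LRUCache({
      max: 500,            // hard cap on entry count (one of max/maxSize/ttl is required)
      ttl: 1000 * 60 * 5,  // entries become stale after five minutes
      allowStale: true,    // stale values may be served while a refresh is in flight
      fetchMethod: (key, staleValue, { signal }) =>
        lookupPackument(key, { signal }), // abortable via the provided AbortSignal
    })

    // fetch() resolves from cache on a fresh hit; on a miss or stale hit it runs
    // fetchMethod in the background, and with allowStale set it returns the stale
    // value immediately while the refresh completes.
    cache.fetch('npm-registry-fetch').then(packument => {
      console.log(packument)
    })
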
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.mjs b/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.mjs
deleted file mode 100644
index 4a0b4813ec515..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.mjs
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.now + status.ttl - status.start
-      }
-    }
-
-    // debounce calls to perf.now() to 1s so we're not hitting
-    // that costly call repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation return invalid (expect positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet
-        // it's not stale, which means this isn't a staleWhileRefetching.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
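
As an aside, the index-array linked list that moveToTail() manipulates can be exercised in isolation; a toy sketch (the capacity and the moved index are arbitrary, not taken from this patch):

// "pointers" are just slot indexes into two typed arrays
const CAP = 4
const prev = new Uint32Array(CAP)
const next = new Uint32Array(CAP)
let head = 0
let tail = CAP - 1
for (let i = 0; i < CAP; i++) {
  prev[i] = i > 0 ? i - 1 : 0 // head has no prev
  next[i] = i < CAP - 1 ? i + 1 : i
}
const connect = (p, n) => { prev[n] = p; next[p] = n }
const moveToTail = index => {
  if (index === tail) return // already tail, nothing to do
  if (index === head) head = next[index]
  else connect(prev[index], next[index]) // splice out of the middle
  connect(tail, index)
  tail = index
}
moveToTail(1) // traversal order is now 0, 2, 3, 1
console.log(head, tail) // 0 1
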
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-export default LRUCache
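
For orientation, a minimal sketch of the stale-while-revalidate flow that fetch()/backgroundFetch() implement, written against the lru-cache v7 API shown above; the fetchMethod body and URL are illustrative assumptions, and the global fetch() requires Node 18+:

const LRUCache = require('lru-cache')

const cache = new LRUCache({
  max: 100,
  ttl: 60_000, // entries go stale after one minute
  allowStale: true, // stale values may be returned while refreshing
  allowStaleOnFetchRejection: true, // keep serving stale if a refresh fails
  // fetchMethod receives (key, staleValue, { signal, options, context })
  fetchMethod: async (key, staleValue, { signal }) => {
    const res = await fetch(`https://example.com/${key}`, { signal })
    return res.json()
  },
})

const status = {}
// miss: dispatches fetchMethod and resolves with the fresh value.
// stale hit: resolves with the stale value while a background fetch
// refreshes the entry; status.fetch reports which path was taken.
cache.fetch('some-key', { status }).then(value => {
  console.log(status.fetch, value) // 'miss' | 'hit' | 'stale' | 'refresh'
})
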
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/package.json
deleted file mode 100644
index 9684991727e7a..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,96 +0,0 @@
-{
-  "name": "lru-cache",
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "7.18.3",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "node ./scripts/transpile-to-esm.js",
-    "size": "size-limit",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc ./index.d.ts"
-  },
-  "type": "commonjs",
-  "main": "./index.js",
-  "module": "./index.mjs",
-  "types": "./index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./index.d.ts",
-        "default": "./index.mjs"
-      },
-      "require": {
-        "types": "./index.d.ts",
-        "default": "./index.js"
-      }
-    },
-    "./package.json": "./package.json"
-  },
-  "repository": "git://github.com/isaacs/node-lru-cache.git",
-  "devDependencies": {
-    "@size-limit/preset-small-lib": "^7.0.8",
-    "@types/node": "^17.0.31",
-    "@types/tap": "^15.0.6",
-    "benchmark": "^2.1.4",
-    "c8": "^7.11.2",
-    "clock-mock": "^1.0.6",
-    "eslint-config-prettier": "^8.5.0",
-    "prettier": "^2.6.2",
-    "size-limit": "^7.0.8",
-    "tap": "^16.3.4",
-    "ts-node": "^10.7.0",
-    "tslib": "^2.4.0",
-    "typedoc": "^0.23.24",
-    "typescript": "^4.6.4"
-  },
-  "license": "ISC",
-  "files": [
-    "index.js",
-    "index.mjs",
-    "index.d.ts"
-  ],
-  "engines": {
-    "node": ">=12"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--include=index.js"
-    ],
-    "node-arg": [
-      "--expose-gc",
-      "--require",
-      "ts-node/register"
-    ],
-    "ts": false
-  },
-  "size-limit": [
-    {
-      "path": "./index.js"
-    }
-  ]
-}
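
The conditional "exports" map above is what lets a single install of lru-cache serve both module systems; a sketch of the two entry points Node resolves (consumer file names are hypothetical):

// consumer-cjs.js: the "require" condition resolves to ./index.js
const LRUCache = require('lru-cache')
const cache = new LRUCache({ max: 10 })
cache.set('a', 1)
console.log(cache.get('a')) // 1

// consumer-esm.mjs: the "import" condition resolves to ./index.mjs
// import LRUCache from 'lru-cache'
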
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/LICENSE
deleted file mode 100644
index 19cec97b18468..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/lib/npa.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/lib/npa.js
deleted file mode 100644
index 36bd18cd9f9a6..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/lib/npa.js
+++ /dev/null
@@ -1,431 +0,0 @@
-'use strict'
-module.exports = npa
-module.exports.resolve = resolve
-module.exports.toPurl = toPurl
-module.exports.Result = Result
-
-const url = require('url')
-const HostedGit = require('hosted-git-info')
-const semver = require('semver')
-const path = global.FAKE_WINDOWS ? require('path').win32 : require('path')
-const validatePackageName = require('validate-npm-package-name')
-const { homedir } = require('os')
-const log = require('proc-log')
-
-const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
-const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
-const isURL = /^(?:git[+])?[a-z]+:/i
-const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
-const isFilename = /[.](?:tgz|tar.gz|tar)$/i
-
-function npa (arg, where) {
-  let name
-  let spec
-  if (typeof arg === 'object') {
-    if (arg instanceof Result && (!where || where === arg.where)) {
-      return arg
-    } else if (arg.name && arg.rawSpec) {
-      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
-    } else {
-      return npa(arg.raw, where || arg.where)
-    }
-  }
-  const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@')
-  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
-  if (isURL.test(arg)) {
-    spec = arg
-  } else if (isGit.test(arg)) {
-    spec = `git+ssh://${arg}`
-  } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) {
-    spec = arg
-  } else if (nameEndsAt > 0) {
-    name = namePart
-    spec = arg.slice(nameEndsAt + 1) || '*'
-  } else {
-    const valid = validatePackageName(arg)
-    if (valid.validForOldPackages) {
-      name = arg
-      spec = '*'
-    } else {
-      spec = arg
-    }
-  }
-  return resolve(name, spec, where, arg)
-}
-
-const isFilespec = isWindows ? /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
-
-function resolve (name, spec, where, arg) {
-  const res = new Result({
-    raw: arg,
-    name: name,
-    rawSpec: spec,
-    fromArgument: arg != null,
-  })
-
-  if (name) {
-    res.setName(name)
-  }
-
-  if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) {
-    return fromFile(res, where)
-  } else if (spec && /^npm:/i.test(spec)) {
-    return fromAlias(res, where)
-  }
-
-  const hosted = HostedGit.fromUrl(spec, {
-    noGitPlus: true,
-    noCommittish: true,
-  })
-  if (hosted) {
-    return fromHostedGit(res, hosted)
-  } else if (spec && isURL.test(spec)) {
-    return fromURL(res)
-  } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) {
-    return fromFile(res, where)
-  } else {
-    return fromRegistry(res)
-  }
-}
-
-const defaultRegistry = 'https://registry.npmjs.org'
-
-function toPurl (arg, reg = defaultRegistry) {
-  const res = npa(arg)
-
-  if (res.type !== 'version') {
-    throw invalidPurlType(res.type, res.raw)
-  }
-
-  // URI-encode leading @ of scoped packages
-  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
-  if (reg !== defaultRegistry) {
-    purl += '?repository_url=' + reg
-  }
-
-  return purl
-}
-
-function invalidPackageName (name, valid, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
-  err.code = 'EINVALIDPACKAGENAME'
-  return err
-}
-
-function invalidTagName (name, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
-  err.code = 'EINVALIDTAGNAME'
-  return err
-}
-
-function invalidPurlType (type, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
-  err.code = 'EINVALIDPURLTYPE'
-  return err
-}
-
-function Result (opts) {
-  this.type = opts.type
-  this.registry = opts.registry
-  this.where = opts.where
-  if (opts.raw == null) {
-    this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec
-  } else {
-    this.raw = opts.raw
-  }
-
-  this.name = undefined
-  this.escapedName = undefined
-  this.scope = undefined
-  this.rawSpec = opts.rawSpec || ''
-  this.saveSpec = opts.saveSpec
-  this.fetchSpec = opts.fetchSpec
-  if (opts.name) {
-    this.setName(opts.name)
-  }
-  this.gitRange = opts.gitRange
-  this.gitCommittish = opts.gitCommittish
-  this.gitSubdir = opts.gitSubdir
-  this.hosted = opts.hosted
-}
-
-Result.prototype.setName = function (name) {
-  const valid = validatePackageName(name)
-  if (!valid.validForOldPackages) {
-    throw invalidPackageName(name, valid, this.raw)
-  }
-
-  this.name = name
-  this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
-  // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
-  this.escapedName = name.replace('/', '%2f')
-  return this
-}
-
-Result.prototype.toString = function () {
-  const full = []
-  if (this.name != null && this.name !== '') {
-    full.push(this.name)
-  }
-  const spec = this.saveSpec || this.fetchSpec || this.rawSpec
-  if (spec != null && spec !== '') {
-    full.push(spec)
-  }
-  return full.length ? full.join('@') : this.raw
-}
-
-Result.prototype.toJSON = function () {
-  const result = Object.assign({}, this)
-  delete result.hosted
-  return result
-}
-
-function setGitCommittish (res, committish) {
-  if (!committish) {
-    res.gitCommittish = null
-    return res
-  }
-
-  // for each :: separated item:
-  for (const part of committish.split('::')) {
-    // if the item has no : then it is a commit-ish
-    if (!part.includes(':')) {
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a committish')
-      }
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a second committish')
-      }
-      res.gitCommittish = part
-      continue
-    }
-    // split on name:value
-    const [name, value] = part.split(':')
-    // if name is semver do semver lookup of ref or tag
-    if (name === 'semver') {
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a semver range')
-      }
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a second semver range')
-      }
-      res.gitRange = decodeURIComponent(value)
-      continue
-    }
-    if (name === 'path') {
-      if (res.gitSubdir) {
-        throw new Error('cannot override existing path with a second path')
-      }
-      res.gitSubdir = `/${value}`
-      continue
-    }
-    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
-  }
-
-  return res
-}
-
-function fromFile (res, where) {
-  if (!where) {
-    where = process.cwd()
-  }
-  res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory'
-  res.where = where
-
-  // always put the '/' on where when resolving urls, or else
-  // file:foo from /path/to/bar goes to /path/to/foo, when we want
-  // it to be /path/to/bar/foo
-
-  let specUrl
-  let resolvedUrl
-  const prefix = (!/^file:/.test(res.rawSpec) ? 'file:' : '')
-  const rawWithPrefix = prefix + res.rawSpec
-  let rawNoPrefix = rawWithPrefix.replace(/^file:/, '')
-  try {
-    resolvedUrl = new url.URL(rawWithPrefix, `file://${path.resolve(where)}/`)
-    specUrl = new url.URL(rawWithPrefix)
-  } catch (originalError) {
-    const er = new Error('Invalid file: URL, must comply with RFC 8909')
-    throw Object.assign(er, {
-      raw: res.rawSpec,
-      spec: res,
-      where,
-      originalError,
-    })
-  }
-
-  // environment switch for testing
-  if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') {
-    // XXX backwards compatibility lack of compliance with 8909
-    // Remove when we want a breaking change to come into RFC compliance.
-    if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
-      const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
-      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
-      specUrl = new url.URL(rawSpec)
-      rawNoPrefix = rawSpec.replace(/^file:/, '')
-    }
-    // turn file:/../foo into file:../foo
-    // for 1, 2 or 3 leading slashes since we attempted
-    // in the previous step to make it a file protocol url with a leading slash
-    if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
-      const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
-      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
-      specUrl = new url.URL(rawSpec)
-      rawNoPrefix = rawSpec.replace(/^file:/, '')
-    }
-    // XXX end 8909 violation backwards compatibility section
-  }
-
-  // file:foo - relative url to ./foo
-  // file:/foo - absolute path /foo
-  // file:///foo - absolute path to /foo, no authority host
-  // file://localhost/foo - absolute path to /foo, on localhost
-  // file://foo - absolute path to / on foo host (error!)
-  if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
-    const msg = `Invalid file: URL, must be absolute if // present`
-    throw Object.assign(new Error(msg), {
-      raw: res.rawSpec,
-      parsed: resolvedUrl,
-    })
-  }
-
-  // turn /C:/blah into just C:/blah on windows
-  let specPath = decodeURIComponent(specUrl.pathname)
-  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
-  if (isWindows) {
-    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
-    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
-  }
-
-  // replace ~ with homedir, but keep the ~ in the saveSpec
-  // otherwise, make it relative to where param
-  if (/^\/~(\/|$)/.test(specPath)) {
-    res.saveSpec = `file:${specPath.substr(1)}`
-    resolvedPath = path.resolve(homedir(), specPath.substr(3))
-  } else if (!path.isAbsolute(rawNoPrefix)) {
-    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
-  } else {
-    res.saveSpec = `file:${path.resolve(resolvedPath)}`
-  }
-
-  res.fetchSpec = path.resolve(where, resolvedPath)
-  return res
-}
-
-function fromHostedGit (res, hosted) {
-  res.type = 'git'
-  res.hosted = hosted
-  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
-  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
-  return setGitCommittish(res, hosted.committish)
-}
-
-function unsupportedURLType (protocol, spec) {
-  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
-  err.code = 'EUNSUPPORTEDPROTOCOL'
-  return err
-}
-
-function matchGitScp (spec) {
-  // git ssh specifiers are overloaded to also use scp-style git
-  // specifiers, so we have to parse those out and treat them special.
-  // They are NOT true URIs, so we can't hand them to `url.parse`.
-  //
-  // This regex looks for things that look like:
-  // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
-  //
-  // ...and various combinations. The username in the beginning is *required*.
-  const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
-  return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && {
-    fetchSpec: matched[1],
-    gitCommittish: matched[2] == null ? null : matched[2],
-  }
-}
-
-function fromURL (res) {
-  // eslint-disable-next-line node/no-deprecated-api
-  const urlparse = url.parse(res.rawSpec)
-  res.saveSpec = res.rawSpec
-  // check the protocol, and then see if it's git or not
-  switch (urlparse.protocol) {
-    case 'git:':
-    case 'git+http:':
-    case 'git+https:':
-    case 'git+rsync:':
-    case 'git+ftp:':
-    case 'git+file:':
-    case 'git+ssh:': {
-      res.type = 'git'
-      const match = urlparse.protocol === 'git+ssh:' ? matchGitScp(res.rawSpec)
-        : null
-      if (match) {
-        setGitCommittish(res, match.gitCommittish)
-        res.fetchSpec = match.fetchSpec
-      } else {
-        setGitCommittish(res, urlparse.hash != null ? urlparse.hash.slice(1) : '')
-        urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '')
-        if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) {
-          // keep the drive letter : on windows file paths
-          urlparse.host += ':'
-          urlparse.hostname += ':'
-        }
-        delete urlparse.hash
-        res.fetchSpec = url.format(urlparse)
-      }
-      break
-    }
-    case 'http:':
-    case 'https:':
-      res.type = 'remote'
-      res.fetchSpec = res.saveSpec
-      break
-
-    default:
-      throw unsupportedURLType(urlparse.protocol, res.rawSpec)
-  }
-
-  return res
-}
-
-function fromAlias (res, where) {
-  const subSpec = npa(res.rawSpec.substr(4), where)
-  if (subSpec.type === 'alias') {
-    throw new Error('nested aliases not supported')
-  }
-
-  if (!subSpec.registry) {
-    throw new Error('aliases only work for registry deps')
-  }
-
-  res.subSpec = subSpec
-  res.registry = true
-  res.type = 'alias'
-  res.saveSpec = null
-  res.fetchSpec = null
-  return res
-}
-
-function fromRegistry (res) {
-  res.registry = true
-  const spec = res.rawSpec.trim()
-  // no save spec for registry components as we save based on the fetched
-  // version, not on the argument so this can't compute that.
-  res.saveSpec = null
-  res.fetchSpec = spec
-  const version = semver.valid(spec, true)
-  const range = semver.validRange(spec, true)
-  if (version) {
-    res.type = 'version'
-  } else if (range) {
-    res.type = 'range'
-  } else {
-    if (encodeURIComponent(spec) !== spec) {
-      throw invalidTagName(spec, res.raw)
-    }
-    res.type = 'tag'
-  }
-  return res
-}
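
A brief sketch of how the parser above classifies a few common arguments, using the public npm-package-arg API; the sample specifiers are arbitrary:

const npa = require('npm-package-arg')

// registry range: name split at the last '@', spec validated by semver
const range = npa('@scope/pkg@^1.2.0')
console.log(range.type, range.name, range.fetchSpec)
// 'range' '@scope/pkg' '^1.2.0'

// bare name: spec defaults to '*', which is a valid semver range
console.log(npa('abbrev').type) // 'range'

// scp-style git shorthand is rewritten to a git+ssh spec
console.log(npa('git@github.com:npm/cli.git').type) // 'git'

// local tarball paths are detected by extension
console.log(npa('./fixtures/pkg.tgz').type) // 'file'

// purls can only be generated for exact 'version' specs
console.log(npa.toPurl('@scope/pkg@1.2.3')) // 'pkg:npm/%40scope/pkg@1.2.3'
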
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/package.json
deleted file mode 100644
index bb9e71b258a93..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/package.json
+++ /dev/null
@@ -1,59 +0,0 @@
-{
-  "name": "npm-package-arg",
-  "version": "10.1.0",
-  "description": "Parse the things that can be arguments to `npm install`",
-  "main": "./lib/npa.js",
-  "directories": {
-    "test": "test"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "dependencies": {
-    "hosted-git-info": "^6.0.0",
-    "proc-log": "^3.0.0",
-    "semver": "^7.3.5",
-    "validate-npm-package-name": "^5.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.10.0",
-    "tap": "^16.0.1"
-  },
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "npmclilint": "npmcli-lint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/npm-package-arg.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/npm-package-arg/issues"
-  },
-  "homepage": "https://github.com/npm/npm-package-arg",
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "branches": 97,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.10.0"
-  }
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/LICENSE.md
deleted file mode 100644
index 8d28acf866d93..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/lib/index.js
deleted file mode 100644
index 8dbd2721c8996..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/lib/index.js
+++ /dev/null
@@ -1,218 +0,0 @@
-'use strict'
-
-const npa = require('npm-package-arg')
-const semver = require('semver')
-const { checkEngine } = require('npm-install-checks')
-const normalizeBin = require('npm-normalize-package-bin')
-
-const engineOk = (manifest, npmVersion, nodeVersion) => {
-  try {
-    checkEngine(manifest, npmVersion, nodeVersion)
-    return true
-  } catch (_) {
-    return false
-  }
-}
-
-const isBefore = (verTimes, ver, time) =>
-  !verTimes || !verTimes[ver] || Date.parse(verTimes[ver]) <= time
-
-const avoidSemverOpt = { includePrerelease: true, loose: true }
-const shouldAvoid = (ver, avoid) =>
-  avoid && semver.satisfies(ver, avoid, avoidSemverOpt)
-
-const decorateAvoid = (result, avoid) =>
-  result && shouldAvoid(result.version, avoid)
-    ? { ...result, _shouldAvoid: true }
-    : result
-
-const pickManifest = (packument, wanted, opts) => {
-  const {
-    defaultTag = 'latest',
-    before = null,
-    nodeVersion = process.version,
-    npmVersion = null,
-    includeStaged = false,
-    avoid = null,
-    avoidStrict = false,
-  } = opts
-
-  const { name, time: verTimes } = packument
-  const versions = packument.versions || {}
-
-  if (avoidStrict) {
-    const looseOpts = {
-      ...opts,
-      avoidStrict: false,
-    }
-
-    const result = pickManifest(packument, wanted, looseOpts)
-    if (!result || !result._shouldAvoid) {
-      return result
-    }
-
-    const caret = pickManifest(packument, `^${result.version}`, looseOpts)
-    if (!caret || !caret._shouldAvoid) {
-      return {
-        ...caret,
-        _outsideDependencyRange: true,
-        _isSemVerMajor: false,
-      }
-    }
-
-    const star = pickManifest(packument, '*', looseOpts)
-    if (!star || !star._shouldAvoid) {
-      return {
-        ...star,
-        _outsideDependencyRange: true,
-        _isSemVerMajor: true,
-      }
-    }
-
-    throw Object.assign(new Error(`No avoidable versions for ${name}`), {
-      code: 'ETARGET',
-      name,
-      wanted,
-      avoid,
-      before,
-      versions: Object.keys(versions),
-    })
-  }
-
-  const staged = (includeStaged && packument.stagedVersions &&
-    packument.stagedVersions.versions) || {}
-  const restricted = (packument.policyRestrictions &&
-    packument.policyRestrictions.versions) || {}
-
-  const time = before && verTimes ? +(new Date(before)) : Infinity
-  const spec = npa.resolve(name, wanted || defaultTag)
-  const type = spec.type
-  const distTags = packument['dist-tags'] || {}
-
-  if (type !== 'tag' && type !== 'version' && type !== 'range') {
-    throw new Error('Only tag, version, and range are supported')
-  }
-
-  // if the type is 'tag', and not just the implicit default, then it must
-  // be that exactly, or nothing else will do.
-  if (wanted && type === 'tag') {
-    const ver = distTags[wanted]
-    // if the version in the dist-tags is before the before date, then
-    // we use that.  Otherwise, we get the highest precedence version
-    // prior to the dist-tag.
-    if (isBefore(verTimes, ver, time)) {
-      return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
-    } else {
-      return pickManifest(packument, `<=${ver}`, opts)
-    }
-  }
-
-  // similarly, if a specific version, then only that version will do
-  if (wanted && type === 'version') {
-    const ver = semver.clean(wanted, { loose: true })
-    const mani = versions[ver] || staged[ver] || restricted[ver]
-    return isBefore(verTimes, ver, time) ? decorateAvoid(mani, avoid) : null
-  }
-
-  // ok, sort based on our heuristics, and pick the best fit
-  const range = type === 'range' ? wanted : '*'
-
-  // if the range is *, then we prefer the 'latest' if available
-  // but skip this if it should be avoided, in that case we have
-  // to try a little harder.
-  const defaultVer = distTags[defaultTag]
-  if (defaultVer &&
-      (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
-      !shouldAvoid(defaultVer, avoid)) {
-    const mani = versions[defaultVer]
-    if (mani && isBefore(verTimes, defaultVer, time)) {
-      return mani
-    }
-  }
-
-  // ok, actually have to sort the list and take the winner
-  const allEntries = Object.entries(versions)
-    .concat(Object.entries(staged))
-    .concat(Object.entries(restricted))
-    .filter(([ver, mani]) => isBefore(verTimes, ver, time))
-
-  if (!allEntries.length) {
-    throw Object.assign(new Error(`No versions available for ${name}`), {
-      code: 'ENOVERSIONS',
-      name,
-      type,
-      wanted,
-      before,
-      versions: Object.keys(versions),
-    })
-  }
-
-  const sortSemverOpt = { loose: true }
-  const entries = allEntries.filter(([ver, mani]) =>
-    semver.satisfies(ver, range, { loose: true }))
-    .sort((a, b) => {
-      const [vera, mania] = a
-      const [verb, manib] = b
-      const notavoida = !shouldAvoid(vera, avoid)
-      const notavoidb = !shouldAvoid(verb, avoid)
-      const notrestra = !restricted[a]
-      const notrestrb = !restricted[b]
-      const notstagea = !staged[a]
-      const notstageb = !staged[b]
-      const notdepra = !mania.deprecated
-      const notdeprb = !manib.deprecated
-      const enginea = engineOk(mania, npmVersion, nodeVersion)
-      const engineb = engineOk(manib, npmVersion, nodeVersion)
-      // sort by:
-      // - not an avoided version
-      // - not restricted
-      // - not staged
-      // - not deprecated and engine ok
-      // - engine ok
-      // - not deprecated
-      // - semver
-      return (notavoidb - notavoida) ||
-        (notrestrb - notrestra) ||
-        (notstageb - notstagea) ||
-        ((notdeprb && engineb) - (notdepra && enginea)) ||
-        (engineb - enginea) ||
-        (notdeprb - notdepra) ||
-        semver.rcompare(vera, verb, sortSemverOpt)
-    })
-
-  return decorateAvoid(entries[0] && entries[0][1], avoid)
-}
-
-module.exports = (packument, wanted, opts = {}) => {
-  const mani = pickManifest(packument, wanted, opts)
-  const picked = mani && normalizeBin(mani)
-  const policyRestrictions = packument.policyRestrictions
-  const restricted = (policyRestrictions && policyRestrictions.versions) || {}
-
-  if (picked && !restricted[picked.version]) {
-    return picked
-  }
-
-  const { before = null, defaultTag = 'latest' } = opts
-  const bstr = before ? new Date(before).toLocaleString() : ''
-  const { name } = packument
-  const pckg = `${name}@${wanted}` +
-    (before ? ` with a date before ${bstr}` : '')
-
-  const isForbidden = picked && !!restricted[picked.version]
-  const polMsg = isForbidden ? policyRestrictions.message : ''
-
-  const msg = !isForbidden ? `No matching version found for ${pckg}.`
-    : `Could not download ${pckg} due to policy violations:\n${polMsg}`
-
-  const code = isForbidden ? 'E403' : 'ETARGET'
-  throw Object.assign(new Error(msg), {
-    code,
-    type: npa.resolve(packument.name, wanted).type,
-    wanted,
-    versions: Object.keys(packument.versions ?? {}),
-    name,
-    distTags: packument['dist-tags'],
-    defaultTag,
-  })
-}
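
A small sketch of the selection rules above against a hand-made packument (the package data is fabricated for illustration):

const pickManifest = require('npm-pick-manifest')

const packument = {
  name: 'demo',
  'dist-tags': { latest: '2.0.0' },
  versions: {
    '1.0.0': { name: 'demo', version: '1.0.0' },
    '1.1.0': { name: 'demo', version: '1.1.0', deprecated: 'use 2.x' },
    '2.0.0': { name: 'demo', version: '2.0.0' },
  },
}

// deprecation outranks semver precedence, so 1.0.0 beats 1.1.0 here
console.log(pickManifest(packument, '^1.0.0').version) // '1.0.0'

// a '*' range takes the 'latest' dist-tag shortcut when it satisfies
console.log(pickManifest(packument, '*').version) // '2.0.0'

// no satisfying version throws ETARGET with the candidates attached
try {
  pickManifest(packument, '^3.0.0')
} catch (er) {
  console.log(er.code) // 'ETARGET'
}
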
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/package.json
deleted file mode 100644
index feff81f5b2fee..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/package.json
+++ /dev/null
@@ -1,57 +0,0 @@
-{
-  "name": "npm-pick-manifest",
-  "version": "8.0.2",
-  "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
-  "main": "./lib",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "coverage": "tap",
-    "lint": "eslint \"**/*.js\"",
-    "test": "tap",
-    "posttest": "npm run lint",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run lint -- --fix",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/npm-pick-manifest.git"
-  },
-  "keywords": [
-    "npm",
-    "semver",
-    "package manager"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "dependencies": {
-    "npm-install-checks": "^6.0.0",
-    "npm-normalize-package-bin": "^3.0.0",
-    "npm-package-arg": "^10.0.0",
-    "semver": "^7.3.5"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.0.1"
-  },
-  "tap": {
-    "check-coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.18.0",
-    "publish": true
-  }
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE
deleted file mode 100644
index a03cd0ed0b338..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter, Kat Marchán, npm, Inc., and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js
deleted file mode 100755
index f35b62ca71a53..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js
+++ /dev/null
@@ -1,158 +0,0 @@
-#!/usr/bin/env node
-
-const run = conf => {
-  const pacote = require('../')
-  switch (conf._[0]) {
-    case 'resolve':
-    case 'manifest':
-    case 'packument':
-      if (conf._[0] === 'resolve' && conf.long) {
-        return pacote.manifest(conf._[1], conf).then(mani => ({
-          resolved: mani._resolved,
-          integrity: mani._integrity,
-          from: mani._from,
-        }))
-      }
-      return pacote[conf._[0]](conf._[1], conf)
-
-    case 'tarball':
-      if (!conf._[2] || conf._[2] === '-') {
-        return pacote.tarball.stream(conf._[1], stream => {
-          stream.pipe(
-            conf.testStdout ||
-            /* istanbul ignore next */
-            process.stdout
-          )
-          // make sure it resolves something falsey
-          return stream.promise().then(() => {
-            return false
-          })
-        }, conf)
-      } else {
-        return pacote.tarball.file(conf._[1], conf._[2], conf)
-      }
-
-    case 'extract':
-      return pacote.extract(conf._[1], conf._[2], conf)
-
-    default: /* istanbul ignore next */ {
-      throw new Error(`bad command: ${conf._[0]}`)
-    }
-  }
-}
-
-const version = require('../package.json').version
-const usage = () =>
-`Pacote - The JavaScript Package Handler, v${version}
-
-Usage:
-
-  pacote resolve <spec>
-    Resolve a specifier and output the fully resolved target
-    Returns integrity and from if '--long' flag is set.
-
-  pacote manifest <spec>
-    Fetch a manifest and print to stdout
-
-  pacote packument <spec>
-    Fetch a full packument and print to stdout
-
-  pacote tarball <spec> [<filename>]
-    Fetch a package tarball and save to <filename>
-    If <filename> is missing or '-', the tarball will be streamed to stdout.
-
-  pacote extract <spec> <folder>
-    Extract a package to the destination folder.
-
-Configuration values all match the names of configs passed to npm, or
-options passed to Pacote.  Additional flags for this executable:
-
-  --long     Print an object from 'resolve', including integrity and spec.
-  --json     Print result objects as JSON rather than node's default.
-             (This is the default if stdout is not a TTY.)
-  --help -h  Print this helpful text.
-
-For example '--cache=/path/to/folder' will use that folder as the cache.
-`
-
-const shouldJSON = (conf, result) =>
-  conf.json ||
-  !process.stdout.isTTY &&
-  conf.json === undefined &&
-  result &&
-  typeof result === 'object'
-
-const pretty = (conf, result) =>
-  shouldJSON(conf, result) ? JSON.stringify(result, 0, 2) : result
-
-let addedLogListener = false
-const main = args => {
-  const conf = parse(args)
-  if (conf.help || conf.h) {
-    return console.log(usage())
-  }
-
-  if (!addedLogListener) {
-    process.on('log', console.error)
-    addedLogListener = true
-  }
-
-  try {
-    return run(conf)
-      .then(result => result && console.log(pretty(conf, result)))
-      .catch(er => {
-        console.error(er)
-        process.exit(1)
-      })
-  } catch (er) {
-    console.error(er.message)
-    console.error(usage())
-  }
-}
-
-const parseArg = arg => {
-  const split = arg.slice(2).split('=')
-  const k = split.shift()
-  const v = split.join('=')
-  const no = /^no-/.test(k) && !v
-  const key = (no ? k.slice(3) : k)
-    .replace(/^tag$/, 'defaultTag')
-    .replace(/-([a-z])/g, (_, c) => c.toUpperCase())
-  const value = v ? v.replace(/^~/, process.env.HOME) : !no
-  return { key, value }
-}
-
-const parse = args => {
-  const conf = {
-    _: [],
-    cache: process.env.HOME + '/.npm/_cacache',
-  }
-  let dashdash = false
-  args.forEach(arg => {
-    if (dashdash) {
-      conf._.push(arg)
-    } else if (arg === '--') {
-      dashdash = true
-    } else if (arg === '-h') {
-      conf.help = true
-    } else if (/^--/.test(arg)) {
-      const { key, value } = parseArg(arg)
-      conf[key] = value
-    } else {
-      conf._.push(arg)
-    }
-  })
-  return conf
-}
-
-if (module === require.main) {
-  main(process.argv.slice(2))
-} else {
-  module.exports = {
-    main,
-    run,
-    usage,
-    parseArg,
-    parse,
-  }
-}
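
Because the module exports its helpers when it is not the main script, the flag-parsing conventions above can be demonstrated directly; the require path and flag values below are illustrative:

const { parse, parseArg } = require('./bin.js')

// '--no-' prefixes negate, kebab-case becomes camelCase,
// and '--tag' is renamed to 'defaultTag'
console.log(parseArg('--prefer-offline')) // { key: 'preferOffline', value: true }
console.log(parseArg('--no-progress')) // { key: 'progress', value: false }
console.log(parseArg('--tag=beta')) // { key: 'defaultTag', value: 'beta' }

const conf = parse(['manifest', 'abbrev@^1.0.0', '--json'])
console.log(conf._) // [ 'manifest', 'abbrev@^1.0.0' ]
console.log(conf.json) // true
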
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js
deleted file mode 100644
index 420afc5802cb2..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js
+++ /dev/null
@@ -1,108 +0,0 @@
-const Fetcher = require('./fetcher.js')
-const FileFetcher = require('./file.js')
-const { Minipass } = require('minipass')
-const tarCreateOptions = require('./util/tar-create-options.js')
-const packlist = require('npm-packlist')
-const tar = require('tar')
-const _prepareDir = Symbol('_prepareDir')
-const { resolve } = require('path')
-const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson')
-
-const runScript = require('@npmcli/run-script')
-
-const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
-class DirFetcher extends Fetcher {
-  constructor (spec, opts) {
-    super(spec, opts)
-    // just the fully resolved filename
-    this.resolved = this.spec.fetchSpec
-
-    this.tree = opts.tree || null
-    this.Arborist = opts.Arborist || null
-  }
-
-  // exposes tarCreateOptions as public API
-  static tarCreateOptions (manifest) {
-    return tarCreateOptions(manifest)
-  }
-
-  get types () {
-    return ['directory']
-  }
-
-  [_prepareDir] () {
-    return this.manifest().then(mani => {
-      if (!mani.scripts || !mani.scripts.prepare) {
-        return
-      }
-
-      // we *only* run prepare.
-      // pre/post-pack is run by the npm CLI for publish and pack,
-      // but this function is *also* run when installing git deps
-      const stdio = this.opts.foregroundScripts ? 'inherit' : 'pipe'
-
-      // hide the banner if silent opt is passed in, or if prepare running
-      // in the background.
-      const banner = this.opts.silent ? false : stdio === 'inherit'
-
-      return runScript({
-        pkg: mani,
-        event: 'prepare',
-        path: this.resolved,
-        stdio,
-        banner,
-        env: {
-          npm_package_resolved: this.resolved,
-          npm_package_integrity: this.integrity,
-          npm_package_json: resolve(this.resolved, 'package.json'),
-        },
-      })
-    })
-  }
-
-  [_tarballFromResolved] () {
-    if (!this.tree && !this.Arborist) {
-      throw new Error('DirFetcher requires either a tree or an Arborist constructor to pack')
-    }
-
-    const stream = new Minipass()
-    stream.resolved = this.resolved
-    stream.integrity = this.integrity
-
-    const { prefix, workspaces } = this.opts
-
-    // run the prepare script, get the list of files, and tar it up
-    // pipe to the stream, and proxy errors to the chain.
-    this[_prepareDir]()
-      .then(async () => {
-        if (!this.tree) {
-          const arb = new this.Arborist({ path: this.resolved })
-          this.tree = await arb.loadActual()
-        }
-        return packlist(this.tree, { path: this.resolved, prefix, workspaces })
-      })
-      .then(files => tar.c(tarCreateOptions(this.package), files)
-        .on('error', er => stream.emit('error', er)).pipe(stream))
-      .catch(er => stream.emit('error', er))
-    return stream
-  }
-
-  manifest () {
-    if (this.package) {
-      return Promise.resolve(this.package)
-    }
-
-    return this[_readPackageJson](this.resolved + '/package.json')
-      .then(mani => this.package = {
-        ...mani,
-        _integrity: this.integrity && String(this.integrity),
-        _resolved: this.resolved,
-        _from: this.from,
-      })
-  }
-
-  packument () {
-    return FileFetcher.prototype.packument.apply(this)
-  }
-}
-module.exports = DirFetcher
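
DirFetcher is normally reached through pacote's public helpers; a hedged sketch of packing a local directory, assuming a project at ./my-pkg and an installed @npmcli/arborist (both illustrative):

const pacote = require('pacote')
const Arborist = require('@npmcli/arborist')

async function packDir () {
  // 'directory' specs need an Arborist (or a pre-loaded tree) so that
  // DirFetcher can build the packlist after running 'prepare'
  const opts = { Arborist }

  const manifest = await pacote.manifest('file:./my-pkg', opts)
  console.log(manifest.name, manifest.version)

  // tarball() runs the prepare script, packs, and buffers the result
  const data = await pacote.tarball('file:./my-pkg', opts)
  console.log(data.length, data.integrity)
}

packDir().catch(console.error)
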
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js
deleted file mode 100644
index f961a45c7d346..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js
+++ /dev/null
@@ -1,505 +0,0 @@
-// This is the base class that the other fetcher types in lib
-// all descend from.
-// It handles the unpacking and retry logic that is shared among
-// all of the other Fetcher types.
-
-const npa = require('npm-package-arg')
-const ssri = require('ssri')
-const { promisify } = require('util')
-const { basename, dirname } = require('path')
-const tar = require('tar')
-const log = require('proc-log')
-const retry = require('promise-retry')
-const fs = require('fs/promises')
-const fsm = require('fs-minipass')
-const cacache = require('cacache')
-const isPackageBin = require('./util/is-package-bin.js')
-const removeTrailingSlashes = require('./util/trailing-slashes.js')
-const getContents = require('@npmcli/installed-package-contents')
-const readPackageJsonFast = require('read-package-json-fast')
-const readPackageJson = promisify(require('read-package-json'))
-const { Minipass } = require('minipass')
-
-const cacheDir = require('./util/cache-dir.js')
-
-// Private methods.
-// Child classes should not have to override these.
-// Users should never call them.
-const _extract = Symbol('_extract')
-const _mkdir = Symbol('_mkdir')
-const _empty = Symbol('_empty')
-const _toFile = Symbol('_toFile')
-const _tarxOptions = Symbol('_tarxOptions')
-const _entryMode = Symbol('_entryMode')
-const _istream = Symbol('_istream')
-const _assertType = Symbol('_assertType')
-const _tarballFromCache = Symbol('_tarballFromCache')
-const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
-const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches')
-const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson')
-
-class FetcherBase {
-  constructor (spec, opts) {
-    if (!opts || typeof opts !== 'object') {
-      throw new TypeError('options object is required')
-    }
-    this.spec = npa(spec, opts.where)
-
-    this.allowGitIgnore = !!opts.allowGitIgnore
-
-    // a bit redundant because presumably the caller already knows this,
-    // but it makes it easier to not have to keep track of the requested
-    // spec when we're dispatching thousands of these at once, and normalizing
-    // is nice.  saveSpec is preferred if set, because it turns stuff like
-    // x/y#committish into github:x/y#committish.  use name@rawSpec for
-    // registry deps so that we turn xyz and xyz@ -> xyz@
-    this.from = this.spec.registry
-      ? `${this.spec.name}@${this.spec.rawSpec}` : this.spec.saveSpec
-
-    this[_assertType]()
-    // clone the opts object so that others aren't upset when we mutate it
-    // by adding/modifying the integrity value.
-    this.opts = { ...opts }
-
-    this.cache = opts.cache || cacheDir().cacache
-    this.tufCache = opts.tufCache || cacheDir().tufcache
-    this.resolved = opts.resolved || null
-
-    // default to caching/verifying with sha512, that's what we usually have
-    // need to change this default, or start overriding it, when sha512
-    // is no longer strong enough.
-    this.defaultIntegrityAlgorithm = opts.defaultIntegrityAlgorithm || 'sha512'
-
-    if (typeof opts.integrity === 'string') {
-      this.opts.integrity = ssri.parse(opts.integrity)
-    }
-
-    this.package = null
-    this.type = this.constructor.name
-    this.fmode = opts.fmode || 0o666
-    this.dmode = opts.dmode || 0o777
-    // we don't need a default umask, because we don't chmod files coming
-    // out of package tarballs.  they're forced to have a mode that is
-    // valid, regardless of what's in the tarball entry, and then we let
-    // the process's umask setting do its job.  but if configured, we do
-    // respect it.
-    this.umask = opts.umask || 0
-
-    this.preferOnline = !!opts.preferOnline
-    this.preferOffline = !!opts.preferOffline
-    this.offline = !!opts.offline
-
-    this.before = opts.before
-    this.fullMetadata = this.before ? true : !!opts.fullMetadata
-    this.fullReadJson = !!opts.fullReadJson
-    if (this.fullReadJson) {
-      this[_readPackageJson] = readPackageJson
-    } else {
-      this[_readPackageJson] = readPackageJsonFast
-    }
-
-    // rrh is a registry hostname or 'never' or 'always'
-    // defaults to registry.npmjs.org
-    this.replaceRegistryHost = (!opts.replaceRegistryHost || opts.replaceRegistryHost === 'npmjs') ?
-      'registry.npmjs.org' : opts.replaceRegistryHost
-
-    this.defaultTag = opts.defaultTag || 'latest'
-    this.registry = removeTrailingSlashes(opts.registry || 'https://registry.npmjs.org')
-
-    // command to run 'prepare' scripts on directories and git dirs
-    // To use pacote with yarn, for example, set npmBin to 'yarn'
-    // and npmCliConfig with yarn's equivalents.
-    this.npmBin = opts.npmBin || 'npm'
-
-    // command to install deps for preparing
-    this.npmInstallCmd = opts.npmInstallCmd || ['install', '--force']
-
-    // XXX fill more of this in based on what we know from this.opts
-    // we explicitly DO NOT fill in --tag, though, since we are often
-    // going to be packing in the context of a publish, which may set
-    // a dist-tag, but certainly wants to keep defaulting to latest.
-    this.npmCliConfig = opts.npmCliConfig || [
-      `--cache=${dirname(this.cache)}`,
-      `--prefer-offline=${!!this.preferOffline}`,
-      `--prefer-online=${!!this.preferOnline}`,
-      `--offline=${!!this.offline}`,
-      ...(this.before ? [`--before=${this.before.toISOString()}`] : []),
-      '--no-progress',
-      '--no-save',
-      '--no-audit',
-      // override any omit settings from the environment
-      '--include=dev',
-      '--include=peer',
-      '--include=optional',
-      // we need the actual things, not just the lockfile
-      '--no-package-lock-only',
-      '--no-dry-run',
-    ]
-  }
-
-  get integrity () {
-    return this.opts.integrity || null
-  }
-
-  set integrity (i) {
-    if (!i) {
-      return
-    }
-
-    i = ssri.parse(i)
-    const current = this.opts.integrity
-
-    // do not ever update an existing hash value, but do
-    // merge in NEW algos and hashes that we don't already have.
-    if (current) {
-      current.merge(i)
-    } else {
-      this.opts.integrity = i
-    }
-  }
-
-  get notImplementedError () {
-    return new Error('not implemented in this fetcher type: ' + this.type)
-  }
-
-  // override in child classes
-  // Returns a Promise that resolves to this.resolved string value
-  resolve () {
-    return this.resolved ? Promise.resolve(this.resolved)
-      : Promise.reject(this.notImplementedError)
-  }
-
-  packument () {
-    return Promise.reject(this.notImplementedError)
-  }
-
-  // override in child class
-  // returns a manifest containing:
-  // - name
-  // - version
-  // - _resolved
-  // - _integrity
-  // - plus whatever else was in there (corgi, full metadata, or pj file)
-  manifest () {
-    return Promise.reject(this.notImplementedError)
-  }
-
-  // private, should be overridden.
-  // Note that they should *not* calculate or check integrity or cache,
-  // but *just*  return the raw tarball data stream.
-  [_tarballFromResolved] () {
-    throw this.notImplementedError
-  }
-
-  // public, should not be overridden
-  tarball () {
-    return this.tarballStream(stream => stream.concat().then(data => {
-      data.integrity = this.integrity && String(this.integrity)
-      data.resolved = this.resolved
-      data.from = this.from
-      return data
-    }))
-  }
-
-  // private
-  // Note: cacache will raise a EINTEGRITY error if the integrity doesn't match
-  [_tarballFromCache] () {
-    return cacache.get.stream.byDigest(this.cache, this.integrity, this.opts)
-  }
-
-  get [_cacheFetches] () {
-    return true
-  }
-
-  [_istream] (stream) {
-    // if not caching this, just return it
-    if (!this.opts.cache || !this[_cacheFetches]) {
-      // instead of creating a new integrity stream, we only piggyback on the
-      // provided stream's events
-      if (stream.hasIntegrityEmitter) {
-        stream.on('integrity', i => this.integrity = i)
-        return stream
-      }
-
-      const istream = ssri.integrityStream(this.opts)
-      istream.on('integrity', i => this.integrity = i)
-      stream.on('error', err => istream.emit('error', err))
-      return stream.pipe(istream)
-    }
-
-    // we have to return a stream that gets ALL the data, and proxies errors,
-    // but then pipe from the original tarball stream into the cache as well.
-    // To do this without losing any data, and since the cacache put stream
-    // is not a passthrough, we have to pipe from the original stream into
-    // the cache AFTER we pipe into the middleStream.  Since the cache stream
-    // has an asynchronous flush to write its contents to disk, we need to
-    // defer the middleStream end until the cache stream ends.
-    const middleStream = new Minipass()
-    stream.on('error', err => middleStream.emit('error', err))
-    stream.pipe(middleStream, { end: false })
-    const cstream = cacache.put.stream(
-      this.opts.cache,
-      `pacote:tarball:${this.from}`,
-      this.opts
-    )
-    cstream.on('integrity', i => this.integrity = i)
-    cstream.on('error', err => stream.emit('error', err))
-    stream.pipe(cstream)
-
-    // eslint-disable-next-line promise/catch-or-return
-    cstream.promise().catch(() => {}).then(() => middleStream.end())
-    return middleStream
-  }
-
-  pickIntegrityAlgorithm () {
-    return this.integrity ? this.integrity.pickAlgorithm(this.opts)
-      : this.defaultIntegrityAlgorithm
-  }
-
-  // TODO: check error class, once those are rolled out to our deps
-  isDataCorruptionError (er) {
-    return er.code === 'EINTEGRITY' || er.code === 'Z_DATA_ERROR'
-  }
-
-  // override the types getter
-  get types () {
-    return false
-  }
-
-  [_assertType] () {
-    if (this.types && !this.types.includes(this.spec.type)) {
-      throw new TypeError(`Wrong spec type (${
-        this.spec.type
-      }) for ${
-        this.constructor.name
-      }. Supported types: ${this.types.join(', ')}`)
-    }
-  }
-
-  // We allow ENOENTs from cacache, but not anywhere else.
-  // An ENOENT trying to read a tgz file, for example, is Right Out.
-  isRetriableError (er) {
-    // TODO: check error class, once those are rolled out to our deps
-    return this.isDataCorruptionError(er) ||
-      er.code === 'ENOENT' ||
-      er.code === 'EISDIR'
-  }
-
-  // Mostly internal, but has some uses
-  // Pass in a function which returns a promise
-  // Function will be called 1 or more times with streams that may fail.
-  // Retries:
-  // Function MUST handle errors on the stream by rejecting the promise,
-  // so that retry logic can pick it up and either retry or fail whatever
-  // promise it was making (ie, failing extraction, etc.)
-  //
-  // The return value of this method is a Promise that resolves the same
-  // as whatever the streamHandler resolves to.
-  //
-  // This should never be overridden by child classes, but it is public.
-  tarballStream (streamHandler) {
-    // Only short-circuit via cache if we have everything else we'll need,
-    // and the user has not expressed a preference for checking online.
-
-    const fromCache = (
-      !this.preferOnline &&
-      this.integrity &&
-      this.resolved
-    ) ? streamHandler(this[_tarballFromCache]()).catch(er => {
-        if (this.isDataCorruptionError(er)) {
-          log.warn('tarball', `cached data for ${
-          this.spec
-        } (${this.integrity}) seems to be corrupted. Refreshing cache.`)
-          return this.cleanupCached().then(() => {
-            throw er
-          })
-        } else {
-          throw er
-        }
-      }) : null
-
-    const fromResolved = er => {
-      if (er) {
-        if (!this.isRetriableError(er)) {
-          throw er
-        }
-        log.silly('tarball', `no local data for ${
-          this.spec
-        }. Extracting by manifest.`)
-      }
-      return this.resolve().then(() => retry(tryAgain =>
-        streamHandler(this[_istream](this[_tarballFromResolved]()))
-          .catch(streamErr => {
-          // Most likely data integrity.  A cache ENOENT error is unlikely
-          // here, since we're definitely not reading from the cache, but it
-          // IS possible that the fetch subsystem accessed the cache, and the
-          // entry got blown away or something.  Try one more time to be sure.
-            if (this.isRetriableError(streamErr)) {
-              log.warn('tarball', `tarball data for ${
-              this.spec
-            } (${this.integrity}) seems to be corrupted. Trying again.`)
-              return this.cleanupCached().then(() => tryAgain(streamErr))
-            }
-            throw streamErr
-          }), { retries: 1, minTimeout: 0, maxTimeout: 0 }))
-    }
-
-    return fromCache ? fromCache.catch(fromResolved) : fromResolved()
-  }
-
-  cleanupCached () {
-    return cacache.rm.content(this.cache, this.integrity, this.opts)
-  }
-
-  [_empty] (path) {
-    return getContents({ path, depth: 1 }).then(contents => Promise.all(
-      contents.map(entry => fs.rm(entry, { recursive: true, force: true }))))
-  }
-
-  async [_mkdir] (dest) {
-    await this[_empty](dest)
-    return await fs.mkdir(dest, { recursive: true })
-  }
-
-  // extraction is always the same.  the only difference is where
-  // the tarball comes from.
-  async extract (dest) {
-    await this[_mkdir](dest)
-    return this.tarballStream((tarball) => this[_extract](dest, tarball))
-  }
-
-  [_toFile] (dest) {
-    return this.tarballStream(str => new Promise((res, rej) => {
-      const writer = new fsm.WriteStream(dest)
-      str.on('error', er => writer.emit('error', er))
-      writer.on('error', er => rej(er))
-      writer.on('close', () => res({
-        integrity: this.integrity && String(this.integrity),
-        resolved: this.resolved,
-        from: this.from,
-      }))
-      str.pipe(writer)
-    }))
-  }
-
-  // don't use this[_mkdir] because we don't want to rimraf anything
-  async tarballFile (dest) {
-    const dir = dirname(dest)
-    await fs.mkdir(dir, { recursive: true })
-    return this[_toFile](dest)
-  }
-
-  [_extract] (dest, tarball) {
-    const extractor = tar.x(this[_tarxOptions]({ cwd: dest }))
-    const p = new Promise((resolve, reject) => {
-      extractor.on('end', () => {
-        resolve({
-          resolved: this.resolved,
-          integrity: this.integrity && String(this.integrity),
-          from: this.from,
-        })
-      })
-
-      extractor.on('error', er => {
-        log.warn('tar', er.message)
-        log.silly('tar', er)
-        reject(er)
-      })
-
-      tarball.on('error', er => reject(er))
-    })
-
-    tarball.pipe(extractor)
-    return p
-  }
-
-  // always ensure that entries are at least as permissive as our configured
-  // dmode/fmode, but never more permissive than the umask allows.
-  [_entryMode] (path, mode, type) {
-    const m = /Directory|GNUDumpDir/.test(type) ? this.dmode
-      : /File$/.test(type) ? this.fmode
-      : /* istanbul ignore next - should never happen in a pkg */ 0
-
-    // make sure package bins are executable
-    const exe = isPackageBin(this.package, path) ? 0o111 : 0
-    // always ensure that files are read/writable by the owner
-    return ((mode | m) & ~this.umask) | exe | 0o600
-  }
-
-  [_tarxOptions] ({ cwd }) {
-    const sawIgnores = new Set()
-    return {
-      cwd,
-      noChmod: true,
-      noMtime: true,
-      filter: (name, entry) => {
-        if (/Link$/.test(entry.type)) {
-          return false
-        }
-        entry.mode = this[_entryMode](entry.path, entry.mode, entry.type)
-        // this replicates the npm pack behavior where .gitignore files
-        // are treated like .npmignore files, but only if a .npmignore
-        // file is not present.
-        if (/File$/.test(entry.type)) {
-          const base = basename(entry.path)
-          if (base === '.npmignore') {
-            sawIgnores.add(entry.path)
-          } else if (base === '.gitignore' && !this.allowGitIgnore) {
-            // rename, but only if there's not already a .npmignore
-            const ni = entry.path.replace(/\.gitignore$/, '.npmignore')
-            if (sawIgnores.has(ni)) {
-              return false
-            }
-            entry.path = ni
-          }
-          return true
-        }
-      },
-      strip: 1,
-      onwarn: /* istanbul ignore next - we can trust that tar logs */
-      (code, msg, data) => {
-        log.warn('tar', code, msg)
-        log.silly('tar', code, msg, data)
-      },
-      umask: this.umask,
-      // always ignore ownership info from tarball metadata
-      preserveOwner: false,
-    }
-  }
-}
-
-module.exports = FetcherBase
-
-// Child classes
-const GitFetcher = require('./git.js')
-const RegistryFetcher = require('./registry.js')
-const FileFetcher = require('./file.js')
-const DirFetcher = require('./dir.js')
-const RemoteFetcher = require('./remote.js')
-
-// Get an appropriate fetcher object from a spec and options
-FetcherBase.get = (rawSpec, opts = {}) => {
-  const spec = npa(rawSpec, opts.where)
-  switch (spec.type) {
-    case 'git':
-      return new GitFetcher(spec, opts)
-
-    case 'remote':
-      return new RemoteFetcher(spec, opts)
-
-    case 'version':
-    case 'range':
-    case 'tag':
-    case 'alias':
-      return new RegistryFetcher(spec.subSpec || spec, opts)
-
-    case 'file':
-      return new FileFetcher(spec, opts)
-
-    case 'directory':
-      return new DirFetcher(spec, opts)
-
-    default:
-      throw new TypeError('Unknown spec type: ' + spec.type)
-  }
-}
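
A minimal usage sketch of the dispatch above (the specs are illustrative, not from this patch): FetcherBase.get parses the spec with npm-package-arg and returns the matching subclass, so callers never name a fetcher directly.

    const { get } = require('./fetcher.js')
    get('pacote@^17.0.0')            // range  -> RegistryFetcher
    get('github:npm/pacote')         // git    -> GitFetcher
    get('file:./pacote-17.0.0.tgz')  // file   -> FileFetcher
    get('https://example.com/p.tgz') // remote -> RemoteFetcher
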
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js
deleted file mode 100644
index bf99bb86e359e..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/file.js
+++ /dev/null
@@ -1,96 +0,0 @@
-const Fetcher = require('./fetcher.js')
-const fsm = require('fs-minipass')
-const cacache = require('cacache')
-const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
-const _exeBins = Symbol('_exeBins')
-const { resolve } = require('path')
-const fs = require('fs')
-const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson')
-
-class FileFetcher extends Fetcher {
-  constructor (spec, opts) {
-    super(spec, opts)
-    // just the fully resolved filename
-    this.resolved = this.spec.fetchSpec
-  }
-
-  get types () {
-    return ['file']
-  }
-
-  manifest () {
-    if (this.package) {
-      return Promise.resolve(this.package)
-    }
-
-    // have to unpack the tarball for this.
-    return cacache.tmp.withTmp(this.cache, this.opts, dir =>
-      this.extract(dir)
-        .then(() => this[_readPackageJson](dir + '/package.json'))
-        .then(mani => this.package = {
-          ...mani,
-          _integrity: this.integrity && String(this.integrity),
-          _resolved: this.resolved,
-          _from: this.from,
-        }))
-  }
-
-  [_exeBins] (pkg, dest) {
-    if (!pkg.bin) {
-      return Promise.resolve()
-    }
-
-    return Promise.all(Object.keys(pkg.bin).map(k => new Promise(res => {
-      const script = resolve(dest, pkg.bin[k])
-      // Best effort.  Ignore errors here, the only result is that
-      // a bin script is not executable.  But if it's missing or
-      // something, we just leave it for a later stage to trip over
-      // when we can provide a more useful contextual error.
-      fs.stat(script, (er, st) => {
-        if (er) {
-          return res()
-        }
-        const mode = st.mode | 0o111
-        if (mode === st.mode) {
-          return res()
-        }
-        fs.chmod(script, mode, res)
-      })
-    })))
-  }
-
-  extract (dest) {
-    // if we've already loaded the manifest, then the super got it.
-    // but if not, read the unpacked manifest and chmod properly.
-    return super.extract(dest)
-      .then(result => this.package ? result
-      : this[_readPackageJson](dest + '/package.json').then(pkg =>
-        this[_exeBins](pkg, dest)).then(() => result))
-  }
-
-  [_tarballFromResolved] () {
-    // create a read stream and return it
-    return new fsm.ReadStream(this.resolved)
-  }
-
-  packument () {
-    // simulate based on manifest
-    return this.manifest().then(mani => ({
-      name: mani.name,
-      'dist-tags': {
-        [this.defaultTag]: mani.version,
-      },
-      versions: {
-        [mani.version]: {
-          ...mani,
-          dist: {
-            tarball: `file:${this.resolved}`,
-            integrity: this.integrity && String(this.integrity),
-          },
-        },
-      },
-    }))
-  }
-}
-
-module.exports = FileFetcher
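
As a sketch of the flow above (the tarball path and cache location are assumed): manifest() extracts the tarball into a cacache temp directory, reads its package.json, and memoizes the result on this.package.

    const FileFetcher = require('./file.js')
    const f = new FileFetcher('file:./pkg-1.0.0.tgz', { cache: '/tmp/_cacache' })
    f.manifest().then(mani => console.log(mani.name, mani._resolved))
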
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js
deleted file mode 100644
index 5d24f72497ec9..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js
+++ /dev/null
@@ -1,327 +0,0 @@
-const Fetcher = require('./fetcher.js')
-const FileFetcher = require('./file.js')
-const RemoteFetcher = require('./remote.js')
-const DirFetcher = require('./dir.js')
-const hashre = /^[a-f0-9]{40}$/
-const git = require('@npmcli/git')
-const pickManifest = require('npm-pick-manifest')
-const npa = require('npm-package-arg')
-const { Minipass } = require('minipass')
-const cacache = require('cacache')
-const log = require('proc-log')
-const npm = require('./util/npm.js')
-
-const _resolvedFromRepo = Symbol('_resolvedFromRepo')
-const _resolvedFromHosted = Symbol('_resolvedFromHosted')
-const _resolvedFromClone = Symbol('_resolvedFromClone')
-const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
-const _addGitSha = Symbol('_addGitSha')
-const addGitSha = require('./util/add-git-sha.js')
-const _clone = Symbol('_clone')
-const _cloneHosted = Symbol('_cloneHosted')
-const _cloneRepo = Symbol('_cloneRepo')
-const _setResolvedWithSha = Symbol('_setResolvedWithSha')
-const _prepareDir = Symbol('_prepareDir')
-const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson')
-
-// get the repository url.
-// prefer https if there's auth, since ssh will drop that.
-// otherwise, prefer ssh if available (more secure).
-// We have to add the git+ back because npa suppresses it.
-const repoUrl = (h, opts) =>
-  h.sshurl && !(h.https && h.auth) && addGitPlus(h.sshurl(opts)) ||
-  h.https && addGitPlus(h.https(opts))
-
-// add git+ to the url, but only one time.
-const addGitPlus = url => url && `git+${url}`.replace(/^(git\+)+/, 'git+')
-
-class GitFetcher extends Fetcher {
-  constructor (spec, opts) {
-    super(spec, opts)
-
-    // we never want to compare integrity for git dependencies: npm/rfcs#525
-    if (this.opts.integrity) {
-      delete this.opts.integrity
-      log.warn(`skipping integrity check for git dependency ${this.spec.fetchSpec}`)
-    }
-
-    this.resolvedRef = null
-    if (this.spec.hosted) {
-      this.from = this.spec.hosted.shortcut({ noCommittish: false })
-    }
-
-    // shortcut: avoid full clone when we can go straight to the tgz
-    // if we have the full sha and it's a hosted git platform
-    if (this.spec.gitCommittish && hashre.test(this.spec.gitCommittish)) {
-      this.resolvedSha = this.spec.gitCommittish
-      // use hosted.tarball() when we shell to RemoteFetcher later
-      this.resolved = this.spec.hosted
-        ? repoUrl(this.spec.hosted, { noCommittish: false })
-        : this.spec.rawSpec
-    } else {
-      this.resolvedSha = ''
-    }
-
-    this.Arborist = opts.Arborist || null
-  }
-
-  // just exposed to make it easier to test all the combinations
-  static repoUrl (hosted, opts) {
-    return repoUrl(hosted, opts)
-  }
-
-  get types () {
-    return ['git']
-  }
-
-  resolve () {
-    // likely a hosted git repo with a sha, so get the tarball url
-    // but in general, no reason to resolve() more than necessary!
-    if (this.resolved) {
-      return super.resolve()
-    }
-
-    // fetch the git repo and then look at the current hash
-    const h = this.spec.hosted
-    // try to use ssh, fall back to git.
-    return h ? this[_resolvedFromHosted](h)
-      : this[_resolvedFromRepo](this.spec.fetchSpec)
-  }
-
-  // first try https, since that's faster and passphrase-less for
-  // public repos, and supports private repos when auth is provided.
-  // Fall back to SSH to support private repos
-  // NB: we always store the https url in resolved field if auth
-  // is present, otherwise ssh if the hosted type provides it
-  [_resolvedFromHosted] (hosted) {
-    return this[_resolvedFromRepo](hosted.https && hosted.https())
-      .catch(er => {
-        // Throw early since we know pathspec errors will fail again if retried
-        if (er instanceof git.errors.GitPathspecError) {
-          throw er
-        }
-        const ssh = hosted.sshurl && hosted.sshurl()
-        // no fallthrough if we can't fall through or have https auth
-        if (!ssh || hosted.auth) {
-          throw er
-        }
-        return this[_resolvedFromRepo](ssh)
-      })
-  }
-
-  [_resolvedFromRepo] (gitRemote) {
-    // XXX make this a custom error class
-    if (!gitRemote) {
-      return Promise.reject(new Error(`No git url for ${this.spec}`))
-    }
-    const gitRange = this.spec.gitRange
-    const name = this.spec.name
-    return git.revs(gitRemote, this.opts).then(remoteRefs => {
-      return gitRange ? pickManifest({
-        versions: remoteRefs.versions,
-        'dist-tags': remoteRefs['dist-tags'],
-        name,
-      }, gitRange, this.opts)
-        : this.spec.gitCommittish ?
-          remoteRefs.refs[this.spec.gitCommittish] ||
-          remoteRefs.refs[remoteRefs.shas[this.spec.gitCommittish]]
-          : remoteRefs.refs.HEAD // no git committish, get default head
-    }).then(revDoc => {
-      // the committish provided isn't in the rev list
-      // things like HEAD~3 or @yesterday can land here.
-      if (!revDoc || !revDoc.sha) {
-        return this[_resolvedFromClone]()
-      }
-
-      this.resolvedRef = revDoc
-      this.resolvedSha = revDoc.sha
-      this[_addGitSha](revDoc.sha)
-      return this.resolved
-    })
-  }
-
-  [_setResolvedWithSha] (withSha) {
-    // we haven't cloned, so a tgz download is still faster
-    // of course, if it's not a known host, we can't do that.
-    this.resolved = !this.spec.hosted ? withSha
-      : repoUrl(npa(withSha).hosted, { noCommittish: false })
-  }
-
-  // when we get the git sha, we affix it to our spec to build up
-  // either a git url with a hash, or a tarball download URL
-  [_addGitSha] (sha) {
-    this[_setResolvedWithSha](addGitSha(this.spec, sha))
-  }
-
-  [_resolvedFromClone] () {
-    // do a full or shallow clone, then look at the HEAD
-    // kind of wasteful, but no other option, really
-    return this[_clone](dir => this.resolved)
-  }
-
-  [_prepareDir] (dir) {
-    return this[_readPackageJson](dir + '/package.json').then(mani => {
-      // no need if we aren't going to do any preparation.
-      const scripts = mani.scripts
-      if (!mani.workspaces && (!scripts || !(
-        scripts.postinstall ||
-          scripts.build ||
-          scripts.preinstall ||
-          scripts.install ||
-          scripts.prepack ||
-          scripts.prepare))) {
-        return
-      }
-
-      // to avoid cases where we have a cycle of git deps that depend
-      // on one another, we only ever do preparation for one instance
-      // of a given git dep along the chain of installations.
-      // Note that this does mean that a dependency MAY in theory end up
-      // trying to run its prepare script using a dependency that has not
-      // been properly prepared itself, but that edge case is smaller
-      // and less hazardous than a fork bomb of npm and git commands.
-      const noPrepare = !process.env._PACOTE_NO_PREPARE_ ? []
-        : process.env._PACOTE_NO_PREPARE_.split('\n')
-      if (noPrepare.includes(this.resolved)) {
-        log.info('prepare', 'skip prepare, already seen', this.resolved)
-        return
-      }
-      noPrepare.push(this.resolved)
-
-      // the DirFetcher will do its own preparation to run the prepare scripts
-      // All we have to do is put the deps in place so that it can succeed.
-      return npm(
-        this.npmBin,
-        [].concat(this.npmInstallCmd).concat(this.npmCliConfig),
-        dir,
-        { ...process.env, _PACOTE_NO_PREPARE_: noPrepare.join('\n') },
-        { message: 'git dep preparation failed' }
-      )
-    })
-  }
-
-  [_tarballFromResolved] () {
-    const stream = new Minipass()
-    stream.resolved = this.resolved
-    stream.from = this.from
-
-    // check it out and then shell out to the DirFetcher tarball packer
-    this[_clone](dir => this[_prepareDir](dir)
-      .then(() => new Promise((res, rej) => {
-        if (!this.Arborist) {
-          throw new Error('GitFetcher requires an Arborist constructor to pack a tarball')
-        }
-        const df = new DirFetcher(`file:${dir}`, {
-          ...this.opts,
-          Arborist: this.Arborist,
-          resolved: null,
-          integrity: null,
-        })
-        const dirStream = df[_tarballFromResolved]()
-        dirStream.on('error', rej)
-        dirStream.on('end', res)
-        dirStream.pipe(stream)
-      }))).catch(
-      /* istanbul ignore next: very unlikely and hard to test */
-      er => stream.emit('error', er)
-    )
-    return stream
-  }
-
-  // clone a git repo into a temp folder (or fetch and unpack if possible)
-  // handler accepts a directory, and returns a promise that resolves
-  // when we're done with it, at which point, cacache deletes it
-  //
-  // TODO: after cloning, create a tarball of the folder, and add to the cache
-  // with cacache.put.stream(), using a key that's deterministic based on the
-  // spec and repo, so that we don't ever clone the same thing multiple times.
-  [_clone] (handler, tarballOk = true) {
-    const o = { tmpPrefix: 'git-clone' }
-    const ref = this.resolvedSha || this.spec.gitCommittish
-    const h = this.spec.hosted
-    const resolved = this.resolved
-
-    // can be set manually to false to fall back to actual git clone
-    tarballOk = tarballOk &&
-      h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball
-
-    return cacache.tmp.withTmp(this.cache, o, async tmp => {
-      // if we're resolved, and have a tarball url, shell out to RemoteFetcher
-      if (tarballOk) {
-        const nameat = this.spec.name ? `${this.spec.name}@` : ''
-        return new RemoteFetcher(h.tarball({ noCommittish: false }), {
-          ...this.opts,
-          allowGitIgnore: true,
-          pkgid: `git:${nameat}${this.resolved}`,
-          resolved: this.resolved,
-          integrity: null, // it'll always be different, if we have one
-        }).extract(tmp).then(() => handler(tmp), er => {
-          // fall back to ssh download if tarball fails
-          if (er.constructor.name.match(/^Http/)) {
-            return this[_clone](handler, false)
-          } else {
-            throw er
-          }
-        })
-      }
-
-      const sha = await (
-        h ? this[_cloneHosted](ref, tmp)
-        : this[_cloneRepo](this.spec.fetchSpec, ref, tmp)
-      )
-      this.resolvedSha = sha
-      if (!this.resolved) {
-        await this[_addGitSha](sha)
-      }
-      return handler(tmp)
-    })
-  }
-
-  // first try https, since that's faster and passphrase-less for
-  // public repos, and supports private repos when auth is provided.
-  // Fall back to SSH to support private repos
-  // NB: we always store the https url in resolved field if auth
-  // is present, otherwise ssh if the hosted type provides it
-  [_cloneHosted] (ref, tmp) {
-    const hosted = this.spec.hosted
-    return this[_cloneRepo](hosted.https({ noCommittish: true }), ref, tmp)
-      .catch(er => {
-        // Throw early since we know pathspec errors will fail again if retried
-        if (er instanceof git.errors.GitPathspecError) {
-          throw er
-        }
-        const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true })
-        // no fallthrough if we can't fall through or have https auth
-        if (!ssh || hosted.auth) {
-          throw er
-        }
-        return this[_cloneRepo](ssh, ref, tmp)
-      })
-  }
-
-  [_cloneRepo] (repo, ref, tmp) {
-    const { opts, spec } = this
-    return git.clone(repo, ref, tmp, { ...opts, spec })
-  }
-
-  manifest () {
-    if (this.package) {
-      return Promise.resolve(this.package)
-    }
-
-    return this.spec.hosted && this.resolved
-      ? FileFetcher.prototype.manifest.apply(this)
-      : this[_clone](dir =>
-        this[_readPackageJson](dir + '/package.json')
-          .then(mani => this.package = {
-            ...mani,
-            _resolved: this.resolved,
-            _from: this.from,
-          }))
-  }
-
-  packument () {
-    return FileFetcher.prototype.packument.apply(this)
-  }
-}
-module.exports = GitFetcher
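
The URL preference encoded in repoUrl and addGitPlus above can be seen in isolation (inputs are illustrative): ssh wins unless the https URL carries auth, and the git+ prefix is applied at most once.

    addGitPlus('https://github.com/npm/pacote.git')
    // => 'git+https://github.com/npm/pacote.git'
    addGitPlus('git+ssh://git@github.com/npm/pacote.git')
    // => 'git+ssh://git@github.com/npm/pacote.git' (prefix not doubled)
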
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js
deleted file mode 100644
index cbcbd7c92d15f..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js
+++ /dev/null
@@ -1,23 +0,0 @@
-const { get } = require('./fetcher.js')
-const GitFetcher = require('./git.js')
-const RegistryFetcher = require('./registry.js')
-const FileFetcher = require('./file.js')
-const DirFetcher = require('./dir.js')
-const RemoteFetcher = require('./remote.js')
-
-module.exports = {
-  GitFetcher,
-  RegistryFetcher,
-  FileFetcher,
-  DirFetcher,
-  RemoteFetcher,
-  resolve: (spec, opts) => get(spec, opts).resolve(),
-  extract: (spec, dest, opts) => get(spec, opts).extract(dest),
-  manifest: (spec, opts) => get(spec, opts).manifest(),
-  tarball: (spec, opts) => get(spec, opts).tarball(),
-  packument: (spec, opts) => get(spec, opts).packument(),
-}
-module.exports.tarball.stream = (spec, handler, opts) =>
-  get(spec, opts).tarballStream(handler)
-module.exports.tarball.file = (spec, dest, opts) =>
-  get(spec, opts).tarballFile(dest)
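
A usage sketch of the API assembled above, assuming an async context and a placeholder package name:

    const pacote = require('./index.js')
    const mani = await pacote.manifest('abbrev@^1.0.0')  // resolved manifest
    await pacote.extract('abbrev@^1.0.0', './unpacked')  // unpack to a dir
    await pacote.tarball.file('abbrev@^1.0.0', './abbrev.tgz')
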
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js
deleted file mode 100644
index 34d9b2b87f3f3..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js
+++ /dev/null
@@ -1,344 +0,0 @@
-const Fetcher = require('./fetcher.js')
-const RemoteFetcher = require('./remote.js')
-const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
-const pacoteVersion = require('../package.json').version
-const removeTrailingSlashes = require('./util/trailing-slashes.js')
-const rpj = require('read-package-json-fast')
-const pickManifest = require('npm-pick-manifest')
-const ssri = require('ssri')
-const crypto = require('crypto')
-const npa = require('npm-package-arg')
-const { sigstore } = require('sigstore')
-
-// Corgis are cute. 🐕🐶
-const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
-const fullDoc = 'application/json'
-
-const fetch = require('npm-registry-fetch')
-
-const _headers = Symbol('_headers')
-class RegistryFetcher extends Fetcher {
-  constructor (spec, opts) {
-    super(spec, opts)
-
-    // you usually don't want to fetch the same packument multiple times in
-    // the span of a given script or command, no matter how many pacote calls
-    // are made, so this lets us avoid doing that.  It's only relevant for
-    // registry fetchers, because other types simulate their packument from
-    // the manifest, which they memoize on this.package, so it's very cheap
-    // already.
-    this.packumentCache = this.opts.packumentCache || null
-
-    this.registry = fetch.pickRegistry(spec, opts)
-    this.packumentUrl = removeTrailingSlashes(this.registry) + '/' +
-      this.spec.escapedName
-
-    const parsed = new URL(this.registry)
-    const regKey = `//${parsed.host}${parsed.pathname}`
-    // unlike the nerf-darted auth keys, this one does *not* allow a mismatch
-    // of trailing slashes.  It must match exactly.
-    if (this.opts[`${regKey}:_keys`]) {
-      this.registryKeys = this.opts[`${regKey}:_keys`]
-    }
-
-    // XXX pacote <=9 has some logic to ignore opts.resolved if
-    // the resolved URL doesn't go to the same registry.
-    // Consider reproducing that here, to throw away this.resolved
-    // in that case.
-  }
-
-  async resolve () {
-    // fetching the manifest sets resolved and (if present) integrity
-    await this.manifest()
-    if (!this.resolved) {
-      throw Object.assign(
-        new Error('Invalid package manifest: no `dist.tarball` field'),
-        { package: this.spec.toString() }
-      )
-    }
-    return this.resolved
-  }
-
-  [_headers] () {
-    return {
-      // npm will override UA, but ensure that we always send *something*
-      'user-agent': this.opts.userAgent ||
-        `pacote/${pacoteVersion} node/${process.version}`,
-      ...(this.opts.headers || {}),
-      'pacote-version': pacoteVersion,
-      'pacote-req-type': 'packument',
-      'pacote-pkg-id': `registry:${this.spec.name}`,
-      accept: this.fullMetadata ? fullDoc : corgiDoc,
-    }
-  }
-
-  async packument () {
-    // note this might be either an in-flight promise for a request,
-    // or the actual packument, but we never want to make more than
-    // one request at a time for the same thing regardless.
-    if (this.packumentCache && this.packumentCache.has(this.packumentUrl)) {
-      return this.packumentCache.get(this.packumentUrl)
-    }
-
-    // npm-registry-fetch the packument
-    // set the appropriate header for corgis if fullMetadata isn't set
-    // return the res.json() promise
-    try {
-      const res = await fetch(this.packumentUrl, {
-        ...this.opts,
-        headers: this[_headers](),
-        spec: this.spec,
-        // never check integrity for packuments themselves
-        integrity: null,
-      })
-      const packument = await res.json()
-      packument._contentLength = +res.headers.get('content-length')
-      if (this.packumentCache) {
-        this.packumentCache.set(this.packumentUrl, packument)
-      }
-      return packument
-    } catch (err) {
-      if (this.packumentCache) {
-        this.packumentCache.delete(this.packumentUrl)
-      }
-      if (err.code !== 'E404' || this.fullMetadata) {
-        throw err
-      }
-      // possible that corgis are not supported by this registry
-      this.fullMetadata = true
-      return this.packument()
-    }
-  }
-
-  async manifest () {
-    if (this.package) {
-      return this.package
-    }
-
-    const packument = await this.packument()
-    let mani = await pickManifest(packument, this.spec.fetchSpec, {
-      ...this.opts,
-      defaultTag: this.defaultTag,
-      before: this.before,
-    })
-    mani = rpj.normalize(mani)
-    /* XXX add ETARGET and E403 revalidation of cached packuments here */
-
-    // add _resolved and _integrity from dist object
-    const { dist } = mani
-    if (dist) {
-      this.resolved = mani._resolved = dist.tarball
-      mani._from = this.from
-      const distIntegrity = dist.integrity ? ssri.parse(dist.integrity)
-        : dist.shasum ? ssri.fromHex(dist.shasum, 'sha1', { ...this.opts })
-        : null
-      if (distIntegrity) {
-        if (this.integrity && !this.integrity.match(distIntegrity)) {
-          // only bork if they have algos in common.
-          // otherwise we end up breaking if we have saved a sha512
-          // previously for the tarball, but the manifest only
-          // provides a sha1, which is possible for older publishes.
-          // Otherwise, this is almost certainly a case of holding it
-          // wrong, and will result in weird or insecure behavior
-          // later on when building package tree.
-          for (const algo of Object.keys(this.integrity)) {
-            if (distIntegrity[algo]) {
-              throw Object.assign(new Error(
-                `Integrity checksum failed when using ${algo}: ` +
-                `wanted ${this.integrity} but got ${distIntegrity}.`
-              ), { code: 'EINTEGRITY' })
-            }
-          }
-        }
-        // made it this far, the integrity is worthwhile.  accept it.
-        // the setter here will take care of merging it into what we already
-        // had.
-        this.integrity = distIntegrity
-      }
-    }
-    if (this.integrity) {
-      mani._integrity = String(this.integrity)
-      if (dist.signatures) {
-        if (this.opts.verifySignatures) {
-          // validate and throw on error, then set _signatures
-          const message = `${mani._id}:${mani._integrity}`
-          for (const signature of dist.signatures) {
-            const publicKey = this.registryKeys &&
-              this.registryKeys.filter(key => (key.keyid === signature.keyid))[0]
-            if (!publicKey) {
-              throw Object.assign(new Error(
-                  `${mani._id} has a registry signature with keyid: ${signature.keyid} ` +
-                  'but no corresponding public key can be found'
-              ), { code: 'EMISSINGSIGNATUREKEY' })
-            }
-            const validPublicKey =
-              !publicKey.expires || (Date.parse(publicKey.expires) > Date.now())
-            if (!validPublicKey) {
-              throw Object.assign(new Error(
-                  `${mani._id} has a registry signature with keyid: ${signature.keyid} ` +
-                  `but the corresponding public key has expired ${publicKey.expires}`
-              ), { code: 'EEXPIREDSIGNATUREKEY' })
-            }
-            const verifier = crypto.createVerify('SHA256')
-            verifier.write(message)
-            verifier.end()
-            const valid = verifier.verify(
-              publicKey.pemkey,
-              signature.sig,
-              'base64'
-            )
-            if (!valid) {
-              throw Object.assign(new Error(
-                  `${mani._id} has an invalid registry signature with ` +
-                  `keyid: ${publicKey.keyid} and signature: ${signature.sig}`
-              ), {
-                code: 'EINTEGRITYSIGNATURE',
-                keyid: publicKey.keyid,
-                signature: signature.sig,
-                resolved: mani._resolved,
-                integrity: mani._integrity,
-              })
-            }
-          }
-          mani._signatures = dist.signatures
-        } else {
-          mani._signatures = dist.signatures
-        }
-      }
-
-      if (dist.attestations) {
-        if (this.opts.verifyAttestations) {
-          // Always fetch attestations from the current registry host
-          const attestationsPath = new URL(dist.attestations.url).pathname
-          const attestationsUrl = removeTrailingSlashes(this.registry) + attestationsPath
-          const res = await fetch(attestationsUrl, {
-            ...this.opts,
-            // disable integrity check for attestations json payload, we check the
-            // integrity in the verification steps below
-            integrity: null,
-          })
-          const { attestations } = await res.json()
-          const bundles = attestations.map(({ predicateType, bundle }) => {
-            const statement = JSON.parse(
-              Buffer.from(bundle.dsseEnvelope.payload, 'base64').toString('utf8')
-            )
-            const keyid = bundle.dsseEnvelope.signatures[0].keyid
-            const signature = bundle.dsseEnvelope.signatures[0].sig
-
-            return {
-              predicateType,
-              bundle,
-              statement,
-              keyid,
-              signature,
-            }
-          })
-
-          const attestationKeyIds = bundles.map((b) => b.keyid).filter((k) => !!k)
-          const attestationRegistryKeys = (this.registryKeys || [])
-            .filter(key => attestationKeyIds.includes(key.keyid))
-          if (!attestationRegistryKeys.length) {
-            throw Object.assign(new Error(
-              `${mani._id} has attestations but no corresponding public key(s) can be found`
-            ), { code: 'EMISSINGSIGNATUREKEY' })
-          }
-
-          for (const { predicateType, bundle, keyid, signature, statement } of bundles) {
-            const publicKey = attestationRegistryKeys.find(key => key.keyid === keyid)
-            // Publish attestations have a keyid set and a valid public key must be found
-            if (keyid) {
-              if (!publicKey) {
-                throw Object.assign(new Error(
-                  `${mani._id} has attestations with keyid: ${keyid} ` +
-                  'but no corresponding public key can be found'
-                ), { code: 'EMISSINGSIGNATUREKEY' })
-              }
-
-              const validPublicKey =
-                !publicKey.expires || (Date.parse(publicKey.expires) > Date.now())
-              if (!validPublicKey) {
-                throw Object.assign(new Error(
-                  `${mani._id} has attestations with keyid: ${keyid} ` +
-                  `but the corresponding public key has expired ${publicKey.expires}`
-                ), { code: 'EEXPIREDSIGNATUREKEY' })
-              }
-            }
-
-            const subject = {
-              name: statement.subject[0].name,
-              sha512: statement.subject[0].digest.sha512,
-            }
-
-            // Only type 'version' can be turned into a PURL
-            const purl = this.spec.type === 'version' ? npa.toPurl(this.spec) : this.spec
-            // Verify the statement subject matches the package, version
-            if (subject.name !== purl) {
-              throw Object.assign(new Error(
-                `${mani._id} package name and version (PURL): ${purl} ` +
-                `doesn't match what was signed: ${subject.name}`
-              ), { code: 'EATTESTATIONSUBJECT' })
-            }
-
-            // Verify the statement subject matches the tarball integrity
-            const integrityHexDigest = ssri.parse(this.integrity).hexDigest()
-            if (subject.sha512 !== integrityHexDigest) {
-              throw Object.assign(new Error(
-                `${mani._id} package integrity (hex digest): ` +
-                `${integrityHexDigest} ` +
-                `doesn't match what was signed: ${subject.sha512}`
-              ), { code: 'EATTESTATIONSUBJECT' })
-            }
-
-            try {
-              // Provenance attestations are signed with a signing certificate
-              // (including the key) so we don't need to return a public key.
-              //
-              // Publish attestations are signed with a keyid so we need to
-              // specify a public key from the keys endpoint: `registry-host.tld/-/npm/v1/keys`
-              const options = {
-                tufCachePath: this.tufCache,
-                keySelector: publicKey ? () => publicKey.pemkey : undefined,
-              }
-              await sigstore.verify(bundle, null, options)
-            } catch (e) {
-              throw Object.assign(new Error(
-                `${mani._id} failed to verify attestation: ${e.message}`
-              ), {
-                code: 'EATTESTATIONVERIFY',
-                predicateType,
-                keyid,
-                signature,
-                resolved: mani._resolved,
-                integrity: mani._integrity,
-              })
-            }
-          }
-          mani._attestations = dist.attestations
-        } else {
-          mani._attestations = dist.attestations
-        }
-      }
-    }
-
-    this.package = mani
-    return this.package
-  }
-
-  [_tarballFromResolved] () {
-    // we use a RemoteFetcher to get the actual tarball stream
-    return new RemoteFetcher(this.resolved, {
-      ...this.opts,
-      resolved: this.resolved,
-      pkgid: `registry:${this.spec.name}@${this.resolved}`,
-    })[_tarballFromResolved]()
-  }
-
-  get types () {
-    return [
-      'tag',
-      'version',
-      'range',
-    ]
-  }
-}
-module.exports = RegistryFetcher
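
The per-signature check in manifest() above reduces to a few crypto calls; a standalone sketch, where the field names (pemkey, sig) mirror the registry key objects used above:

    const crypto = require('crypto')
    const verifySignature = (id, integrity, pemkey, sig) => {
      const verifier = crypto.createVerify('SHA256')
      verifier.write(`${id}:${integrity}`) // same message shape as above
      verifier.end()
      return verifier.verify(pemkey, sig, 'base64')
    }
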
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js
deleted file mode 100644
index fd617459fb031..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js
+++ /dev/null
@@ -1,91 +0,0 @@
-const Fetcher = require('./fetcher.js')
-const FileFetcher = require('./file.js')
-const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
-const pacoteVersion = require('../package.json').version
-const fetch = require('npm-registry-fetch')
-const { Minipass } = require('minipass')
-
-const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches')
-const _headers = Symbol('_headers')
-class RemoteFetcher extends Fetcher {
-  constructor (spec, opts) {
-    super(spec, opts)
-    this.resolved = this.spec.fetchSpec
-    const resolvedURL = new URL(this.resolved)
-    if (this.replaceRegistryHost !== 'never'
-      && (this.replaceRegistryHost === 'always'
-      || this.replaceRegistryHost === resolvedURL.host)) {
-      this.resolved = new URL(resolvedURL.pathname, this.registry).href
-    }
-
-    // nam is a fermented pork sausage that is good to eat
-    const nameat = this.spec.name ? `${this.spec.name}@` : ''
-    this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}`
-  }
-
-  // Don't need to cache tarball fetches in pacote, because make-fetch-happen
-  // will write into cacache anyway.
-  get [_cacheFetches] () {
-    return false
-  }
-
-  [_tarballFromResolved] () {
-    const stream = new Minipass()
-    stream.hasIntegrityEmitter = true
-
-    const fetchOpts = {
-      ...this.opts,
-      headers: this[_headers](),
-      spec: this.spec,
-      integrity: this.integrity,
-      algorithms: [this.pickIntegrityAlgorithm()],
-    }
-
-    // eslint-disable-next-line promise/always-return
-    fetch(this.resolved, fetchOpts).then(res => {
-      res.body.on('error',
-        /* istanbul ignore next - exceedingly rare and hard to simulate */
-        er => stream.emit('error', er)
-      )
-
-      res.body.on('integrity', i => {
-        this.integrity = i
-        stream.emit('integrity', i)
-      })
-
-      res.body.pipe(stream)
-    }).catch(er => stream.emit('error', er))
-
-    return stream
-  }
-
-  [_headers] () {
-    return {
-      // npm will override this, but ensure that we always send *something*
-      'user-agent': this.opts.userAgent ||
-        `pacote/${pacoteVersion} node/${process.version}`,
-      ...(this.opts.headers || {}),
-      'pacote-version': pacoteVersion,
-      'pacote-req-type': 'tarball',
-      'pacote-pkg-id': this.pkgid,
-      ...(this.integrity ? { 'pacote-integrity': String(this.integrity) }
-      : {}),
-      ...(this.opts.headers || {}),
-    }
-  }
-
-  get types () {
-    return ['remote']
-  }
-
-  // getting a packument and/or manifest is the same as with a file: spec.
-  // unpack the tarball stream, and then read from the package.json file.
-  packument () {
-    return FileFetcher.prototype.packument.apply(this)
-  }
-
-  manifest () {
-    return FileFetcher.prototype.manifest.apply(this)
-  }
-}
-module.exports = RemoteFetcher
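
The host-replacement rule in the constructor above boils down to re-rooting the resolved pathname on the configured registry; a sketch with assumed values:

    // assuming this.registry === 'https://registry.npmjs.org' and a matching host
    new URL('/abbrev/-/abbrev-1.1.1.tgz', 'https://registry.npmjs.org').href
    // => 'https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz'
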
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js
deleted file mode 100644
index 843fe5b600caf..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js
+++ /dev/null
@@ -1,15 +0,0 @@
-// add a sha to a git remote url spec
-const addGitSha = (spec, sha) => {
-  if (spec.hosted) {
-    const h = spec.hosted
-    const opt = { noCommittish: true }
-    const base = h.https && h.auth ? h.https(opt) : h.shortcut(opt)
-
-    return `${base}#${sha}`
-  } else {
-    // don't use new URL for this, because it doesn't handle scp urls
-    return spec.rawSpec.replace(/#.*$/, '') + `#${sha}`
-  }
-}
-
-module.exports = addGitSha
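
A sketch of the two branches above (the spec is illustrative): hosted specs are rebuilt from their shortcut or https form, while raw git URLs just get any existing committish replaced.

    const npa = require('npm-package-arg')
    addGitSha(npa('git+https://example.com/team/repo.git#main'), 'decafbad')
    // => 'git+https://example.com/team/repo.git#decafbad'
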
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js
deleted file mode 100644
index ac83b1793f199..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js
+++ /dev/null
@@ -1,15 +0,0 @@
-const os = require('os')
-const { resolve } = require('path')
-
-module.exports = (fakePlatform = false) => {
-  const temp = os.tmpdir()
-  const uidOrPid = process.getuid ? process.getuid() : process.pid
-  const home = os.homedir() || resolve(temp, 'npm-' + uidOrPid)
-  const platform = fakePlatform || process.platform
-  const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm'
-  const cacheRoot = (platform === 'win32' && process.env.LOCALAPPDATA) || home
-  return {
-    cacache: resolve(cacheRoot, cacheExtra, '_cacache'),
-    tufcache: resolve(cacheRoot, cacheExtra, '_tuf'),
-  }
-}
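
Usage sketch: the module exports a single function, so both default cache locations come from one call (fakePlatform exists only for tests).

    const { cacache, tufcache } = require('./cache-dir.js')()
    // e.g. ~/.npm/_cacache and ~/.npm/_tuf on non-Windows platforms
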
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js
deleted file mode 100644
index 49a3f73f537ce..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// Function to determine whether a path is in the package.bin set.
-// Used to prevent issues when people publish a package from a
-// windows machine, and then install with --no-bin-links.
-//
-// Note: this is not possible in remote or file fetchers, since
-// we don't have the manifest until AFTER we've unpacked.  But the
-// main use case is registry fetching with git a distant second,
-// so that's an acceptable edge case to not handle.
-
-const binObj = (name, bin) =>
-  typeof bin === 'string' ? { [name]: bin } : bin
-
-const hasBin = (pkg, path) => {
-  const bin = binObj(pkg.name, pkg.bin)
-  const p = path.replace(/^[^\\/]*\//, '')
-  for (const kv of Object.entries(bin)) {
-    if (kv[1] === p) {
-      return true
-    }
-  }
-  return false
-}
-
-module.exports = (pkg, path) =>
-  pkg && pkg.bin ? hasBin(pkg, path) : false
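
Two illustrative calls against the logic above: a string bin is normalized to { [pkg.name]: bin }, and the leading tarball directory is stripped before comparing.

    const isPackageBin = require('./is-package-bin.js')
    isPackageBin({ name: 'x', bin: 'cli.js' }, 'package/cli.js')   // true
    isPackageBin({ name: 'x', bin: 'cli.js' }, 'package/lib/a.js') // false
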
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js
deleted file mode 100644
index a3005c255565f..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js
+++ /dev/null
@@ -1,14 +0,0 @@
-// run an npm command
-const spawn = require('@npmcli/promise-spawn')
-
-module.exports = (npmBin, npmCommand, cwd, env, extra) => {
-  const isJS = npmBin.endsWith('.js')
-  const cmd = isJS ? process.execPath : npmBin
-  const args = (isJS ? [npmBin] : []).concat(npmCommand)
-  // when installing to run the `prepare` script for a git dep, we need
-  // to ensure that we don't run into a cycle of checking out packages
-  // in temp directories.  this lets us link previously-seen repos that
-  // are also being prepared.
-
-  return spawn(cmd, args, { cwd, env }, extra)
-}
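
A sketch of the .js handling above (paths are placeholders): when npmBin points at a script, it is run through the current node binary rather than exec'd directly.

    const npm = require('./npm.js')
    npm('/usr/lib/node_modules/npm/bin/npm-cli.js', ['install', '--force'],
      '/tmp/git-clone-dir', process.env, { message: 'install failed' })
    // spawns: process.execPath npm-cli.js install --force
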
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js
deleted file mode 100644
index d070f0f7ba2d4..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js
+++ /dev/null
@@ -1,31 +0,0 @@
-const isPackageBin = require('./is-package-bin.js')
-
-const tarCreateOptions = manifest => ({
-  cwd: manifest._resolved,
-  prefix: 'package/',
-  portable: true,
-  gzip: {
-    // forcing the level to 9 seems to avoid some
-    // platform specific optimizations that cause
-    // integrity mismatch errors due to differing
-    // end results after compression
-    level: 9,
-  },
-
-  // ensure that package bins are always executable
-  // Note that npm-packlist is already filtering out
-  // anything that is not a regular file, ignored by
-  // .npmignore or package.json "files", etc.
-  filter: (path, stat) => {
-    if (isPackageBin(manifest, path)) {
-      stat.mode |= 0o111
-    }
-    return true
-  },
-
-  // Provide a specific date in the 1980s for the benefit of zip,
-  // which is confounded by files dated at the Unix epoch 0.
-  mtime: new Date('1985-10-26T08:15:00.000Z'),
-})
-
-module.exports = tarCreateOptions
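
These options are meant to be handed straight to tar.c; a sketch assuming a manifest whose _resolved points at the unpacked package root:

    const tar = require('tar')
    const tarCreateOptions = require('./tar-create-options.js')
    const manifest = { name: 'pkg', bin: 'cli.js', _resolved: '/path/to/pkg' }
    const stream = tar.c(tarCreateOptions(manifest), ['.'])
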
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js
deleted file mode 100644
index c50cb6173b92e..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js
+++ /dev/null
@@ -1,10 +0,0 @@
-const removeTrailingSlashes = (input) => {
-  // use a loop instead of a regexp so ReDoS scanners have nothing to flag
-  let output = input
-  while (output.endsWith('/')) {
-    output = output.slice(0, -1)
-  }
-  return output
-}
-
-module.exports = removeTrailingSlashes
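
Usage sketch:

    removeTrailingSlashes('https://registry.npmjs.org///')
    // => 'https://registry.npmjs.org'
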
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json
deleted file mode 100644
index d9119065bfc3d..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json
+++ /dev/null
@@ -1,85 +0,0 @@
-{
-  "name": "pacote",
-  "version": "16.0.0",
-  "description": "JavaScript package downloader",
-  "author": "GitHub Inc.",
-  "bin": {
-    "pacote": "lib/bin.js"
-  },
-  "license": "ISC",
-  "main": "lib/index.js",
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "lint": "eslint \"**/*.js\"",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run lint -- --fix",
-    "posttest": "npm run lint",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "tap": {
-    "timeout": 300,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "devDependencies": {
-    "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0",
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
-    "hosted-git-info": "^6.0.0",
-    "mutate-fs": "^2.1.1",
-    "nock": "^13.2.4",
-    "npm-registry-mock": "^1.3.2",
-    "tap": "^16.0.1"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "keywords": [
-    "packages",
-    "npm",
-    "git"
-  ],
-  "dependencies": {
-    "@npmcli/git": "^4.0.0",
-    "@npmcli/installed-package-contents": "^2.0.1",
-    "@npmcli/promise-spawn": "^6.0.1",
-    "@npmcli/run-script": "^6.0.0",
-    "cacache": "^17.0.0",
-    "fs-minipass": "^3.0.0",
-    "minipass": "^7.0.2",
-    "npm-package-arg": "^10.0.0",
-    "npm-packlist": "^7.0.0",
-    "npm-pick-manifest": "^8.0.0",
-    "npm-registry-fetch": "^15.0.0",
-    "proc-log": "^3.0.0",
-    "promise-retry": "^2.0.1",
-    "read-package-json": "^6.0.0",
-    "read-package-json-fast": "^3.0.0",
-    "sigstore": "^1.3.0",
-    "ssri": "^10.0.0",
-    "tar": "^6.1.11"
-  },
-  "engines": {
-    "node": "^16.13.0 || >=18.0.0"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/pacote.git"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "ciVersions": [
-      "16.13.0",
-      "16.x",
-      "18.0.0",
-      "18.x"
-    ],
-    "version": "4.18.0",
-    "windowsCI": false,
-    "publish": "true"
-  }
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/package.json b/node_modules/@npmcli/metavuln-calculator/package.json
index 8db917ab9524e..4d0af031d5414 100644
--- a/node_modules/@npmcli/metavuln-calculator/package.json
+++ b/node_modules/@npmcli/metavuln-calculator/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/metavuln-calculator",
-  "version": "6.0.1",
+  "version": "7.0.0",
   "main": "lib/index.js",
   "files": [
     "bin/",
@@ -39,20 +39,20 @@
     "tap": "^16.0.1"
   },
   "dependencies": {
-    "cacache": "^17.0.0",
+    "cacache": "^18.0.0",
     "json-parse-even-better-errors": "^3.0.0",
-    "pacote": "^16.0.0",
+    "pacote": "^17.0.0",
     "semver": "^7.3.5"
   },
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "version": "4.18.0",
     "publish": "true",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/node_modules/normalize-package-data/LICENSE b/node_modules/normalize-package-data/LICENSE
deleted file mode 100644
index 19d1364a8ac08..0000000000000
--- a/node_modules/normalize-package-data/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-This package contains code originally written by Isaac Z. Schlueter.
-Used with permission.
-
-Copyright (c) Meryn Stol ("Author")
-All rights reserved.
-
-The BSD License
-
-Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/node_modules/normalize-package-data/lib/extract_description.js b/node_modules/normalize-package-data/lib/extract_description.js
deleted file mode 100644
index 631966b5f29af..0000000000000
--- a/node_modules/normalize-package-data/lib/extract_description.js
+++ /dev/null
@@ -1,24 +0,0 @@
-module.exports = extractDescription
-
-// Extracts description from contents of a readme file in markdown format
-function extractDescription (d) {
-  if (!d) {
-    return
-  }
-  if (d === 'ERROR: No README data found!') {
-    return
-  }
-  // the first block of text before the first heading
-  // that isn't the first line heading
-  d = d.trim().split('\n')
-  let s = 0
-  while (d[s] && d[s].trim().match(/^(#|$)/)) {
-    s++
-  }
-  const l = d.length
-  let e = s + 1
-  while (e < l && d[e].trim()) {
-    e++
-  }
-  return d.slice(s, e).join(' ').trim()
-}
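
An illustrative call: leading headings and blank lines are skipped, and the first prose block is joined into a single line.

    extractDescription('# pkg\n\nDoes a thing.\nAcross two lines.\n\n## Usage')
    // => 'Does a thing. Across two lines.'
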
diff --git a/node_modules/normalize-package-data/lib/fixer.js b/node_modules/normalize-package-data/lib/fixer.js
deleted file mode 100644
index bb78231d83ca9..0000000000000
--- a/node_modules/normalize-package-data/lib/fixer.js
+++ /dev/null
@@ -1,475 +0,0 @@
-var isValidSemver = require('semver/functions/valid')
-var cleanSemver = require('semver/functions/clean')
-var validateLicense = require('validate-npm-package-license')
-var hostedGitInfo = require('hosted-git-info')
-var isBuiltinModule = require('is-core-module')
-var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies']
-var extractDescription = require('./extract_description')
-var url = require('url')
-var typos = require('./typos.json')
-
-var isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
-
-module.exports = {
-  // default warning function
-  warn: function () {},
-
-  fixRepositoryField: function (data) {
-    if (data.repositories) {
-      this.warn('repositories')
-      data.repository = data.repositories[0]
-    }
-    if (!data.repository) {
-      return this.warn('missingRepository')
-    }
-    if (typeof data.repository === 'string') {
-      data.repository = {
-        type: 'git',
-        url: data.repository,
-      }
-    }
-    var r = data.repository.url || ''
-    if (r) {
-      var hosted = hostedGitInfo.fromUrl(r)
-      if (hosted) {
-        r = data.repository.url
-          = hosted.getDefaultRepresentation() === 'shortcut' ? hosted.https() : hosted.toString()
-      }
-    }
-
-    if (r.match(/github.com\/[^/]+\/[^/]+\.git\.git$/)) {
-      this.warn('brokenGitUrl', r)
-    }
-  },
-
-  fixTypos: function (data) {
-    Object.keys(typos.topLevel).forEach(function (d) {
-      if (Object.prototype.hasOwnProperty.call(data, d)) {
-        this.warn('typo', d, typos.topLevel[d])
-      }
-    }, this)
-  },
-
-  fixScriptsField: function (data) {
-    if (!data.scripts) {
-      return
-    }
-    if (typeof data.scripts !== 'object') {
-      this.warn('nonObjectScripts')
-      delete data.scripts
-      return
-    }
-    Object.keys(data.scripts).forEach(function (k) {
-      if (typeof data.scripts[k] !== 'string') {
-        this.warn('nonStringScript')
-        delete data.scripts[k]
-      } else if (typos.script[k] && !data.scripts[typos.script[k]]) {
-        this.warn('typo', k, typos.script[k], 'scripts')
-      }
-    }, this)
-  },
-
-  fixFilesField: function (data) {
-    var files = data.files
-    if (files && !Array.isArray(files)) {
-      this.warn('nonArrayFiles')
-      delete data.files
-    } else if (data.files) {
-      data.files = data.files.filter(function (file) {
-        if (!file || typeof file !== 'string') {
-          this.warn('invalidFilename', file)
-          return false
-        } else {
-          return true
-        }
-      }, this)
-    }
-  },
-
-  fixBinField: function (data) {
-    if (!data.bin) {
-      return
-    }
-    if (typeof data.bin === 'string') {
-      var b = {}
-      var match
-      if (match = data.name.match(/^@[^/]+[/](.*)$/)) {
-        b[match[1]] = data.bin
-      } else {
-        b[data.name] = data.bin
-      }
-      data.bin = b
-    }
-  },
-
-  fixManField: function (data) {
-    if (!data.man) {
-      return
-    }
-    if (typeof data.man === 'string') {
-      data.man = [data.man]
-    }
-  },
-  fixBundleDependenciesField: function (data) {
-    var bdd = 'bundledDependencies'
-    var bd = 'bundleDependencies'
-    if (data[bdd] && !data[bd]) {
-      data[bd] = data[bdd]
-      delete data[bdd]
-    }
-    if (data[bd] && !Array.isArray(data[bd])) {
-      this.warn('nonArrayBundleDependencies')
-      delete data[bd]
-    } else if (data[bd]) {
-      data[bd] = data[bd].filter(function (filtered) {
-        if (!filtered || typeof filtered !== 'string') {
-          this.warn('nonStringBundleDependency', filtered)
-          return false
-        } else {
-          if (!data.dependencies) {
-            data.dependencies = {}
-          }
-          if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
-            this.warn('nonDependencyBundleDependency', filtered)
-            data.dependencies[filtered] = '*'
-          }
-          return true
-        }
-      }, this)
-    }
-  },
-
-  fixDependencies: function (data, strict) {
-    objectifyDeps(data, this.warn)
-    addOptionalDepsToDeps(data, this.warn)
-    this.fixBundleDependenciesField(data)
-
-    ;['dependencies', 'devDependencies'].forEach(function (deps) {
-      if (!(deps in data)) {
-        return
-      }
-      if (!data[deps] || typeof data[deps] !== 'object') {
-        this.warn('nonObjectDependencies', deps)
-        delete data[deps]
-        return
-      }
-      Object.keys(data[deps]).forEach(function (d) {
-        var r = data[deps][d]
-        if (typeof r !== 'string') {
-          this.warn('nonStringDependency', d, JSON.stringify(r))
-          delete data[deps][d]
-        }
-        var hosted = hostedGitInfo.fromUrl(data[deps][d])
-        if (hosted) {
-          data[deps][d] = hosted.toString()
-        }
-      }, this)
-    }, this)
-  },
-
-  fixModulesField: function (data) {
-    if (data.modules) {
-      this.warn('deprecatedModules')
-      delete data.modules
-    }
-  },
-
-  fixKeywordsField: function (data) {
-    if (typeof data.keywords === 'string') {
-      data.keywords = data.keywords.split(/,\s+/)
-    }
-    if (data.keywords && !Array.isArray(data.keywords)) {
-      delete data.keywords
-      this.warn('nonArrayKeywords')
-    } else if (data.keywords) {
-      data.keywords = data.keywords.filter(function (kw) {
-        if (typeof kw !== 'string' || !kw) {
-          this.warn('nonStringKeyword')
-          return false
-        } else {
-          return true
-        }
-      }, this)
-    }
-  },
-
-  fixVersionField: function (data, strict) {
-    // allow "loose" semver 1.0 versions in non-strict mode
-    // enforce strict semver 2.0 compliance in strict mode
-    var loose = !strict
-    if (!data.version) {
-      data.version = ''
-      return true
-    }
-    if (!isValidSemver(data.version, loose)) {
-      throw new Error('Invalid version: "' + data.version + '"')
-    }
-    data.version = cleanSemver(data.version, loose)
-    return true
-  },
-
-  fixPeople: function (data) {
-    modifyPeople(data, unParsePerson)
-    modifyPeople(data, parsePerson)
-  },
-
-  fixNameField: function (data, options) {
-    if (typeof options === 'boolean') {
-      options = { strict: options }
-    } else if (typeof options === 'undefined') {
-      options = {}
-    }
-    var strict = options.strict
-    if (!data.name && !strict) {
-      data.name = ''
-      return
-    }
-    if (typeof data.name !== 'string') {
-      throw new Error('name field must be a string.')
-    }
-    if (!strict) {
-      data.name = data.name.trim()
-    }
-    ensureValidName(data.name, strict, options.allowLegacyCase)
-    if (isBuiltinModule(data.name)) {
-      this.warn('conflictingName', data.name)
-    }
-  },
-
-  fixDescriptionField: function (data) {
-    if (data.description && typeof data.description !== 'string') {
-      this.warn('nonStringDescription')
-      delete data.description
-    }
-    if (data.readme && !data.description) {
-      data.description = extractDescription(data.readme)
-    }
-    if (data.description === undefined) {
-      delete data.description
-    }
-    if (!data.description) {
-      this.warn('missingDescription')
-    }
-  },
-
-  fixReadmeField: function (data) {
-    if (!data.readme) {
-      this.warn('missingReadme')
-      data.readme = 'ERROR: No README data found!'
-    }
-  },
-
-  fixBugsField: function (data) {
-    if (!data.bugs && data.repository && data.repository.url) {
-      var hosted = hostedGitInfo.fromUrl(data.repository.url)
-      if (hosted && hosted.bugs()) {
-        data.bugs = { url: hosted.bugs() }
-      }
-    } else if (data.bugs) {
-      if (typeof data.bugs === 'string') {
-        if (isEmail(data.bugs)) {
-          data.bugs = { email: data.bugs }
-        /* eslint-disable-next-line node/no-deprecated-api */
-        } else if (url.parse(data.bugs).protocol) {
-          data.bugs = { url: data.bugs }
-        } else {
-          this.warn('nonEmailUrlBugsString')
-        }
-      } else {
-        bugsTypos(data.bugs, this.warn)
-        var oldBugs = data.bugs
-        data.bugs = {}
-        if (oldBugs.url) {
-          /* eslint-disable-next-line node/no-deprecated-api */
-          if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
-            data.bugs.url = oldBugs.url
-          } else {
-            this.warn('nonUrlBugsUrlField')
-          }
-        }
-        if (oldBugs.email) {
-          if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
-            data.bugs.email = oldBugs.email
-          } else {
-            this.warn('nonEmailBugsEmailField')
-          }
-        }
-      }
-      if (!data.bugs.email && !data.bugs.url) {
-        delete data.bugs
-        this.warn('emptyNormalizedBugs')
-      }
-    }
-  },
-
-  fixHomepageField: function (data) {
-    if (!data.homepage && data.repository && data.repository.url) {
-      var hosted = hostedGitInfo.fromUrl(data.repository.url)
-      if (hosted && hosted.docs()) {
-        data.homepage = hosted.docs()
-      }
-    }
-    if (!data.homepage) {
-      return
-    }
-
-    if (typeof data.homepage !== 'string') {
-      this.warn('nonUrlHomepage')
-      return delete data.homepage
-    }
-    /* eslint-disable-next-line node/no-deprecated-api */
-    if (!url.parse(data.homepage).protocol) {
-      data.homepage = 'http://' + data.homepage
-    }
-  },
-
-  fixLicenseField: function (data) {
-    const license = data.license || data.licence
-    if (!license) {
-      return this.warn('missingLicense')
-    }
-    if (
-      typeof (license) !== 'string' ||
-      license.length < 1 ||
-      license.trim() === ''
-    ) {
-      return this.warn('invalidLicense')
-    }
-    if (!validateLicense(license).validForNewPackages) {
-      return this.warn('invalidLicense')
-    }
-  },
-}
-
-function isValidScopedPackageName (spec) {
-  if (spec.charAt(0) !== '@') {
-    return false
-  }
-
-  var rest = spec.slice(1).split('/')
-  if (rest.length !== 2) {
-    return false
-  }
-
-  return rest[0] && rest[1] &&
-    rest[0] === encodeURIComponent(rest[0]) &&
-    rest[1] === encodeURIComponent(rest[1])
-}
-
-function isCorrectlyEncodedName (spec) {
-  return !spec.match(/[/@\s+%:]/) &&
-    spec === encodeURIComponent(spec)
-}
-
-function ensureValidName (name, strict, allowLegacyCase) {
-  if (name.charAt(0) === '.' ||
-      !(isValidScopedPackageName(name) || isCorrectlyEncodedName(name)) ||
-      (strict && (!allowLegacyCase) && name !== name.toLowerCase()) ||
-      name.toLowerCase() === 'node_modules' ||
-      name.toLowerCase() === 'favicon.ico') {
-    throw new Error('Invalid name: ' + JSON.stringify(name))
-  }
-}
-
-function modifyPeople (data, fn) {
-  if (data.author) {
-    data.author = fn(data.author)
-  }
-  ;['maintainers', 'contributors'].forEach(function (set) {
-    if (!Array.isArray(data[set])) {
-      return
-    }
-    data[set] = data[set].map(fn)
-  })
-  return data
-}
-
-function unParsePerson (person) {
-  if (typeof person === 'string') {
-    return person
-  }
-  var name = person.name || ''
-  var u = person.url || person.web
-  var wrappedUrl = u ? (' (' + u + ')') : ''
-  var e = person.email || person.mail
-  var wrappedEmail = e ? (' <' + e + '>') : ''
-  return name + wrappedEmail + wrappedUrl
-}
-
-function parsePerson (person) {
-  if (typeof person !== 'string') {
-    return person
-  }
-  var matchedName = person.match(/^([^(<]+)/)
-  var matchedUrl = person.match(/\(([^()]+)\)/)
-  var matchedEmail = person.match(/<([^<>]+)>/)
-  var obj = {}
-  if (matchedName && matchedName[0].trim()) {
-    obj.name = matchedName[0].trim()
-  }
-  if (matchedEmail) {
-    obj.email = matchedEmail[1]
-  }
-  if (matchedUrl) {
-    obj.url = matchedUrl[1]
-  }
-  return obj
-}
-
-function addOptionalDepsToDeps (data, warn) {
-  var o = data.optionalDependencies
-  if (!o) {
-    return
-  }
-  var d = data.dependencies || {}
-  Object.keys(o).forEach(function (k) {
-    d[k] = o[k]
-  })
-  data.dependencies = d
-}
-
-function depObjectify (deps, type, warn) {
-  if (!deps) {
-    return {}
-  }
-  if (typeof deps === 'string') {
-    deps = deps.trim().split(/[\n\r\s\t ,]+/)
-  }
-  if (!Array.isArray(deps)) {
-    return deps
-  }
-  warn('deprecatedArrayDependencies', type)
-  var o = {}
-  deps.filter(function (d) {
-    return typeof d === 'string'
-  }).forEach(function (d) {
-    d = d.trim().split(/(:?[@\s><=])/)
-    var dn = d.shift()
-    var dv = d.join('')
-    dv = dv.trim()
-    dv = dv.replace(/^@/, '')
-    o[dn] = dv
-  })
-  return o
-}
-
-function objectifyDeps (data, warn) {
-  depTypes.forEach(function (type) {
-    if (!data[type]) {
-      return
-    }
-    data[type] = depObjectify(data[type], type, warn)
-  })
-}
-
-function bugsTypos (bugs, warn) {
-  if (!bugs) {
-    return
-  }
-  Object.keys(bugs).forEach(function (k) {
-    if (typos.bugs[k]) {
-      warn('typo', k, typos.bugs[k], 'bugs')
-      bugs[typos.bugs[k]] = bugs[k]
-      delete bugs[k]
-    }
-  })
-}
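
The fixer methods above mutate the parsed package data in place. For example,
fixDependencies (through the internal objectifyDeps and depObjectify helpers)
coerces legacy string or array dependency specs into the object form. A
hedged sketch, assuming the module still resolves at this relative path:

    const fixer = require('./fixer')

    const data = { name: 'demo', dependencies: 'foo@^1.0.0 bar@~2.0.0' }
    fixer.fixDependencies(data, false)
    console.log(data.dependencies)
    // => { foo: '^1.0.0', bar: '~2.0.0' }
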
diff --git a/node_modules/normalize-package-data/lib/make_warning.js b/node_modules/normalize-package-data/lib/make_warning.js
deleted file mode 100644
index 3be9c86539952..0000000000000
--- a/node_modules/normalize-package-data/lib/make_warning.js
+++ /dev/null
@@ -1,22 +0,0 @@
-var util = require('util')
-var messages = require('./warning_messages.json')
-
-module.exports = function () {
-  var args = Array.prototype.slice.call(arguments, 0)
-  var warningName = args.shift()
-  if (warningName === 'typo') {
-    return makeTypoWarning.apply(null, args)
-  } else {
-    var msgTemplate = messages[warningName] ? messages[warningName] : warningName + ": '%s'"
-    args.unshift(msgTemplate)
-    return util.format.apply(null, args)
-  }
-}
-
-function makeTypoWarning (providedName, probableName, field) {
-  if (field) {
-    providedName = field + "['" + providedName + "']"
-    probableName = field + "['" + probableName + "']"
-  }
-  return util.format(messages.typo, providedName, probableName)
-}
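
make_warning turns the warning codes used throughout fixer.js into readable
strings via the templates in warning_messages.json. Illustrative only:

    const makeWarning = require('./make_warning')

    console.log(makeWarning('missingRepository'))
    // => 'No repository field.'
    console.log(makeWarning('typo', 'hompage', 'homepage'))
    // => 'hompage should probably be homepage.'
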
diff --git a/node_modules/normalize-package-data/lib/normalize.js b/node_modules/normalize-package-data/lib/normalize.js
deleted file mode 100644
index bf71d2c1e2235..0000000000000
--- a/node_modules/normalize-package-data/lib/normalize.js
+++ /dev/null
@@ -1,48 +0,0 @@
-module.exports = normalize
-
-var fixer = require('./fixer')
-normalize.fixer = fixer
-
-var makeWarning = require('./make_warning')
-
-var fieldsToFix = ['name', 'version', 'description', 'repository', 'modules', 'scripts',
-  'files', 'bin', 'man', 'bugs', 'keywords', 'readme', 'homepage', 'license']
-var otherThingsToFix = ['dependencies', 'people', 'typos']
-
-var thingsToFix = fieldsToFix.map(function (fieldName) {
-  return ucFirst(fieldName) + 'Field'
-})
-// two ways to do this in CoffeeScript on only one line, sub-70 chars:
-// thingsToFix = fieldsToFix.map (name) -> ucFirst(name) + "Field"
-// thingsToFix = (ucFirst(name) + "Field" for name in fieldsToFix)
-thingsToFix = thingsToFix.concat(otherThingsToFix)
-
-function normalize (data, warn, strict) {
-  if (warn === true) {
-    warn = null
-    strict = true
-  }
-  if (!strict) {
-    strict = false
-  }
-  if (!warn || data.private) {
-    warn = function (msg) { /* noop */ }
-  }
-
-  if (data.scripts &&
-      data.scripts.install === 'node-gyp rebuild' &&
-      !data.scripts.preinstall) {
-    data.gypfile = true
-  }
-  fixer.warn = function () {
-    warn(makeWarning.apply(null, arguments))
-  }
-  thingsToFix.forEach(function (thingName) {
-    fixer['fix' + ucFirst(thingName)](data, strict)
-  })
-  data._id = data.name + '@' + data.version
-}
-
-function ucFirst (string) {
-  return string.charAt(0).toUpperCase() + string.slice(1)
-}
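
Putting the pieces together, normalize() runs every fixer over a parsed
package.json object and stamps the _id field. A minimal sketch of typical
usage, with the relative require path assumed:

    const normalize = require('./normalize')

    const pkg = { name: 'demo', version: '1.0.0' }
    normalize(pkg, msg => console.warn('warning:', msg))
    console.log(pkg._id) // => 'demo@1.0.0'
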
diff --git a/node_modules/normalize-package-data/lib/safe_format.js b/node_modules/normalize-package-data/lib/safe_format.js
deleted file mode 100644
index 5fc888e5450cd..0000000000000
--- a/node_modules/normalize-package-data/lib/safe_format.js
+++ /dev/null
@@ -1,11 +0,0 @@
-var util = require('util')
-
-module.exports = function () {
-  var args = Array.prototype.slice.call(arguments, 0)
-  args.forEach(function (arg) {
-    if (!arg) {
-      throw new TypeError('Bad arguments.')
-    }
-  })
-  return util.format.apply(null, arguments)
-}
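
safe_format is a thin guard over util.format that rejects falsy arguments
before formatting. Illustrative only:

    const safeFormat = require('./safe_format')

    console.log(safeFormat('%s@%s', 'foo', '1.0.0')) // => 'foo@1.0.0'
    safeFormat('%s', '') // throws TypeError: Bad arguments.
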
diff --git a/node_modules/normalize-package-data/lib/typos.json b/node_modules/normalize-package-data/lib/typos.json
deleted file mode 100644
index 7f9dd283b30ff..0000000000000
--- a/node_modules/normalize-package-data/lib/typos.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
-  "topLevel": {
-    "dependancies": "dependencies"
-   ,"dependecies": "dependencies"
-   ,"depdenencies": "dependencies"
-   ,"devEependencies": "devDependencies"
-   ,"depends": "dependencies"
-   ,"dev-dependencies": "devDependencies"
-   ,"devDependences": "devDependencies"
-   ,"devDepenencies": "devDependencies"
-   ,"devdependencies": "devDependencies"
-   ,"repostitory": "repository"
-   ,"repo": "repository"
-   ,"prefereGlobal": "preferGlobal"
-   ,"hompage": "homepage"
-   ,"hampage": "homepage"
-   ,"autohr": "author"
-   ,"autor": "author"
-   ,"contributers": "contributors"
-   ,"publicationConfig": "publishConfig"
-   ,"script": "scripts"
-  },
-  "bugs": { "web": "url", "name": "url" },
-  "script": { "server": "start", "tests": "test" }
-}
diff --git a/node_modules/normalize-package-data/lib/warning_messages.json b/node_modules/normalize-package-data/lib/warning_messages.json
deleted file mode 100644
index 4890f506ed965..0000000000000
--- a/node_modules/normalize-package-data/lib/warning_messages.json
+++ /dev/null
@@ -1,30 +0,0 @@
-{
-  "repositories": "'repositories' (plural) Not supported. Please pick one as the 'repository' field"
-  ,"missingRepository": "No repository field."
-  ,"brokenGitUrl": "Probably broken git url: %s"
-  ,"nonObjectScripts": "scripts must be an object"
-  ,"nonStringScript": "script values must be string commands"
-  ,"nonArrayFiles": "Invalid 'files' member"
-  ,"invalidFilename": "Invalid filename in 'files' list: %s"
-  ,"nonArrayBundleDependencies": "Invalid 'bundleDependencies' list. Must be array of package names"
-  ,"nonStringBundleDependency": "Invalid bundleDependencies member: %s"
-  ,"nonDependencyBundleDependency": "Non-dependency in bundleDependencies: %s"
-  ,"nonObjectDependencies": "%s field must be an object"
-  ,"nonStringDependency": "Invalid dependency: %s %s"
-  ,"deprecatedArrayDependencies": "specifying %s as array is deprecated"
-  ,"deprecatedModules": "modules field is deprecated"
-  ,"nonArrayKeywords": "keywords should be an array of strings"
-  ,"nonStringKeyword": "keywords should be an array of strings"
-  ,"conflictingName": "%s is also the name of a node core module."
-  ,"nonStringDescription": "'description' field should be a string"
-  ,"missingDescription": "No description"
-  ,"missingReadme": "No README data"
-  ,"missingLicense": "No license field."
-  ,"nonEmailUrlBugsString": "Bug string field must be url, email, or {email,url}"
-  ,"nonUrlBugsUrlField": "bugs.url field must be a string url. Deleted."
-  ,"nonEmailBugsEmailField": "bugs.email field must be a string email. Deleted."
-  ,"emptyNormalizedBugs": "Normalized value of bugs field is an empty object. Deleted."
-  ,"nonUrlHomepage": "homepage field must be a string url. Deleted."
-  ,"invalidLicense": "license should be a valid SPDX license expression"
-  ,"typo": "%s should probably be %s."
-}
diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE b/node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE
deleted file mode 100644
index 45055763dc838..0000000000000
--- a/node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright (c) 2015, Rebecca Turner
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/from-url.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/from-url.js
deleted file mode 100644
index efc1247d59d12..0000000000000
--- a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/from-url.js
+++ /dev/null
@@ -1,122 +0,0 @@
-'use strict'
-
-const parseUrl = require('./parse-url')
-
-// look for github shorthand inputs, such as npm/cli
-const isGitHubShorthand = (arg) => {
-  // it cannot contain whitespace before the first #
-  // it cannot start with a / because that's probably an absolute file path
-  // but it must include a slash since repos are username/repository
-  // it cannot start with a . because that's probably a relative file path
-  // it cannot start with an @ because that's a scoped package if it passes the other tests
-  // it cannot contain a : before a # because that tells us that there's a protocol
-  // a second / may not exist before a #
-  const firstHash = arg.indexOf('#')
-  const firstSlash = arg.indexOf('/')
-  const secondSlash = arg.indexOf('/', firstSlash + 1)
-  const firstColon = arg.indexOf(':')
-  const firstSpace = /\s/.exec(arg)
-  const firstAt = arg.indexOf('@')
-
-  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
-  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
-  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
-  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
-  const hasSlash = firstSlash > 0
-  // if a # is found, what we really want to know is that the character
-  // immediately before # is not a /
-  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
-  const doesNotStartWithDot = !arg.startsWith('.')
-
-  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
-    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
-    secondSlashOnlyAfterHash
-}
-
-module.exports = (giturl, opts, { gitHosts, protocols }) => {
-  if (!giturl) {
-    return
-  }
-
-  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
-  const parsed = parseUrl(correctedUrl, protocols)
-  if (!parsed) {
-    return
-  }
-
-  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
-  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
-    ? parsed.hostname.slice(4)
-    : parsed.hostname]
-  const gitHostName = gitHostShortcut || gitHostDomain
-  if (!gitHostName) {
-    return
-  }
-
-  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
-  let auth = null
-  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
-    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
-  }
-
-  let committish = null
-  let user = null
-  let project = null
-  let defaultRepresentation = null
-
-  try {
-    if (gitHostShortcut) {
-      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
-      const firstAt = pathname.indexOf('@')
-      // we ignore auth for shortcuts, so just trim it out
-      if (firstAt > -1) {
-        pathname = pathname.slice(firstAt + 1)
-      }
-
-      const lastSlash = pathname.lastIndexOf('/')
-      if (lastSlash > -1) {
-        user = decodeURIComponent(pathname.slice(0, lastSlash))
-        // we want nulls only, never empty strings
-        if (!user) {
-          user = null
-        }
-        project = decodeURIComponent(pathname.slice(lastSlash + 1))
-      } else {
-        project = decodeURIComponent(pathname)
-      }
-
-      if (project.endsWith('.git')) {
-        project = project.slice(0, -4)
-      }
-
-      if (parsed.hash) {
-        committish = decodeURIComponent(parsed.hash.slice(1))
-      }
-
-      defaultRepresentation = 'shortcut'
-    } else {
-      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
-        return
-      }
-
-      const segments = gitHostInfo.extract(parsed)
-      if (!segments) {
-        return
-      }
-
-      user = segments.user && decodeURIComponent(segments.user)
-      project = decodeURIComponent(segments.project)
-      committish = decodeURIComponent(segments.committish)
-      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
-    }
-  } catch (err) {
-    /* istanbul ignore else */
-    if (err instanceof URIError) {
-      return
-    } else {
-      throw err
-    }
-  }
-
-  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
-}
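
The isGitHubShorthand checks above are what let a bare 'user/repo' spec
resolve as a GitHub shortcut. A sketch through the package's public entry
point rather than this internal module (assumes a context where this
vendored copy of hosted-git-info resolves):

    const hostedGitInfo = require('hosted-git-info')

    const info = hostedGitInfo.fromUrl('npm/cli#v10.0.0')
    console.log(info.type, info.user, info.project, info.committish)
    // => 'github' 'npm' 'cli' 'v10.0.0'
    console.log(info.getDefaultRepresentation()) // => 'shortcut'
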
diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/hosts.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/hosts.js
deleted file mode 100644
index 013712b7842c8..0000000000000
--- a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/hosts.js
+++ /dev/null
@@ -1,228 +0,0 @@
-/* eslint-disable max-len */
-
-'use strict'
-
-const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
-const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
-const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-')
-
-const defaults = {
-  sshtemplate: ({ domain, user, project, committish }) =>
-    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, user, project, committish }) =>
-    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
-  browsetemplate: ({ domain, user, project, committish, treepath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  docstemplate: ({ domain, user, project, treepath, committish }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
-  shortcuttemplate: ({ type, user, project, committish }) =>
-    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ user, project, committish }) =>
-    `${user}/${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, user, project }) =>
-    `https://${domain}/${user}/${project}/issues`,
-  hashformat: formatHashFragment,
-}
-
-const hosts = {}
-hosts.github = {
-  // First two are insecure and generally shouldn't be used any more, but
-  // they are still supported.
-  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'github.com',
-  treepath: 'tree',
-  blobpath: 'blob',
-  editpath: 'edit',
-  filetemplate: ({ auth, user, project, committish, path }) =>
-    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
-  gittemplate: ({ auth, domain, user, project, committish }) =>
-    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, type, committish] = url.pathname.split('/', 5)
-    if (type && type !== 'tree') {
-      return
-    }
-
-    if (!type) {
-      committish = url.hash.slice(1)
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish }
-  },
-}
-
-hosts.bitbucket = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'bitbucket.org',
-  treepath: 'src',
-  blobpath: 'src',
-  editpath: '?mode=edit',
-  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (['get'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gitlab = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gitlab.com',
-  treepath: 'tree',
-  blobpath: 'tree',
-  editpath: '-/edit',
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    const path = url.pathname.slice(1)
-    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
-      return
-    }
-
-    const segments = path.split('/')
-    let project = segments.pop()
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    const user = segments.join('/')
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gist = {
-  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gist.github.com',
-  editpath: 'edit',
-  sshtemplate: ({ domain, project, committish }) =>
-    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, project, committish }) =>
-    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
-  browsetemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  docstemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  httpstemplate: ({ domain, project, committish }) =>
-    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ user, project, committish, path }) =>
-    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
-  shortcuttemplate: ({ type, project, committish }) =>
-    `${type}:${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ project, committish }) =>
-    `${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, project }) =>
-    `https://${domain}/${project}`,
-  gittemplate: ({ domain, project, committish }) =>
-    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ project, committish }) =>
-    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (aux === 'raw') {
-      return
-    }
-
-    if (!project) {
-      if (!user) {
-        return
-      }
-
-      project = user
-      user = null
-    }
-
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-  hashformat: function (fragment) {
-    return fragment && 'file-' + formatHashFragment(fragment)
-  },
-}
-
-hosts.sourcehut = {
-  protocols: ['git+ssh:', 'https:'],
-  domain: 'git.sr.ht',
-  treepath: 'tree',
-  blobpath: 'tree',
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
-  httpstemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
-  bugstemplate: ({ user, project }) =>
-    `https://todo.sr.ht/${user}/${project}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-
-    // tarball url
-    if (['archive'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  hosts[name] = Object.assign({}, defaults, host)
-}
-
-module.exports = hosts
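
Each host entry above is merged over the shared defaults, so a host only
overrides the templates that differ from the common pattern. A hedged sketch
of the resulting URL generation through the public API:

    const hostedGitInfo = require('hosted-git-info')

    const info = hostedGitInfo.fromUrl('https://github.com/npm/cli')
    console.log(info.tarball({ committish: 'v10.0.0' }))
    // => 'https://codeload.github.com/npm/cli/tar.gz/v10.0.0'
    console.log(info.browse('docs', 'readme'))
    // => 'https://github.com/npm/cli/tree/HEAD/docs#readme'
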
diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js
deleted file mode 100644
index a7339c217e9a3..0000000000000
--- a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js
+++ /dev/null
@@ -1,179 +0,0 @@
-'use strict'
-
-const LRU = require('lru-cache')
-const hosts = require('./hosts.js')
-const fromUrl = require('./from-url.js')
-const parseUrl = require('./parse-url.js')
-
-const cache = new LRU({ max: 1000 })
-
-class GitHost {
-  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
-    Object.assign(this, GitHost.#gitHosts[type], {
-      type,
-      user,
-      auth,
-      project,
-      committish,
-      default: defaultRepresentation,
-      opts,
-    })
-  }
-
-  static #gitHosts = { byShortcut: {}, byDomain: {} }
-  static #protocols = {
-    'git+ssh:': { name: 'sshurl' },
-    'ssh:': { name: 'sshurl' },
-    'git+https:': { name: 'https', auth: true },
-    'git:': { auth: true },
-    'http:': { auth: true },
-    'https:': { auth: true },
-    'git+http:': { auth: true },
-  }
-
-  static addHost (name, host) {
-    GitHost.#gitHosts[name] = host
-    GitHost.#gitHosts.byDomain[host.domain] = name
-    GitHost.#gitHosts.byShortcut[`${name}:`] = name
-    GitHost.#protocols[`${name}:`] = { name }
-  }
-
-  static fromUrl (giturl, opts) {
-    if (typeof giturl !== 'string') {
-      return
-    }
-
-    const key = giturl + JSON.stringify(opts || {})
-
-    if (!cache.has(key)) {
-      const hostArgs = fromUrl(giturl, opts, {
-        gitHosts: GitHost.#gitHosts,
-        protocols: GitHost.#protocols,
-      })
-      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
-    }
-
-    return cache.get(key)
-  }
-
-  static parseUrl (url) {
-    return parseUrl(url)
-  }
-
-  #fill (template, opts) {
-    if (typeof template !== 'function') {
-      return null
-    }
-
-    const options = { ...this, ...this.opts, ...opts }
-
-    // the path should always be set so we don't end up with 'undefined' in urls
-    if (!options.path) {
-      options.path = ''
-    }
-
-    // template functions will insert the leading slash themselves
-    if (options.path.startsWith('/')) {
-      options.path = options.path.slice(1)
-    }
-
-    if (options.noCommittish) {
-      options.committish = null
-    }
-
-    const result = template(options)
-    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
-  }
-
-  hash () {
-    return this.committish ? `#${this.committish}` : ''
-  }
-
-  ssh (opts) {
-    return this.#fill(this.sshtemplate, opts)
-  }
-
-  sshurl (opts) {
-    return this.#fill(this.sshurltemplate, opts)
-  }
-
-  browse (path, ...args) {
-    // not a string, treat path as opts
-    if (typeof path !== 'string') {
-      return this.#fill(this.browsetemplate, path)
-    }
-
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browsetreetemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  // If the path is known to be a file, then browseFile should be used. For some hosts
-  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
-  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
-  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
-  // does not redirect to a different commit.
-  browseFile (path, ...args) {
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browseblobtemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  docs (opts) {
-    return this.#fill(this.docstemplate, opts)
-  }
-
-  bugs (opts) {
-    return this.#fill(this.bugstemplate, opts)
-  }
-
-  https (opts) {
-    return this.#fill(this.httpstemplate, opts)
-  }
-
-  git (opts) {
-    return this.#fill(this.gittemplate, opts)
-  }
-
-  shortcut (opts) {
-    return this.#fill(this.shortcuttemplate, opts)
-  }
-
-  path (opts) {
-    return this.#fill(this.pathtemplate, opts)
-  }
-
-  tarball (opts) {
-    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
-  }
-
-  file (path, opts) {
-    return this.#fill(this.filetemplate, { ...opts, path })
-  }
-
-  edit (path, opts) {
-    return this.#fill(this.edittemplate, { ...opts, path })
-  }
-
-  getDefaultRepresentation () {
-    return this.default
-  }
-
-  toString (opts) {
-    if (this.default && typeof this[this.default] === 'function') {
-      return this[this.default](opts)
-    }
-
-    return this.sshurl(opts)
-  }
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  GitHost.addHost(name, host)
-}
-
-module.exports = GitHost
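
The GitHost class memoizes parsed results in the LRU cache and exposes one
method per template. A minimal usage sketch (the scp-style input is
normalized by parse-url before matching):

    const GitHost = require('hosted-git-info')

    const info = GitHost.fromUrl('git@github.com:npm/cli.git#v10.0.0')
    console.log(info.shortcut()) // => 'github:npm/cli#v10.0.0'
    console.log(info.https())    // => 'git+https://github.com/npm/cli.git#v10.0.0'
    console.log(info.toString()) // => 'git+ssh://git@github.com/npm/cli.git#v10.0.0'
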
diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/parse-url.js
deleted file mode 100644
index 7d5489c008ab4..0000000000000
--- a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/parse-url.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const url = require('url')
-
-const lastIndexOfBefore = (str, char, beforeChar) => {
-  const startPosition = str.indexOf(beforeChar)
-  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
-}
-
-const safeUrl = (u) => {
-  try {
-    return new url.URL(u)
-  } catch {
-    // this fn should never throw
-  }
-}
-
-// accepts input like git:github.com:user/repo and inserts the // after the first :
-const correctProtocol = (arg, protocols) => {
-  const firstColon = arg.indexOf(':')
-  const proto = arg.slice(0, firstColon + 1)
-  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
-    return arg
-  }
-
-  const firstAt = arg.indexOf('@')
-  if (firstAt > -1) {
-    if (firstAt > firstColon) {
-      return `git+ssh://${arg}`
-    } else {
-      return arg
-    }
-  }
-
-  const doubleSlash = arg.indexOf('//')
-  if (doubleSlash === firstColon + 1) {
-    return arg
-  }
-
-  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
-}
-
-// attempt to correct an scp style url so that it will parse with `new URL()`
-const correctUrl = (giturl) => {
-  // ignore @ that come after the first hash since that denotes the start
-  // of a committish which can contain @ characters
-  const firstAt = lastIndexOfBefore(giturl, '@', '#')
-  // ignore colons that come after the hash since that could include colons such as:
-  // git@github.com:user/package-2#semver:^1.0.0
-  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
-
-  if (lastColonBeforeHash > firstAt) {
-    // the last : comes after the first @ (or there is no @)
-    // like it would in:
-    // proto://hostname.com:user/repo
-    // username@hostname.com:user/repo
-    // :password@hostname.com:user/repo
-    // username:password@hostname.com:user/repo
-    // proto://username@hostname.com:user/repo
-    // proto://:password@hostname.com:user/repo
-    // proto://username:password@hostname.com:user/repo
-    // then we replace the last : with a / to create a valid path
-    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
-  }
-
-  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
-    // we have no : at all
-    // as it would be in:
-    // username@hostname.com/user/repo
-    // then we prepend a protocol
-    giturl = `git+ssh://${giturl}`
-  }
-
-  return giturl
-}
-
-module.exports = (giturl, protocols) => {
-  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
-  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
-}
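
parse-url's correction passes make scp-style addresses digestible by the
WHATWG URL parser; the same logic is exposed publicly as GitHost.parseUrl.
Illustrative only:

    const hostedGitInfo = require('hosted-git-info')

    const u = hostedGitInfo.parseUrl('git@github.com:npm/cli.git')
    console.log(u.protocol, u.hostname, u.pathname)
    // => 'git+ssh:' 'github.com' '/npm/cli.git'
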
diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/package.json b/node_modules/normalize-package-data/node_modules/hosted-git-info/package.json
deleted file mode 100644
index 612259948afe7..0000000000000
--- a/node_modules/normalize-package-data/node_modules/hosted-git-info/package.json
+++ /dev/null
@@ -1,59 +0,0 @@
-{
-  "name": "hosted-git-info",
-  "version": "6.1.1",
-  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
-  "main": "./lib/index.js",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/hosted-git-info.git"
-  },
-  "keywords": [
-    "git",
-    "github",
-    "bitbucket",
-    "gitlab"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/hosted-git-info/issues"
-  },
-  "homepage": "https://github.com/npm/hosted-git-info",
-  "scripts": {
-    "posttest": "npm run lint",
-    "snap": "tap",
-    "test": "tap",
-    "test:coverage": "tap --coverage-report=html",
-    "lint": "eslint \"**/*.js\"",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run lint -- --fix",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "dependencies": {
-    "lru-cache": "^7.5.1"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.7.1",
-    "tap": "^16.0.1"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "color": 1,
-    "coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.7.1"
-  }
-}
diff --git a/node_modules/normalize-package-data/node_modules/lru-cache/LICENSE b/node_modules/normalize-package-data/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/normalize-package-data/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/normalize-package-data/node_modules/lru-cache/index.js b/node_modules/normalize-package-data/node_modules/lru-cache/index.js
deleted file mode 100644
index 48e99fe5e5a70..0000000000000
--- a/node_modules/normalize-package-data/node_modules/lru-cache/index.js
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.now + status.ttl - status.start
-      }
-    }
-
-    // debounce calls to perf.now() to ttlResolution ms (default 1ms) so
-    // we're not hitting that costly call repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation return invalid (expect positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet
-        // it's not stale, which means this isn't a staleWhileRefetching.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-module.exports = LRUCache
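
The class removed above is lru-cache@7's CommonJS build. For reference while reviewing the deletion, here is a minimal usage sketch of the API it defines — option names are taken from the constructor destructuring and the fetchMethod signature from backgroundFetch()'s fetchOpts; lookupSomewhere is a hypothetical loader, not part of the package.

// Hedged usage sketch of the lru-cache@7 API defined in the deleted index.js.
const LRUCache = require('lru-cache')

const cache = new LRUCache({
  max: 100,          // at least one of max, maxSize, or ttl is required
  ttl: 60 * 1000,    // entries go stale after one minute
  allowStale: false, // stale entries are dropped on get() by default
  // called by cache.fetch() on a miss or stale hit; the signal aborts
  // when the entry is replaced, evicted, or deleted mid-fetch
  fetchMethod: async (key, staleValue, { signal }) => {
    return lookupSomewhere(key, { signal }) // hypothetical async loader
  },
})

cache.set('a', 1)
cache.get('a')             // => 1; 'a' becomes the most recently used entry
await cache.fetch('b')     // runs fetchMethod('b', ...) and caches the result
cache.getRemainingTTL('a') // ms until 'a' goes stale (Infinity if no TTL is set)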
diff --git a/node_modules/normalize-package-data/node_modules/lru-cache/index.mjs b/node_modules/normalize-package-data/node_modules/lru-cache/index.mjs
deleted file mode 100644
index 4a0b4813ec515..0000000000000
--- a/node_modules/normalize-package-data/node_modules/lru-cache/index.mjs
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.now + status.ttl - status.start
-      }
-    }
-
-    // debounce calls to perf.now() to 1s so we're not hitting
-    // that costly call repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation return invalid (expect positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet
-        // it's not stale, which means this isn't a staleWhileRefetching.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-export default LRUCache
diff --git a/node_modules/normalize-package-data/node_modules/lru-cache/package.json b/node_modules/normalize-package-data/node_modules/lru-cache/package.json
deleted file mode 100644
index 9684991727e7a..0000000000000
--- a/node_modules/normalize-package-data/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,96 +0,0 @@
-{
-  "name": "lru-cache",
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "7.18.3",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "node ./scripts/transpile-to-esm.js",
-    "size": "size-limit",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc ./index.d.ts"
-  },
-  "type": "commonjs",
-  "main": "./index.js",
-  "module": "./index.mjs",
-  "types": "./index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./index.d.ts",
-        "default": "./index.mjs"
-      },
-      "require": {
-        "types": "./index.d.ts",
-        "default": "./index.js"
-      }
-    },
-    "./package.json": "./package.json"
-  },
-  "repository": "git://github.com/isaacs/node-lru-cache.git",
-  "devDependencies": {
-    "@size-limit/preset-small-lib": "^7.0.8",
-    "@types/node": "^17.0.31",
-    "@types/tap": "^15.0.6",
-    "benchmark": "^2.1.4",
-    "c8": "^7.11.2",
-    "clock-mock": "^1.0.6",
-    "eslint-config-prettier": "^8.5.0",
-    "prettier": "^2.6.2",
-    "size-limit": "^7.0.8",
-    "tap": "^16.3.4",
-    "ts-node": "^10.7.0",
-    "tslib": "^2.4.0",
-    "typedoc": "^0.23.24",
-    "typescript": "^4.6.4"
-  },
-  "license": "ISC",
-  "files": [
-    "index.js",
-    "index.mjs",
-    "index.d.ts"
-  ],
-  "engines": {
-    "node": ">=12"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--include=index.js"
-    ],
-    "node-arg": [
-      "--expose-gc",
-      "--require",
-      "ts-node/register"
-    ],
-    "ts": false
-  },
-  "size-limit": [
-    {
-      "path": "./index.js"
-    }
-  ]
-}
diff --git a/node_modules/normalize-package-data/package.json b/node_modules/normalize-package-data/package.json
deleted file mode 100644
index ec2773bfbe6bf..0000000000000
--- a/node_modules/normalize-package-data/package.json
+++ /dev/null
@@ -1,55 +0,0 @@
-{
-  "name": "normalize-package-data",
-  "version": "5.0.0",
-  "author": "GitHub Inc.",
-  "description": "Normalizes data that can be found in package.json files.",
-  "license": "BSD-2-Clause",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/normalize-package-data.git"
-  },
-  "main": "lib/normalize.js",
-  "scripts": {
-    "test": "tap",
-    "npmclilint": "npmcli-lint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "dependencies": {
-    "hosted-git-info": "^6.0.0",
-    "is-core-module": "^2.8.1",
-    "semver": "^7.3.5",
-    "validate-npm-package-license": "^3.0.4"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^3.0.1",
-    "@npmcli/template-oss": "4.5.1",
-    "tap": "^16.0.1"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.5.1"
-  },
-  "tap": {
-    "branches": 86,
-    "functions": 92,
-    "lines": 86,
-    "statements": 86,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/node_modules/read-package-json/LICENSE b/node_modules/read-package-json/LICENSE
deleted file mode 100644
index 052085c436514..0000000000000
--- a/node_modules/read-package-json/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/read-package-json/lib/read-json.js b/node_modules/read-package-json/lib/read-json.js
deleted file mode 100644
index d35f09ebd208f..0000000000000
--- a/node_modules/read-package-json/lib/read-json.js
+++ /dev/null
@@ -1,589 +0,0 @@
-var fs = require('fs')
-
-var path = require('path')
-
-var { glob } = require('glob')
-var normalizeData = require('normalize-package-data')
-var safeJSON = require('json-parse-even-better-errors')
-var util = require('util')
-var normalizePackageBin = require('npm-normalize-package-bin')
-
-module.exports = readJson
-
-// put more stuff on here to customize.
-readJson.extraSet = [
-  bundleDependencies,
-  gypfile,
-  serverjs,
-  scriptpath,
-  authors,
-  readme,
-  mans,
-  bins,
-  githead,
-  fillTypes,
-]
-
-var typoWarned = {}
-var cache = {}
-
-function readJson (file, log_, strict_, cb_) {
-  var log, strict, cb
-  for (var i = 1; i < arguments.length - 1; i++) {
-    if (typeof arguments[i] === 'boolean') {
-      strict = arguments[i]
-    } else if (typeof arguments[i] === 'function') {
-      log = arguments[i]
-    }
-  }
-
-  if (!log) {
-    log = function () {}
-  }
-  cb = arguments[arguments.length - 1]
-
-  readJson_(file, log, strict, cb)
-}
-
-function readJson_ (file, log, strict, cb) {
-  fs.readFile(file, 'utf8', function (er, d) {
-    parseJson(file, er, d, log, strict, cb)
-  })
-}
-
-function stripBOM (content) {
-  // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
-  // because the buffer-to-string conversion in `fs.readFileSync()`
-  // translates it to FEFF, the UTF-16 BOM.
-  if (content.charCodeAt(0) === 0xFEFF) {
-    content = content.slice(1)
-  }
-  return content
-}
-
-function jsonClone (obj) {
-  if (obj == null) {
-    return obj
-  } else if (Array.isArray(obj)) {
-    var newarr = new Array(obj.length)
-    for (var ii in obj) {
-      newarr[ii] = jsonClone(obj[ii])
-    }
-    return newarr
-  } else if (typeof obj === 'object') {
-    var newobj = {}
-    for (var kk in obj) {
-      newobj[kk] = jsonClone(obj[kk])
-    }
-    return newobj
-  } else {
-    return obj
-  }
-}
-
-function parseJson (file, er, d, log, strict, cb) {
-  if (er && er.code === 'ENOENT') {
-    return fs.stat(path.dirname(file), function (err, stat) {
-      if (!err && stat && !stat.isDirectory()) {
-        // ENOTDIR isn't used on Windows, but npm expects it.
-        er = Object.create(er)
-        er.code = 'ENOTDIR'
-        return cb(er)
-      } else {
-        return indexjs(file, er, log, strict, cb)
-      }
-    })
-  }
-  if (er) {
-    return cb(er)
-  }
-
-  if (cache[d]) {
-    return cb(null, jsonClone(cache[d]))
-  }
-
-  var data
-
-  try {
-    data = safeJSON(stripBOM(d))
-    for (var key in data) {
-      if (/^_/.test(key)) {
-        delete data[key]
-      }
-    }
-  } catch (jsonErr) {
-    data = parseIndex(d)
-    if (!data) {
-      return cb(parseError(jsonErr, file))
-    }
-  }
-  extrasCached(file, d, data, log, strict, cb)
-}
-
-function extrasCached (file, d, data, log, strict, cb) {
-  extras(file, data, log, strict, function (err, extrasData) {
-    if (!err) {
-      cache[d] = jsonClone(extrasData)
-    }
-    cb(err, extrasData)
-  })
-}
-
-function indexjs (file, er, log, strict, cb) {
-  if (path.basename(file) === 'index.js') {
-    return cb(er)
-  }
-
-  var index = path.resolve(path.dirname(file), 'index.js')
-  fs.readFile(index, 'utf8', function (er2, d) {
-    if (er2) {
-      return cb(er)
-    }
-
-    if (cache[d]) {
-      return cb(null, cache[d])
-    }
-
-    var data = parseIndex(d)
-    if (!data) {
-      return cb(er)
-    }
-
-    extrasCached(file, d, data, log, strict, cb)
-  })
-}
-
-readJson.extras = extras
-function extras (file, data, log_, strict_, cb_) {
-  var log, strict, cb
-  for (var i = 2; i < arguments.length - 1; i++) {
-    if (typeof arguments[i] === 'boolean') {
-      strict = arguments[i]
-    } else if (typeof arguments[i] === 'function') {
-      log = arguments[i]
-    }
-  }
-
-  if (!log) {
-    log = function () {}
-  }
-  cb = arguments[i]
-
-  var set = readJson.extraSet
-  var n = set.length
-  var errState = null
-  set.forEach(function (fn) {
-    fn(file, data, then)
-  })
-
-  function then (er) {
-    if (errState) {
-      return
-    }
-    if (er) {
-      return cb(errState = er)
-    }
-    if (--n > 0) {
-      return
-    }
-    final(file, data, log, strict, cb)
-  }
-}
-
-function scriptpath (file, data, cb) {
-  if (!data.scripts) {
-    return cb(null, data)
-  }
-  var k = Object.keys(data.scripts)
-  k.forEach(scriptpath_, data.scripts)
-  cb(null, data)
-}
-
-function scriptpath_ (key) {
-  var s = this[key]
-  // This is never allowed, and only causes problems
-  if (typeof s !== 'string') {
-    return delete this[key]
-  }
-
-  var spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
-  if (s.match(spre)) {
-    this[key] = this[key].replace(spre, '')
-  }
-}
-
-function gypfile (file, data, cb) {
-  var dir = path.dirname(file)
-  var s = data.scripts || {}
-  if (s.install || s.preinstall) {
-    return cb(null, data)
-  }
-
-  if (data.gypfile === false) {
-    return cb(null, data)
-  }
-  glob('*.gyp', { cwd: dir })
-    .then(files => gypfile_(file, data, files, cb))
-    .catch(er => cb(er))
-}
-
-function gypfile_ (file, data, files, cb) {
-  if (!files.length) {
-    return cb(null, data)
-  }
-  var s = data.scripts || {}
-  s.install = 'node-gyp rebuild'
-  data.scripts = s
-  data.gypfile = true
-  return cb(null, data)
-}
-
-function serverjs (file, data, cb) {
-  var dir = path.dirname(file)
-  var s = data.scripts || {}
-  if (s.start) {
-    return cb(null, data)
-  }
-  fs.access(path.join(dir, 'server.js'), (err) => {
-    if (!err) {
-      s.start = 'node server.js'
-      data.scripts = s
-    }
-    return cb(null, data)
-  })
-}
-
-function authors (file, data, cb) {
-  if (data.contributors) {
-    return cb(null, data)
-  }
-  var af = path.resolve(path.dirname(file), 'AUTHORS')
-  fs.readFile(af, 'utf8', function (er, ad) {
-    // ignore error.  just checking it.
-    if (er) {
-      return cb(null, data)
-    }
-    authors_(file, data, ad, cb)
-  })
-}
-
-function authors_ (file, data, ad, cb) {
-  ad = ad.split(/\r?\n/g).map(function (line) {
-    return line.replace(/^\s*#.*$/, '').trim()
-  }).filter(function (line) {
-    return line
-  })
-  data.contributors = ad
-  return cb(null, data)
-}
-
-function readme (file, data, cb) {
-  if (data.readme) {
-    return cb(null, data)
-  }
-  var dir = path.dirname(file)
-  var globOpts = { cwd: dir, nocase: true, mark: true }
-  glob('{README,README.*}', globOpts)
-    .then(files => {
-      // don't accept directories.
-      files = files.filter(function (filtered) {
-        return !filtered.match(/\/$/)
-      })
-      if (!files.length) {
-        return cb()
-      }
-      var fn = preferMarkdownReadme(files)
-      var rm = path.resolve(dir, fn)
-      return readme_(file, data, rm, cb)
-    })
-    .catch(er => cb(er))
-}
-
-function preferMarkdownReadme (files) {
-  var fallback = 0
-  var re = /\.m?a?r?k?d?o?w?n?$/i
-  for (var i = 0; i < files.length; i++) {
-    if (files[i].match(re)) {
-      return files[i]
-    } else if (files[i].match(/README$/)) {
-      fallback = i
-    }
-  }
-  // prefer README.md, followed by README; otherwise, return
-  // the first filename (which could be README)
-  return files[fallback]
-}
-
-function readme_ (file, data, rm, cb) {
-  var rmfn = path.basename(rm)
-  fs.readFile(rm, 'utf8', function (er, rmData) {
-    // maybe not readable, or something.
-    if (er) {
-      return cb()
-    }
-    data.readme = rmData
-    data.readmeFilename = rmfn
-    return cb(er, data)
-  })
-}
-
-function mans (file, data, cb) {
-  let cwd = data.directories && data.directories.man
-  if (data.man || !cwd) {
-    return cb(null, data)
-  }
-  const dirname = path.dirname(file)
-  cwd = path.resolve(path.dirname(file), cwd)
-  glob('**/*.[0-9]', { cwd })
-    .then(mansGlob => {
-      data.man = mansGlob.map(man =>
-        path.relative(dirname, path.join(cwd, man)).split(path.sep).join('/')
-      )
-      return cb(null, data)
-    })
-    .catch(er => cb(er))
-}
-
-function bins (file, data, cb) {
-  data = normalizePackageBin(data)
-
-  var m = data.directories && data.directories.bin
-  if (data.bin || !m) {
-    return cb(null, data)
-  }
-
-  m = path.resolve(path.dirname(file), path.join('.', path.join('/', m)))
-  glob('**', { cwd: m })
-    .then(binsGlob => bins_(file, data, binsGlob, cb))
-    .catch(er => cb(er))
-}
-
-function bins_ (file, data, binsGlob, cb) {
-  var m = (data.directories && data.directories.bin) || '.'
-  data.bin = binsGlob.reduce(function (acc, mf) {
-    if (mf && mf.charAt(0) !== '.') {
-      var f = path.basename(mf)
-      acc[f] = path.join(m, mf)
-    }
-    return acc
-  }, {})
-  return cb(null, normalizePackageBin(data))
-}
-
-function bundleDependencies (file, data, cb) {
-  var bd = 'bundleDependencies'
-  var bdd = 'bundledDependencies'
-  // normalize key name
-  if (data[bdd] !== undefined) {
-    if (data[bd] === undefined) {
-      data[bd] = data[bdd]
-    }
-    delete data[bdd]
-  }
-  if (data[bd] === false) {
-    delete data[bd]
-  } else if (data[bd] === true) {
-    data[bd] = Object.keys(data.dependencies || {})
-  } else if (data[bd] !== undefined && !Array.isArray(data[bd])) {
-    delete data[bd]
-  }
-  return cb(null, data)
-}
-
-function githead (file, data, cb) {
-  if (data.gitHead) {
-    return cb(null, data)
-  }
-  var dir = path.dirname(file)
-  var head = path.resolve(dir, '.git/HEAD')
-  fs.readFile(head, 'utf8', function (er, headData) {
-    if (er) {
-      var parent = path.dirname(dir)
-      if (parent === dir) {
-        return cb(null, data)
-      }
-      return githead(dir, data, cb)
-    }
-    githead_(data, dir, headData, cb)
-  })
-}
-
-function githead_ (data, dir, head, cb) {
-  if (!head.match(/^ref: /)) {
-    data.gitHead = head.trim()
-    return cb(null, data)
-  }
-  var headRef = head.replace(/^ref: /, '').trim()
-  var headFile = path.resolve(dir, '.git', headRef)
-  fs.readFile(headFile, 'utf8', function (er, headData) {
-    if (er || !headData) {
-      var packFile = path.resolve(dir, '.git/packed-refs')
-      return fs.readFile(packFile, 'utf8', function (readFileErr, refs) {
-        if (readFileErr || !refs) {
-          return cb(null, data)
-        }
-        refs = refs.split('\n')
-        for (var i = 0; i < refs.length; i++) {
-          var match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
-          if (match && match[2].trim() === headRef) {
-            data.gitHead = match[1]
-            break
-          }
-        }
-        return cb(null, data)
-      })
-    }
-    headData = headData.replace(/^ref: /, '').trim()
-    data.gitHead = headData
-    return cb(null, data)
-  })
-}
-
-/**
- * Warn if the bin references don't point to anything.  This might be better in
- * normalize-package-data if it had access to the file path.
- */
-function checkBinReferences_ (file, data, warn, cb) {
-  if (!(data.bin instanceof Object)) {
-    return cb()
-  }
-
-  var keys = Object.keys(data.bin)
-  var keysLeft = keys.length
-  if (!keysLeft) {
-    return cb()
-  }
-
-  function handleExists (relName, result) {
-    keysLeft--
-    if (!result) {
-      warn('No bin file found at ' + relName)
-    }
-    if (!keysLeft) {
-      cb()
-    }
-  }
-
-  keys.forEach(function (key) {
-    var dirName = path.dirname(file)
-    var relName = data.bin[key]
-    /* istanbul ignore if - impossible, bins have been normalized */
-    if (typeof relName !== 'string') {
-      var msg = 'Bin filename for ' + key +
-        ' is not a string: ' + util.inspect(relName)
-      warn(msg)
-      delete data.bin[key]
-      handleExists(relName, true)
-      return
-    }
-    var binPath = path.resolve(dirName, relName)
-    fs.stat(binPath, (err) => handleExists(relName, !err))
-  })
-}
-
-function final (file, data, log, strict, cb) {
-  var pId = makePackageId(data)
-
-  function warn (msg) {
-    if (typoWarned[pId]) {
-      return
-    }
-    if (log) {
-      log('package.json', pId, msg)
-    }
-  }
-
-  try {
-    normalizeData(data, warn, strict)
-  } catch (error) {
-    return cb(error)
-  }
-
-  checkBinReferences_(file, data, warn, function () {
-    typoWarned[pId] = true
-    cb(null, data)
-  })
-}
-
-function fillTypes (file, data, cb) {
-  var index = data.main || 'index.js'
-
-  if (typeof index !== 'string') {
-    return cb(new TypeError('The "main" attribute must be of type string.'))
-  }
-
-  // TODO exports is much more complicated than this in verbose format
-  // We need to support for instance
-
-  // "exports": {
-  //   ".": [
-  //     {
-  //       "default": "./lib/npm.js"
-  //     },
-  //     "./lib/npm.js"
-  //   ],
-  //   "./package.json": "./package.json"
-  // },
-  // as well as conditional exports
-
-  // if (data.exports && typeof data.exports === 'string') {
-  //   index = data.exports
-  // }
-
-  // if (data.exports && data.exports['.']) {
-  //   index = data.exports['.']
-  //   if (typeof index !== 'string') {
-  //   }
-  // }
-
-  var extless =
-    path.join(path.dirname(index), path.basename(index, path.extname(index)))
-  var dts = `./${extless}.d.ts`
-  var dtsPath = path.join(path.dirname(file), dts)
-  var hasDTSFields = 'types' in data || 'typings' in data
-  if (!hasDTSFields && fs.existsSync(dtsPath)) {
-    data.types = dts.split(path.sep).join('/')
-  }
-
-  cb(null, data)
-}
-
-function makePackageId (data) {
-  var name = cleanString(data.name)
-  var ver = cleanString(data.version)
-  return name + '@' + ver
-}
-
-function cleanString (str) {
-  return (!str || typeof (str) !== 'string') ? '' : str.trim()
-}
-
-// /**package { "name": "foo", "version": "1.2.3", ... } **/
-function parseIndex (data) {
-  data = data.split(/^\/\*\*package(?:\s|$)/m)
-
-  if (data.length < 2) {
-    return null
-  }
-  data = data[1]
-  data = data.split(/\*\*\/$/m)
-
-  if (data.length < 2) {
-    return null
-  }
-  data = data[0]
-  data = data.replace(/^\s*\*/mg, '')
-
-  try {
-    return safeJSON(data)
-  } catch (er) {
-    return null
-  }
-}
-
-function parseError (ex, file) {
-  var e = new Error('Failed to parse json\n' + ex.message)
-  e.code = 'EJSONPARSE'
-  e.path = file
-  return e
-}
diff --git a/node_modules/read-package-json/package.json b/node_modules/read-package-json/package.json
deleted file mode 100644
index 90ab321d51743..0000000000000
--- a/node_modules/read-package-json/package.json
+++ /dev/null
@@ -1,59 +0,0 @@
-{
-  "name": "read-package-json",
-  "version": "6.0.4",
-  "author": "GitHub Inc.",
-  "description": "The thing npm uses to read package.json files with semantics and defaults and validation",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/read-package-json.git"
-  },
-  "main": "lib/read-json.js",
-  "scripts": {
-    "prerelease": "npm t",
-    "postrelease": "npm publish && git push --follow-tags",
-    "release": "standard-version -s",
-    "test": "tap",
-    "npmclilint": "npmcli-lint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "dependencies": {
-    "glob": "^10.2.2",
-    "json-parse-even-better-errors": "^3.0.0",
-    "normalize-package-data": "^5.0.0",
-    "npm-normalize-package-bin": "^3.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.15.1",
-    "tap": "^16.0.1"
-  },
-  "license": "ISC",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "branches": 73,
-    "functions": 77,
-    "lines": 77,
-    "statements": 77,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.15.1",
-    "publish": "true"
-  }
-}
diff --git a/package-lock.json b/package-lock.json
index 45cb733e784b3..9cec079822a5c 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -2460,135 +2460,17 @@
       }
     },
     "node_modules/@npmcli/metavuln-calculator": {
-      "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-6.0.1.tgz",
-      "integrity": "sha512-S7Mgb2gizh3LK+VEMYbPfIwJNaEnZuFGwNBAGkXSjvBqkU8rx/y6L14dMZjAIgS4st2vgkWs1bWKHi8mWkl41Q==",
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-7.0.0.tgz",
+      "integrity": "sha512-Pw0tyX02VkpqlIQlG2TeiJNsdrecYeUU0ubZZa9pi3N37GCsxI+en43u4hYFdq+eSx1A9a9vwFAUyqEtKFsbHQ==",
       "dependencies": {
-        "cacache": "^17.0.0",
+        "cacache": "^18.0.0",
         "json-parse-even-better-errors": "^3.0.0",
-        "pacote": "^16.0.0",
-        "semver": "^7.3.5"
-      },
-      "engines": {
-        "node": "^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git": {
-      "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-4.1.0.tgz",
-      "integrity": "sha512-9hwoB3gStVfa0N31ymBmrX+GuDGdVA/QWShZVqE0HK2Af+7QGGrCTbZia/SW0ImUTjTne7SP91qxDmtXvDHRPQ==",
-      "dependencies": {
-        "@npmcli/promise-spawn": "^6.0.0",
-        "lru-cache": "^7.4.4",
-        "npm-pick-manifest": "^8.0.0",
-        "proc-log": "^3.0.0",
-        "promise-inflight": "^1.0.1",
-        "promise-retry": "^2.0.1",
-        "semver": "^7.3.5",
-        "which": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/metavuln-calculator/node_modules/cacache": {
-      "version": "17.1.4",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
-      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
-      "dependencies": {
-        "@npmcli/fs": "^3.1.0",
-        "fs-minipass": "^3.0.0",
-        "glob": "^10.2.2",
-        "lru-cache": "^7.7.1",
-        "minipass": "^7.0.3",
-        "minipass-collect": "^1.0.2",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "p-map": "^4.0.0",
-        "ssri": "^10.0.0",
-        "tar": "^6.1.11",
-        "unique-filename": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info": {
-      "version": "6.1.1",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
-      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
-      "dependencies": {
-        "lru-cache": "^7.5.1"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache": {
-      "version": "7.18.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
-      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
-      "engines": {
-        "node": ">=12"
-      }
-    },
-    "node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg": {
-      "version": "10.1.0",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
-      "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
-      "dependencies": {
-        "hosted-git-info": "^6.0.0",
-        "proc-log": "^3.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^5.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest": {
-      "version": "8.0.2",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz",
-      "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==",
-      "dependencies": {
-        "npm-install-checks": "^6.0.0",
-        "npm-normalize-package-bin": "^3.0.0",
-        "npm-package-arg": "^10.0.0",
+        "pacote": "^17.0.0",
         "semver": "^7.3.5"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/metavuln-calculator/node_modules/pacote": {
-      "version": "16.0.0",
-      "resolved": "https://registry.npmjs.org/pacote/-/pacote-16.0.0.tgz",
-      "integrity": "sha512-tXeSsl21IUIzw/zW0rzK/po2zlI5Nqbkpu0V6Pv99f3leHde7zdv+VjuP9pnVeTVV7OvaS49u+lgmvXjQ0TMJQ==",
-      "dependencies": {
-        "@npmcli/git": "^4.0.0",
-        "@npmcli/installed-package-contents": "^2.0.1",
-        "@npmcli/promise-spawn": "^6.0.1",
-        "@npmcli/run-script": "^6.0.0",
-        "cacache": "^17.0.0",
-        "fs-minipass": "^3.0.0",
-        "minipass": "^7.0.2",
-        "npm-package-arg": "^10.0.0",
-        "npm-packlist": "^7.0.0",
-        "npm-pick-manifest": "^8.0.0",
-        "npm-registry-fetch": "^15.0.0",
-        "proc-log": "^3.0.0",
-        "promise-retry": "^2.0.1",
-        "read-package-json": "^6.0.0",
-        "read-package-json-fast": "^3.0.0",
-        "sigstore": "^1.3.0",
-        "ssri": "^10.0.0",
-        "tar": "^6.1.11"
-      },
-      "bin": {
-        "pacote": "lib/bin.js"
-      },
-      "engines": {
-        "node": "^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/@npmcli/mock-globals": {
@@ -9853,6 +9735,7 @@
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-5.0.0.tgz",
       "integrity": "sha512-h9iPVIfrVZ9wVYQnxFgtw1ugSvGEMOlyPWWtm8BMJhnwyEL/FLbYbTY3V3PpjI/BUK67n9PEWDu6eHzu1fB15Q==",
+      "dev": true,
       "dependencies": {
         "hosted-git-info": "^6.0.0",
         "is-core-module": "^2.8.1",
@@ -9867,6 +9750,7 @@
       "version": "6.1.1",
       "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
       "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
+      "dev": true,
       "dependencies": {
         "lru-cache": "^7.5.1"
       },
@@ -9878,6 +9762,7 @@
       "version": "7.18.3",
       "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
       "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "dev": true,
       "engines": {
         "node": ">=12"
       }
@@ -11213,20 +11098,6 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/read-package-json": {
-      "version": "6.0.4",
-      "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-6.0.4.tgz",
-      "integrity": "sha512-AEtWXYfopBj2z5N5PbkAOeNHRPUg5q+Nen7QLxV8M2zJq1ym6/lCz3fYNTCXe19puu2d06jfHhrP7v/S2PtMMw==",
-      "dependencies": {
-        "glob": "^10.2.2",
-        "json-parse-even-better-errors": "^3.0.0",
-        "normalize-package-data": "^5.0.0",
-        "npm-normalize-package-bin": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/read-package-json-fast": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-3.0.2.tgz",
@@ -16328,7 +16199,7 @@
         "@npmcli/fs": "^3.1.0",
         "@npmcli/installed-package-contents": "^2.0.2",
         "@npmcli/map-workspaces": "^3.0.2",
-        "@npmcli/metavuln-calculator": "^6.0.1",
+        "@npmcli/metavuln-calculator": "^7.0.0",
         "@npmcli/name-from-folder": "^2.0.0",
         "@npmcli/node-gyp": "^3.0.0",
         "@npmcli/package-json": "^5.0.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index c8ee90d9c4609..98db0e6e39a6f 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -7,7 +7,7 @@
     "@npmcli/fs": "^3.1.0",
     "@npmcli/installed-package-contents": "^2.0.2",
     "@npmcli/map-workspaces": "^3.0.2",
-    "@npmcli/metavuln-calculator": "^6.0.1",
+    "@npmcli/metavuln-calculator": "^7.0.0",
     "@npmcli/name-from-folder": "^2.0.0",
     "@npmcli/node-gyp": "^3.0.0",
     "@npmcli/package-json": "^5.0.0",

From d5567876b815c89bfa79802a52c4fa27ec3cb377 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 14:09:23 -0700
Subject: [PATCH 41/68] chore: set workspaces with changing engines to
 prerelease
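
As I understand release-please's per-package config, flagging a
workspace "prerelease": true makes release-please cut its next version
as a prerelease rather than a stable release, so the engines change in
the following patch does not immediately ship as a stable major. The
per-package stanza, mirroring the one flipped in this patch, looks
like:

    "workspaces/libnpmversion": {
      "prerelease": true
    }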

---
 release-please-config.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/release-please-config.json b/release-please-config.json
index 6062068715df7..23613a946dc9a 100644
--- a/release-please-config.json
+++ b/release-please-config.json
@@ -71,7 +71,7 @@
       "prerelease": true
     },
     "workspaces/libnpmversion": {
-      "prerelease": false
+      "prerelease": true
     }
   },
   "exclude-packages-from-root": true,

From 947c1115e5f2ef26dd47eb377be930c55313a239 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 14:11:40 -0700
Subject: [PATCH 42/68] fix: drop node 16.13.x support

BREAKING CHANGE: support for node versions below 16.14.0 has been removed
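
The new supported range throughout this patch is "^16.14.0 || >=18.0.0".
As an illustrative sanity check (not part of this patch), the range can
be exercised with the `semver` package npm already depends on:

    const semver = require('semver')
    const range = '^16.14.0 || >=18.0.0'
    semver.satisfies('16.13.0', range) // false -- dropped by this change
    semver.satisfies('16.14.0', range) // true  -- the new 16.x floor
    semver.satisfies('18.0.0', range)  // true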
---
 .github/workflows/ci-libnpmaccess.yml         |  2 +-
 .github/workflows/ci-libnpmdiff.yml           |  2 +-
 .github/workflows/ci-libnpmexec.yml           |  2 +-
 .github/workflows/ci-libnpmhook.yml           |  2 +-
 .github/workflows/ci-libnpmorg.yml            |  2 +-
 .github/workflows/ci-libnpmpack.yml           |  2 +-
 .github/workflows/ci-libnpmpublish.yml        |  2 +-
 .github/workflows/ci-libnpmsearch.yml         |  2 +-
 .github/workflows/ci-libnpmteam.yml           |  2 +-
 .github/workflows/ci-libnpmversion.yml        |  4 +---
 .github/workflows/ci-npmcli-arborist.yml      |  2 +-
 .github/workflows/ci-npmcli-mock-registry.yml |  2 +-
 mock-registry/package.json                    |  4 ++--
 workspaces/arborist/package.json              |  4 ++--
 workspaces/libnpmaccess/package.json          |  4 ++--
 workspaces/libnpmdiff/package.json            |  4 ++--
 workspaces/libnpmexec/package.json            |  4 ++--
 workspaces/libnpmhook/package.json            |  4 ++--
 workspaces/libnpmorg/package.json             |  4 ++--
 workspaces/libnpmpack/package.json            |  4 ++--
 workspaces/libnpmpublish/package.json         |  4 ++--
 workspaces/libnpmsearch/package.json          |  4 ++--
 workspaces/libnpmteam/package.json            |  4 ++--
 workspaces/libnpmversion/package.json         | 10 ++++++++--
 24 files changed, 42 insertions(+), 38 deletions(-)

diff --git a/.github/workflows/ci-libnpmaccess.yml b/.github/workflows/ci-libnpmaccess.yml
index 2f97a1ec1b7e5..0f530a4cddaca 100644
--- a/.github/workflows/ci-libnpmaccess.yml
+++ b/.github/workflows/ci-libnpmaccess.yml
@@ -64,7 +64,7 @@ jobs:
             os: windows-latest
             shell: cmd
         node-version:
-          - 16.13.0
+          - 16.14.0
           - 16.x
           - 18.0.0
           - 18.x
diff --git a/.github/workflows/ci-libnpmdiff.yml b/.github/workflows/ci-libnpmdiff.yml
index dc6f09e74fc2b..6171ebdfd26f4 100644
--- a/.github/workflows/ci-libnpmdiff.yml
+++ b/.github/workflows/ci-libnpmdiff.yml
@@ -64,7 +64,7 @@ jobs:
             os: windows-latest
             shell: cmd
         node-version:
-          - 16.13.0
+          - 16.14.0
           - 16.x
           - 18.0.0
           - 18.x
diff --git a/.github/workflows/ci-libnpmexec.yml b/.github/workflows/ci-libnpmexec.yml
index a09285039e8aa..e216f74c4a54f 100644
--- a/.github/workflows/ci-libnpmexec.yml
+++ b/.github/workflows/ci-libnpmexec.yml
@@ -64,7 +64,7 @@ jobs:
             os: windows-latest
             shell: cmd
         node-version:
-          - 16.13.0
+          - 16.14.0
           - 16.x
           - 18.0.0
           - 18.x
diff --git a/.github/workflows/ci-libnpmhook.yml b/.github/workflows/ci-libnpmhook.yml
index a4d7a170a2636..d384f7127b103 100644
--- a/.github/workflows/ci-libnpmhook.yml
+++ b/.github/workflows/ci-libnpmhook.yml
@@ -64,7 +64,7 @@ jobs:
             os: windows-latest
             shell: cmd
         node-version:
-          - 16.13.0
+          - 16.14.0
           - 16.x
           - 18.0.0
           - 18.x
diff --git a/.github/workflows/ci-libnpmorg.yml b/.github/workflows/ci-libnpmorg.yml
index 0d01c4f7c197f..2860ad34c145e 100644
--- a/.github/workflows/ci-libnpmorg.yml
+++ b/.github/workflows/ci-libnpmorg.yml
@@ -64,7 +64,7 @@ jobs:
             os: windows-latest
             shell: cmd
         node-version:
-          - 16.13.0
+          - 16.14.0
           - 16.x
           - 18.0.0
           - 18.x
diff --git a/.github/workflows/ci-libnpmpack.yml b/.github/workflows/ci-libnpmpack.yml
index db8b0b496c146..bf5471e58c941 100644
--- a/.github/workflows/ci-libnpmpack.yml
+++ b/.github/workflows/ci-libnpmpack.yml
@@ -64,7 +64,7 @@ jobs:
             os: windows-latest
             shell: cmd
         node-version:
-          - 16.13.0
+          - 16.14.0
           - 16.x
           - 18.0.0
           - 18.x
diff --git a/.github/workflows/ci-libnpmpublish.yml b/.github/workflows/ci-libnpmpublish.yml
index 0312720c95a7c..33a21ef83735d 100644
--- a/.github/workflows/ci-libnpmpublish.yml
+++ b/.github/workflows/ci-libnpmpublish.yml
@@ -64,7 +64,7 @@ jobs:
             os: windows-latest
             shell: cmd
         node-version:
-          - 16.13.0
+          - 16.14.0
           - 16.x
           - 18.0.0
           - 18.x
diff --git a/.github/workflows/ci-libnpmsearch.yml b/.github/workflows/ci-libnpmsearch.yml
index 8f3d8f6a5802f..58d58b523ad4a 100644
--- a/.github/workflows/ci-libnpmsearch.yml
+++ b/.github/workflows/ci-libnpmsearch.yml
@@ -64,7 +64,7 @@ jobs:
             os: windows-latest
             shell: cmd
         node-version:
-          - 16.13.0
+          - 16.14.0
           - 16.x
           - 18.0.0
           - 18.x
diff --git a/.github/workflows/ci-libnpmteam.yml b/.github/workflows/ci-libnpmteam.yml
index 2d9266f8043f5..c1dc3219c5cbd 100644
--- a/.github/workflows/ci-libnpmteam.yml
+++ b/.github/workflows/ci-libnpmteam.yml
@@ -64,7 +64,7 @@ jobs:
             os: windows-latest
             shell: cmd
         node-version:
-          - 16.13.0
+          - 16.14.0
           - 16.x
           - 18.0.0
           - 18.x
diff --git a/.github/workflows/ci-libnpmversion.yml b/.github/workflows/ci-libnpmversion.yml
index b94827e9dad3c..f78a1df3acc5b 100644
--- a/.github/workflows/ci-libnpmversion.yml
+++ b/.github/workflows/ci-libnpmversion.yml
@@ -64,9 +64,7 @@ jobs:
             os: windows-latest
             shell: cmd
         node-version:
-          - 14.17.0
-          - 14.x
-          - 16.13.0
+          - 16.14.0
           - 16.x
           - 18.0.0
           - 18.x
diff --git a/.github/workflows/ci-npmcli-arborist.yml b/.github/workflows/ci-npmcli-arborist.yml
index c032dc33e9e09..7ba0f170e576d 100644
--- a/.github/workflows/ci-npmcli-arborist.yml
+++ b/.github/workflows/ci-npmcli-arborist.yml
@@ -64,7 +64,7 @@ jobs:
             os: windows-latest
             shell: cmd
         node-version:
-          - 16.13.0
+          - 16.14.0
           - 16.x
           - 18.0.0
           - 18.x
diff --git a/.github/workflows/ci-npmcli-mock-registry.yml b/.github/workflows/ci-npmcli-mock-registry.yml
index 4e8510eb59612..8e23df0cb5657 100644
--- a/.github/workflows/ci-npmcli-mock-registry.yml
+++ b/.github/workflows/ci-npmcli-mock-registry.yml
@@ -64,7 +64,7 @@ jobs:
             os: windows-latest
             shell: cmd
         node-version:
-          - 16.13.0
+          - 16.14.0
           - 16.x
           - 18.0.0
           - 18.x
diff --git a/mock-registry/package.json b/mock-registry/package.json
index 5f9598b08e4fb..d729881f52651 100644
--- a/mock-registry/package.json
+++ b/mock-registry/package.json
@@ -30,13 +30,13 @@
     "lib/"
   ],
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "version": "4.18.0",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 98db0e6e39a6f..0c7f52344ed4b 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -87,14 +87,14 @@
     ]
   },
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "version": "4.18.0",
     "content": "../../scripts/template-oss/index.js",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/workspaces/libnpmaccess/package.json b/workspaces/libnpmaccess/package.json
index a78674049c784..292bffe371ba8 100644
--- a/workspaces/libnpmaccess/package.json
+++ b/workspaces/libnpmaccess/package.json
@@ -33,7 +33,7 @@
     "npm-registry-fetch": "^15.0.0"
   },
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "files": [
     "bin/",
@@ -44,7 +44,7 @@
     "version": "4.18.0",
     "content": "../../scripts/template-oss/index.js",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json
index 12565b747d062..723fe876c5459 100644
--- a/workspaces/libnpmdiff/package.json
+++ b/workspaces/libnpmdiff/package.json
@@ -13,7 +13,7 @@
     "lib/"
   ],
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "keywords": [
     "npm",
@@ -61,7 +61,7 @@
     "version": "4.18.0",
     "content": "../../scripts/template-oss/index.js",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index e633eb98dd50a..e749a8a0b4e6a 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -7,7 +7,7 @@
   ],
   "main": "lib/index.js",
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "description": "npm exec (npx) programmatic API",
   "repository": {
@@ -76,7 +76,7 @@
     "version": "4.18.0",
     "content": "../../scripts/template-oss/index.js",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/workspaces/libnpmhook/package.json b/workspaces/libnpmhook/package.json
index 5fc4d372632d3..c97047e644812 100644
--- a/workspaces/libnpmhook/package.json
+++ b/workspaces/libnpmhook/package.json
@@ -40,14 +40,14 @@
     "tap": "^16.3.4"
   },
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "version": "4.18.0",
     "content": "../../scripts/template-oss/index.js",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/workspaces/libnpmorg/package.json b/workspaces/libnpmorg/package.json
index 8cef0bd762af6..584f113ed10c0 100644
--- a/workspaces/libnpmorg/package.json
+++ b/workspaces/libnpmorg/package.json
@@ -45,14 +45,14 @@
     "npm-registry-fetch": "^15.0.0"
   },
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "version": "4.18.0",
     "content": "../../scripts/template-oss/index.js",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index 0a8330815c2a2..fa252913f865f 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -42,14 +42,14 @@
     "pacote": "^17.0.0"
   },
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "version": "4.18.0",
     "content": "../../scripts/template-oss/index.js",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index 0cb19f7950fe4..583d8a86fb840 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -48,14 +48,14 @@
     "ssri": "^10.0.5"
   },
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "version": "4.18.0",
     "content": "../../scripts/template-oss/index.js",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/workspaces/libnpmsearch/package.json b/workspaces/libnpmsearch/package.json
index c00101a99ede3..ab0f73882b023 100644
--- a/workspaces/libnpmsearch/package.json
+++ b/workspaces/libnpmsearch/package.json
@@ -41,14 +41,14 @@
     "npm-registry-fetch": "^15.0.0"
   },
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "version": "4.18.0",
     "content": "../../scripts/template-oss/index.js",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/workspaces/libnpmteam/package.json b/workspaces/libnpmteam/package.json
index 9379c8e0c718c..860c525afa4a5 100644
--- a/workspaces/libnpmteam/package.json
+++ b/workspaces/libnpmteam/package.json
@@ -35,14 +35,14 @@
     "npm-registry-fetch": "^15.0.0"
   },
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "version": "4.18.0",
     "content": "../../scripts/template-oss/index.js",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/workspaces/libnpmversion/package.json b/workspaces/libnpmversion/package.json
index 19e4972632a88..aaaba35ad9e4b 100644
--- a/workspaces/libnpmversion/package.json
+++ b/workspaces/libnpmversion/package.json
@@ -44,11 +44,17 @@
     "semver": "^7.3.7"
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "version": "4.18.0",
-    "content": "../../scripts/template-oss/index.js"
+    "content": "../../scripts/template-oss/index.js",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
   }
 }

From e6df998f2c3f223af627203cba24d7227dd411dc Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 14:25:08 -0700
Subject: [PATCH 43/68] deps: hoist normalize-package-data@6.0.0
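
Hoisting keeps a single copy of normalize-package-data at the top-level
node_modules so the nested copies under init-package-json and pacote can
be deleted. This works because Node resolves a require() by walking
node_modules directories upward from the caller's location; a short
illustrative sketch (not part of this patch):

    // Lists the node_modules dirs Node searches, nearest first; with the
    // nested copies gone, resolution falls through to the hoisted copy at
    // node_modules/normalize-package-data.
    console.log(require.resolve.paths('normalize-package-data'))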

---
 DEPENDENCIES.md                               |   2 +
 node_modules/.gitignore                       |   6 +-
 .../normalize-package-data/LICENSE            |  15 -
 .../lib/extract_description.js                |  24 -
 .../normalize-package-data/lib/fixer.js       | 475 ------------------
 .../lib/make_warning.js                       |  22 -
 .../normalize-package-data/lib/normalize.js   |  48 --
 .../normalize-package-data/lib/safe_format.js |  11 -
 .../normalize-package-data/lib/typos.json     |  25 -
 .../lib/warning_messages.json                 |  30 --
 .../normalize-package-data/package.json       |  62 ---
 .../normalize-package-data/LICENSE            |   0
 .../lib/extract_description.js                |   0
 .../normalize-package-data/lib/fixer.js       |   0
 .../lib/make_warning.js                       |   0
 .../normalize-package-data/lib/normalize.js   |   0
 .../normalize-package-data/lib/safe_format.js |   0
 .../normalize-package-data/lib/typos.json     |   0
 .../lib/warning_messages.json                 |   0
 .../normalize-package-data/package.json       |   0
 .../normalize-package-data/LICENSE            |  15 -
 .../lib/extract_description.js                |  24 -
 .../normalize-package-data/lib/fixer.js       | 475 ------------------
 .../lib/make_warning.js                       |  22 -
 .../normalize-package-data/lib/normalize.js   |  48 --
 .../normalize-package-data/lib/safe_format.js |  11 -
 .../normalize-package-data/lib/typos.json     |  25 -
 .../lib/warning_messages.json                 |  30 --
 .../normalize-package-data/package.json       |  62 ---
 package-lock.json                             | 129 ++---
 30 files changed, 35 insertions(+), 1526 deletions(-)
 delete mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/LICENSE
 delete mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js
 delete mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js
 delete mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js
 delete mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js
 delete mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js
 delete mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json
 delete mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json
 delete mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/package.json
 rename node_modules/{@npmcli/package-json/node_modules => }/normalize-package-data/LICENSE (100%)
 rename node_modules/{@npmcli/package-json/node_modules => }/normalize-package-data/lib/extract_description.js (100%)
 rename node_modules/{@npmcli/package-json/node_modules => }/normalize-package-data/lib/fixer.js (100%)
 rename node_modules/{@npmcli/package-json/node_modules => }/normalize-package-data/lib/make_warning.js (100%)
 rename node_modules/{@npmcli/package-json/node_modules => }/normalize-package-data/lib/normalize.js (100%)
 rename node_modules/{@npmcli/package-json/node_modules => }/normalize-package-data/lib/safe_format.js (100%)
 rename node_modules/{@npmcli/package-json/node_modules => }/normalize-package-data/lib/typos.json (100%)
 rename node_modules/{@npmcli/package-json/node_modules => }/normalize-package-data/lib/warning_messages.json (100%)
 rename node_modules/{@npmcli/package-json/node_modules => }/normalize-package-data/package.json (100%)
 delete mode 100644 node_modules/pacote/node_modules/normalize-package-data/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/normalize-package-data/lib/extract_description.js
 delete mode 100644 node_modules/pacote/node_modules/normalize-package-data/lib/fixer.js
 delete mode 100644 node_modules/pacote/node_modules/normalize-package-data/lib/make_warning.js
 delete mode 100644 node_modules/pacote/node_modules/normalize-package-data/lib/normalize.js
 delete mode 100644 node_modules/pacote/node_modules/normalize-package-data/lib/safe_format.js
 delete mode 100644 node_modules/pacote/node_modules/normalize-package-data/lib/typos.json
 delete mode 100644 node_modules/pacote/node_modules/normalize-package-data/lib/warning_messages.json
 delete mode 100644 node_modules/pacote/node_modules/normalize-package-data/package.json

diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 324dbb190ca34..5d58526871a86 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -107,6 +107,7 @@ graph LR;
   npm-->libnpmversion;
   npm-->make-fetch-happen;
   npm-->nopt;
+  npm-->normalize-package-data;
   npm-->npm-audit-report;
   npm-->npm-install-checks;
   npm-->npm-package-arg;
@@ -532,6 +533,7 @@ graph LR;
   npm-->nock;
   npm-->node-gyp;
   npm-->nopt;
+  npm-->normalize-package-data;
   npm-->npm-audit-report;
   npm-->npm-install-checks;
   npm-->npm-package-arg;
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 17bd4ad5a2682..bcdef42e99c83 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -28,9 +28,6 @@
 !/@npmcli/name-from-folder
 !/@npmcli/node-gyp
 !/@npmcli/package-json
-!/@npmcli/package-json/node_modules/
-/@npmcli/package-json/node_modules/*
-!/@npmcli/package-json/node_modules/normalize-package-data
 !/@npmcli/promise-spawn
 !/@npmcli/query
 !/@npmcli/run-script
@@ -128,7 +125,6 @@
 !/init-package-json
 !/init-package-json/node_modules/
 /init-package-json/node_modules/*
-!/init-package-json/node_modules/normalize-package-data
 !/init-package-json/node_modules/read-package-json
 !/ip-regex
 !/ip
@@ -201,6 +197,7 @@
 !/node-gyp/node_modules/signal-exit
 !/node-gyp/node_modules/which
 !/nopt
+!/normalize-package-data
 !/npm-audit-report
 !/npm-bundled
 !/npm-install-checks
@@ -228,7 +225,6 @@
 !/pacote/node_modules/
 /pacote/node_modules/*
 !/pacote/node_modules/lru-cache
-!/pacote/node_modules/normalize-package-data
 !/pacote/node_modules/npm-pick-manifest
 !/pacote/node_modules/npm-pick-manifest/node_modules/
 /pacote/node_modules/npm-pick-manifest/node_modules/*
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/LICENSE b/node_modules/init-package-json/node_modules/normalize-package-data/LICENSE
deleted file mode 100644
index 19d1364a8ac08..0000000000000
--- a/node_modules/init-package-json/node_modules/normalize-package-data/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-This package contains code originally written by Isaac Z. Schlueter.
-Used with permission.
-
-Copyright (c) Meryn Stol ("Author")
-All rights reserved.
-
-The BSD License
-
-Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js
deleted file mode 100644
index 631966b5f29af..0000000000000
--- a/node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js
+++ /dev/null
@@ -1,24 +0,0 @@
-module.exports = extractDescription
-
-// Extracts description from contents of a readme file in markdown format
-function extractDescription (d) {
-  if (!d) {
-    return
-  }
-  if (d === 'ERROR: No README data found!') {
-    return
-  }
-  // the first block of text before the first heading
-  // that isn't the first line heading
-  d = d.trim().split('\n')
-  let s = 0
-  while (d[s] && d[s].trim().match(/^(#|$)/)) {
-    s++
-  }
-  const l = d.length
-  let e = s + 1
-  while (e < l && d[e].trim()) {
-    e++
-  }
-  return d.slice(s, e).join(' ').trim()
-}
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js
deleted file mode 100644
index bb78231d83ca9..0000000000000
--- a/node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js
+++ /dev/null
@@ -1,475 +0,0 @@
-var isValidSemver = require('semver/functions/valid')
-var cleanSemver = require('semver/functions/clean')
-var validateLicense = require('validate-npm-package-license')
-var hostedGitInfo = require('hosted-git-info')
-var isBuiltinModule = require('is-core-module')
-var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies']
-var extractDescription = require('./extract_description')
-var url = require('url')
-var typos = require('./typos.json')
-
-var isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
-
-module.exports = {
-  // default warning function
-  warn: function () {},
-
-  fixRepositoryField: function (data) {
-    if (data.repositories) {
-      this.warn('repositories')
-      data.repository = data.repositories[0]
-    }
-    if (!data.repository) {
-      return this.warn('missingRepository')
-    }
-    if (typeof data.repository === 'string') {
-      data.repository = {
-        type: 'git',
-        url: data.repository,
-      }
-    }
-    var r = data.repository.url || ''
-    if (r) {
-      var hosted = hostedGitInfo.fromUrl(r)
-      if (hosted) {
-        r = data.repository.url
-          = hosted.getDefaultRepresentation() === 'shortcut' ? hosted.https() : hosted.toString()
-      }
-    }
-
-    if (r.match(/github.com\/[^/]+\/[^/]+\.git\.git$/)) {
-      this.warn('brokenGitUrl', r)
-    }
-  },
-
-  fixTypos: function (data) {
-    Object.keys(typos.topLevel).forEach(function (d) {
-      if (Object.prototype.hasOwnProperty.call(data, d)) {
-        this.warn('typo', d, typos.topLevel[d])
-      }
-    }, this)
-  },
-
-  fixScriptsField: function (data) {
-    if (!data.scripts) {
-      return
-    }
-    if (typeof data.scripts !== 'object') {
-      this.warn('nonObjectScripts')
-      delete data.scripts
-      return
-    }
-    Object.keys(data.scripts).forEach(function (k) {
-      if (typeof data.scripts[k] !== 'string') {
-        this.warn('nonStringScript')
-        delete data.scripts[k]
-      } else if (typos.script[k] && !data.scripts[typos.script[k]]) {
-        this.warn('typo', k, typos.script[k], 'scripts')
-      }
-    }, this)
-  },
-
-  fixFilesField: function (data) {
-    var files = data.files
-    if (files && !Array.isArray(files)) {
-      this.warn('nonArrayFiles')
-      delete data.files
-    } else if (data.files) {
-      data.files = data.files.filter(function (file) {
-        if (!file || typeof file !== 'string') {
-          this.warn('invalidFilename', file)
-          return false
-        } else {
-          return true
-        }
-      }, this)
-    }
-  },
-
-  fixBinField: function (data) {
-    if (!data.bin) {
-      return
-    }
-    if (typeof data.bin === 'string') {
-      var b = {}
-      var match
-      if (match = data.name.match(/^@[^/]+[/](.*)$/)) {
-        b[match[1]] = data.bin
-      } else {
-        b[data.name] = data.bin
-      }
-      data.bin = b
-    }
-  },
-
-  fixManField: function (data) {
-    if (!data.man) {
-      return
-    }
-    if (typeof data.man === 'string') {
-      data.man = [data.man]
-    }
-  },
-  fixBundleDependenciesField: function (data) {
-    var bdd = 'bundledDependencies'
-    var bd = 'bundleDependencies'
-    if (data[bdd] && !data[bd]) {
-      data[bd] = data[bdd]
-      delete data[bdd]
-    }
-    if (data[bd] && !Array.isArray(data[bd])) {
-      this.warn('nonArrayBundleDependencies')
-      delete data[bd]
-    } else if (data[bd]) {
-      data[bd] = data[bd].filter(function (filtered) {
-        if (!filtered || typeof filtered !== 'string') {
-          this.warn('nonStringBundleDependency', filtered)
-          return false
-        } else {
-          if (!data.dependencies) {
-            data.dependencies = {}
-          }
-          if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
-            this.warn('nonDependencyBundleDependency', filtered)
-            data.dependencies[filtered] = '*'
-          }
-          return true
-        }
-      }, this)
-    }
-  },
-
-  fixDependencies: function (data, strict) {
-    objectifyDeps(data, this.warn)
-    addOptionalDepsToDeps(data, this.warn)
-    this.fixBundleDependenciesField(data)
-
-    ;['dependencies', 'devDependencies'].forEach(function (deps) {
-      if (!(deps in data)) {
-        return
-      }
-      if (!data[deps] || typeof data[deps] !== 'object') {
-        this.warn('nonObjectDependencies', deps)
-        delete data[deps]
-        return
-      }
-      Object.keys(data[deps]).forEach(function (d) {
-        var r = data[deps][d]
-        if (typeof r !== 'string') {
-          this.warn('nonStringDependency', d, JSON.stringify(r))
-          delete data[deps][d]
-        }
-        var hosted = hostedGitInfo.fromUrl(data[deps][d])
-        if (hosted) {
-          data[deps][d] = hosted.toString()
-        }
-      }, this)
-    }, this)
-  },
-
-  fixModulesField: function (data) {
-    if (data.modules) {
-      this.warn('deprecatedModules')
-      delete data.modules
-    }
-  },
-
-  fixKeywordsField: function (data) {
-    if (typeof data.keywords === 'string') {
-      data.keywords = data.keywords.split(/,\s+/)
-    }
-    if (data.keywords && !Array.isArray(data.keywords)) {
-      delete data.keywords
-      this.warn('nonArrayKeywords')
-    } else if (data.keywords) {
-      data.keywords = data.keywords.filter(function (kw) {
-        if (typeof kw !== 'string' || !kw) {
-          this.warn('nonStringKeyword')
-          return false
-        } else {
-          return true
-        }
-      }, this)
-    }
-  },
-
-  fixVersionField: function (data, strict) {
-    // allow "loose" semver 1.0 versions in non-strict mode
-    // enforce strict semver 2.0 compliance in strict mode
-    var loose = !strict
-    if (!data.version) {
-      data.version = ''
-      return true
-    }
-    if (!isValidSemver(data.version, loose)) {
-      throw new Error('Invalid version: "' + data.version + '"')
-    }
-    data.version = cleanSemver(data.version, loose)
-    return true
-  },
-
-  fixPeople: function (data) {
-    modifyPeople(data, unParsePerson)
-    modifyPeople(data, parsePerson)
-  },
-
-  fixNameField: function (data, options) {
-    if (typeof options === 'boolean') {
-      options = { strict: options }
-    } else if (typeof options === 'undefined') {
-      options = {}
-    }
-    var strict = options.strict
-    if (!data.name && !strict) {
-      data.name = ''
-      return
-    }
-    if (typeof data.name !== 'string') {
-      throw new Error('name field must be a string.')
-    }
-    if (!strict) {
-      data.name = data.name.trim()
-    }
-    ensureValidName(data.name, strict, options.allowLegacyCase)
-    if (isBuiltinModule(data.name)) {
-      this.warn('conflictingName', data.name)
-    }
-  },
-
-  fixDescriptionField: function (data) {
-    if (data.description && typeof data.description !== 'string') {
-      this.warn('nonStringDescription')
-      delete data.description
-    }
-    if (data.readme && !data.description) {
-      data.description = extractDescription(data.readme)
-    }
-    if (data.description === undefined) {
-      delete data.description
-    }
-    if (!data.description) {
-      this.warn('missingDescription')
-    }
-  },
-
-  fixReadmeField: function (data) {
-    if (!data.readme) {
-      this.warn('missingReadme')
-      data.readme = 'ERROR: No README data found!'
-    }
-  },
-
-  fixBugsField: function (data) {
-    if (!data.bugs && data.repository && data.repository.url) {
-      var hosted = hostedGitInfo.fromUrl(data.repository.url)
-      if (hosted && hosted.bugs()) {
-        data.bugs = { url: hosted.bugs() }
-      }
-    } else if (data.bugs) {
-      if (typeof data.bugs === 'string') {
-        if (isEmail(data.bugs)) {
-          data.bugs = { email: data.bugs }
-        /* eslint-disable-next-line node/no-deprecated-api */
-        } else if (url.parse(data.bugs).protocol) {
-          data.bugs = { url: data.bugs }
-        } else {
-          this.warn('nonEmailUrlBugsString')
-        }
-      } else {
-        bugsTypos(data.bugs, this.warn)
-        var oldBugs = data.bugs
-        data.bugs = {}
-        if (oldBugs.url) {
-          /* eslint-disable-next-line node/no-deprecated-api */
-          if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
-            data.bugs.url = oldBugs.url
-          } else {
-            this.warn('nonUrlBugsUrlField')
-          }
-        }
-        if (oldBugs.email) {
-          if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
-            data.bugs.email = oldBugs.email
-          } else {
-            this.warn('nonEmailBugsEmailField')
-          }
-        }
-      }
-      if (!data.bugs.email && !data.bugs.url) {
-        delete data.bugs
-        this.warn('emptyNormalizedBugs')
-      }
-    }
-  },
-
-  fixHomepageField: function (data) {
-    if (!data.homepage && data.repository && data.repository.url) {
-      var hosted = hostedGitInfo.fromUrl(data.repository.url)
-      if (hosted && hosted.docs()) {
-        data.homepage = hosted.docs()
-      }
-    }
-    if (!data.homepage) {
-      return
-    }
-
-    if (typeof data.homepage !== 'string') {
-      this.warn('nonUrlHomepage')
-      return delete data.homepage
-    }
-    /* eslint-disable-next-line node/no-deprecated-api */
-    if (!url.parse(data.homepage).protocol) {
-      data.homepage = 'http://' + data.homepage
-    }
-  },
-
-  fixLicenseField: function (data) {
-    const license = data.license || data.licence
-    if (!license) {
-      return this.warn('missingLicense')
-    }
-    if (
-      typeof (license) !== 'string' ||
-      license.length < 1 ||
-      license.trim() === ''
-    ) {
-      return this.warn('invalidLicense')
-    }
-    if (!validateLicense(license).validForNewPackages) {
-      return this.warn('invalidLicense')
-    }
-  },
-}
-
-function isValidScopedPackageName (spec) {
-  if (spec.charAt(0) !== '@') {
-    return false
-  }
-
-  var rest = spec.slice(1).split('/')
-  if (rest.length !== 2) {
-    return false
-  }
-
-  return rest[0] && rest[1] &&
-    rest[0] === encodeURIComponent(rest[0]) &&
-    rest[1] === encodeURIComponent(rest[1])
-}
-
-function isCorrectlyEncodedName (spec) {
-  return !spec.match(/[/@\s+%:]/) &&
-    spec === encodeURIComponent(spec)
-}
-
-function ensureValidName (name, strict, allowLegacyCase) {
-  if (name.charAt(0) === '.' ||
-      !(isValidScopedPackageName(name) || isCorrectlyEncodedName(name)) ||
-      (strict && (!allowLegacyCase) && name !== name.toLowerCase()) ||
-      name.toLowerCase() === 'node_modules' ||
-      name.toLowerCase() === 'favicon.ico') {
-    throw new Error('Invalid name: ' + JSON.stringify(name))
-  }
-}
-
-function modifyPeople (data, fn) {
-  if (data.author) {
-    data.author = fn(data.author)
-  }['maintainers', 'contributors'].forEach(function (set) {
-    if (!Array.isArray(data[set])) {
-      return
-    }
-    data[set] = data[set].map(fn)
-  })
-  return data
-}
-
-function unParsePerson (person) {
-  if (typeof person === 'string') {
-    return person
-  }
-  var name = person.name || ''
-  var u = person.url || person.web
-  var wrappedUrl = u ? (' (' + u + ')') : ''
-  var e = person.email || person.mail
-  var wrappedEmail = e ? (' <' + e + '>') : ''
-  return name + wrappedEmail + wrappedUrl
-}
-
-function parsePerson (person) {
-  if (typeof person !== 'string') {
-    return person
-  }
-  var matchedName = person.match(/^([^(<]+)/)
-  var matchedUrl = person.match(/\(([^()]+)\)/)
-  var matchedEmail = person.match(/<([^<>]+)>/)
-  var obj = {}
-  if (matchedName && matchedName[0].trim()) {
-    obj.name = matchedName[0].trim()
-  }
-  if (matchedEmail) {
-    obj.email = matchedEmail[1]
-  }
-  if (matchedUrl) {
-    obj.url = matchedUrl[1]
-  }
-  return obj
-}
-
-function addOptionalDepsToDeps (data, warn) {
-  var o = data.optionalDependencies
-  if (!o) {
-    return
-  }
-  var d = data.dependencies || {}
-  Object.keys(o).forEach(function (k) {
-    d[k] = o[k]
-  })
-  data.dependencies = d
-}
-
-function depObjectify (deps, type, warn) {
-  if (!deps) {
-    return {}
-  }
-  if (typeof deps === 'string') {
-    deps = deps.trim().split(/[\n\r\s\t ,]+/)
-  }
-  if (!Array.isArray(deps)) {
-    return deps
-  }
-  warn('deprecatedArrayDependencies', type)
-  var o = {}
-  deps.filter(function (d) {
-    return typeof d === 'string'
-  }).forEach(function (d) {
-    d = d.trim().split(/(:?[@\s><=])/)
-    var dn = d.shift()
-    var dv = d.join('')
-    dv = dv.trim()
-    dv = dv.replace(/^@/, '')
-    o[dn] = dv
-  })
-  return o
-}
-
-function objectifyDeps (data, warn) {
-  depTypes.forEach(function (type) {
-    if (!data[type]) {
-      return
-    }
-    data[type] = depObjectify(data[type], type, warn)
-  })
-}
-
-function bugsTypos (bugs, warn) {
-  if (!bugs) {
-    return
-  }
-  Object.keys(bugs).forEach(function (k) {
-    if (typos.bugs[k]) {
-      warn('typo', k, typos.bugs[k], 'bugs')
-      bugs[typos.bugs[k]] = bugs[k]
-      delete bugs[k]
-    }
-  })
-}
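
For reference, fixer.js (removed above) normalizes person fields by round-tripping them through unParsePerson and then parsePerson, so string and object inputs both come out in the same parsed shape. A minimal sketch using the fixer object that lib/normalize.js exports as normalize.fixer, with a made-up author string:

// Illustrative only; the person data is a hypothetical example.
const normalize = require('normalize-package-data')

const data = { author: 'Jane Doe <jane@example.com> (https://example.com)' }
normalize.fixer.fixPeople(data)
console.log(data.author)
// => { name: 'Jane Doe', email: 'jane@example.com', url: 'https://example.com' }
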
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js
deleted file mode 100644
index 3be9c86539952..0000000000000
--- a/node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js
+++ /dev/null
@@ -1,22 +0,0 @@
-var util = require('util')
-var messages = require('./warning_messages.json')
-
-module.exports = function () {
-  var args = Array.prototype.slice.call(arguments, 0)
-  var warningName = args.shift()
-  if (warningName === 'typo') {
-    return makeTypoWarning.apply(null, args)
-  } else {
-    var msgTemplate = messages[warningName] ? messages[warningName] : warningName + ": '%s'"
-    args.unshift(msgTemplate)
-    return util.format.apply(null, args)
-  }
-}
-
-function makeTypoWarning (providedName, probableName, field) {
-  if (field) {
-    providedName = field + "['" + providedName + "']"
-    probableName = field + "['" + probableName + "']"
-  }
-  return util.format(messages.typo, providedName, probableName)
-}
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js
deleted file mode 100644
index bf71d2c1e2235..0000000000000
--- a/node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js
+++ /dev/null
@@ -1,48 +0,0 @@
-module.exports = normalize
-
-var fixer = require('./fixer')
-normalize.fixer = fixer
-
-var makeWarning = require('./make_warning')
-
-var fieldsToFix = ['name', 'version', 'description', 'repository', 'modules', 'scripts',
-  'files', 'bin', 'man', 'bugs', 'keywords', 'readme', 'homepage', 'license']
-var otherThingsToFix = ['dependencies', 'people', 'typos']
-
-var thingsToFix = fieldsToFix.map(function (fieldName) {
-  return ucFirst(fieldName) + 'Field'
-})
-// two ways to do this in CoffeeScript on only one line, sub-70 chars:
-// thingsToFix = fieldsToFix.map (name) -> ucFirst(name) + "Field"
-// thingsToFix = (ucFirst(name) + "Field" for name in fieldsToFix)
-thingsToFix = thingsToFix.concat(otherThingsToFix)
-
-function normalize (data, warn, strict) {
-  if (warn === true) {
-    warn = null
-    strict = true
-  }
-  if (!strict) {
-    strict = false
-  }
-  if (!warn || data.private) {
-    warn = function (msg) { /* noop */ }
-  }
-
-  if (data.scripts &&
-      data.scripts.install === 'node-gyp rebuild' &&
-      !data.scripts.preinstall) {
-    data.gypfile = true
-  }
-  fixer.warn = function () {
-    warn(makeWarning.apply(null, arguments))
-  }
-  thingsToFix.forEach(function (thingName) {
-    fixer['fix' + ucFirst(thingName)](data, strict)
-  })
-  data._id = data.name + '@' + data.version
-}
-
-function ucFirst (string) {
-  return string.charAt(0).toUpperCase() + string.slice(1)
-}
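
For reference, normalize.js (removed above) builds its work list by mapping each field name through ucFirst and dispatching to the matching fixer method ('name' becomes fixNameField, and so on), then stamps data._id. A minimal end-to-end sketch with made-up package data:

// Illustrative only; the package data is a hypothetical example.
const normalize = require('normalize-package-data')

const pkg = { name: 'my-pkg', version: 'v1.0.0', repository: 'npm/example' }
normalize(pkg, console.warn) // the warn callback is optional
// fixVersionField cleans 'v1.0.0' to '1.0.0', fixRepositoryField expands the
// 'npm/example' shortcut to a git+https URL via hosted-git-info, and
// pkg._id ends up as 'my-pkg@1.0.0'.
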
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js
deleted file mode 100644
index 5fc888e5450cd..0000000000000
--- a/node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js
+++ /dev/null
@@ -1,11 +0,0 @@
-var util = require('util')
-
-module.exports = function () {
-  var args = Array.prototype.slice.call(arguments, 0)
-  args.forEach(function (arg) {
-    if (!arg) {
-      throw new TypeError('Bad arguments.')
-    }
-  })
-  return util.format.apply(null, arguments)
-}
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json b/node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json
deleted file mode 100644
index 7f9dd283b30ff..0000000000000
--- a/node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
-  "topLevel": {
-    "dependancies": "dependencies"
-   ,"dependecies": "dependencies"
-   ,"depdenencies": "dependencies"
-   ,"devEependencies": "devDependencies"
-   ,"depends": "dependencies"
-   ,"dev-dependencies": "devDependencies"
-   ,"devDependences": "devDependencies"
-   ,"devDepenencies": "devDependencies"
-   ,"devdependencies": "devDependencies"
-   ,"repostitory": "repository"
-   ,"repo": "repository"
-   ,"prefereGlobal": "preferGlobal"
-   ,"hompage": "homepage"
-   ,"hampage": "homepage"
-   ,"autohr": "author"
-   ,"autor": "author"
-   ,"contributers": "contributors"
-   ,"publicationConfig": "publishConfig"
-   ,"script": "scripts"
-  },
-  "bugs": { "web": "url", "name": "url" },
-  "script": { "server": "start", "tests": "test" }
-}
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json b/node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json
deleted file mode 100644
index 4890f506ed965..0000000000000
--- a/node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json
+++ /dev/null
@@ -1,30 +0,0 @@
-{
-  "repositories": "'repositories' (plural) Not supported. Please pick one as the 'repository' field"
-  ,"missingRepository": "No repository field."
-  ,"brokenGitUrl": "Probably broken git url: %s"
-  ,"nonObjectScripts": "scripts must be an object"
-  ,"nonStringScript": "script values must be string commands"
-  ,"nonArrayFiles": "Invalid 'files' member"
-  ,"invalidFilename": "Invalid filename in 'files' list: %s"
-  ,"nonArrayBundleDependencies": "Invalid 'bundleDependencies' list. Must be array of package names"
-  ,"nonStringBundleDependency": "Invalid bundleDependencies member: %s"
-  ,"nonDependencyBundleDependency": "Non-dependency in bundleDependencies: %s"
-  ,"nonObjectDependencies": "%s field must be an object"
-  ,"nonStringDependency": "Invalid dependency: %s %s"
-  ,"deprecatedArrayDependencies": "specifying %s as array is deprecated"
-  ,"deprecatedModules": "modules field is deprecated"
-  ,"nonArrayKeywords": "keywords should be an array of strings"
-  ,"nonStringKeyword": "keywords should be an array of strings"
-  ,"conflictingName": "%s is also the name of a node core module."
-  ,"nonStringDescription": "'description' field should be a string"
-  ,"missingDescription": "No description"
-  ,"missingReadme": "No README data"
-  ,"missingLicense": "No license field."
-  ,"nonEmailUrlBugsString": "Bug string field must be url, email, or {email,url}"
-  ,"nonUrlBugsUrlField": "bugs.url field must be a string url. Deleted."
-  ,"nonEmailBugsEmailField": "bugs.email field must be a string email. Deleted."
-  ,"emptyNormalizedBugs": "Normalized value of bugs field is an empty object. Deleted."
-  ,"nonUrlHomepage": "homepage field must be a string url. Deleted."
-  ,"invalidLicense": "license should be a valid SPDX license expression"
-  ,"typo": "%s should probably be %s."
-}
diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/package.json b/node_modules/init-package-json/node_modules/normalize-package-data/package.json
deleted file mode 100644
index 48d2371d4a66b..0000000000000
--- a/node_modules/init-package-json/node_modules/normalize-package-data/package.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
-  "name": "normalize-package-data",
-  "version": "6.0.0",
-  "author": "GitHub Inc.",
-  "description": "Normalizes data that can be found in package.json files.",
-  "license": "BSD-2-Clause",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/normalize-package-data.git"
-  },
-  "main": "lib/normalize.js",
-  "scripts": {
-    "test": "tap",
-    "npmclilint": "npmcli-lint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "dependencies": {
-    "hosted-git-info": "^7.0.0",
-    "is-core-module": "^2.8.1",
-    "semver": "^7.3.5",
-    "validate-npm-package-license": "^3.0.4"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.0.1"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^16.14.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.18.0",
-    "publish": "true",
-    "ciVersions": [
-      "16.14.0",
-      "16.x",
-      "18.0.0",
-      "18.x"
-    ]
-  },
-  "tap": {
-    "branches": 86,
-    "functions": 92,
-    "lines": 86,
-    "statements": 86,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/LICENSE b/node_modules/normalize-package-data/LICENSE
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/normalize-package-data/LICENSE
rename to node_modules/normalize-package-data/LICENSE
diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/extract_description.js b/node_modules/normalize-package-data/lib/extract_description.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/extract_description.js
rename to node_modules/normalize-package-data/lib/extract_description.js
diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/fixer.js b/node_modules/normalize-package-data/lib/fixer.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/fixer.js
rename to node_modules/normalize-package-data/lib/fixer.js
diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/make_warning.js b/node_modules/normalize-package-data/lib/make_warning.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/make_warning.js
rename to node_modules/normalize-package-data/lib/make_warning.js
diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/normalize.js b/node_modules/normalize-package-data/lib/normalize.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/normalize.js
rename to node_modules/normalize-package-data/lib/normalize.js
diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/safe_format.js b/node_modules/normalize-package-data/lib/safe_format.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/safe_format.js
rename to node_modules/normalize-package-data/lib/safe_format.js
diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/typos.json b/node_modules/normalize-package-data/lib/typos.json
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/typos.json
rename to node_modules/normalize-package-data/lib/typos.json
diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/warning_messages.json b/node_modules/normalize-package-data/lib/warning_messages.json
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/normalize-package-data/lib/warning_messages.json
rename to node_modules/normalize-package-data/lib/warning_messages.json
diff --git a/node_modules/@npmcli/package-json/node_modules/normalize-package-data/package.json b/node_modules/normalize-package-data/package.json
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/normalize-package-data/package.json
rename to node_modules/normalize-package-data/package.json
diff --git a/node_modules/pacote/node_modules/normalize-package-data/LICENSE b/node_modules/pacote/node_modules/normalize-package-data/LICENSE
deleted file mode 100644
index 19d1364a8ac08..0000000000000
--- a/node_modules/pacote/node_modules/normalize-package-data/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-This package contains code originally written by Isaac Z. Schlueter.
-Used with permission.
-
-Copyright (c) Meryn Stol ("Author")
-All rights reserved.
-
-The BSD License
-
-Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/extract_description.js b/node_modules/pacote/node_modules/normalize-package-data/lib/extract_description.js
deleted file mode 100644
index 631966b5f29af..0000000000000
--- a/node_modules/pacote/node_modules/normalize-package-data/lib/extract_description.js
+++ /dev/null
@@ -1,24 +0,0 @@
-module.exports = extractDescription
-
-// Extracts description from contents of a readme file in markdown format
-function extractDescription (d) {
-  if (!d) {
-    return
-  }
-  if (d === 'ERROR: No README data found!') {
-    return
-  }
-  // the first block of text before the first heading
-  // that isn't the first line heading
-  d = d.trim().split('\n')
-  let s = 0
-  while (d[s] && d[s].trim().match(/^(#|$)/)) {
-    s++
-  }
-  const l = d.length
-  let e = s + 1
-  while (e < l && d[e].trim()) {
-    e++
-  }
-  return d.slice(s, e).join(' ').trim()
-}
diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/fixer.js b/node_modules/pacote/node_modules/normalize-package-data/lib/fixer.js
deleted file mode 100644
index bb78231d83ca9..0000000000000
--- a/node_modules/pacote/node_modules/normalize-package-data/lib/fixer.js
+++ /dev/null
@@ -1,475 +0,0 @@
-var isValidSemver = require('semver/functions/valid')
-var cleanSemver = require('semver/functions/clean')
-var validateLicense = require('validate-npm-package-license')
-var hostedGitInfo = require('hosted-git-info')
-var isBuiltinModule = require('is-core-module')
-var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies']
-var extractDescription = require('./extract_description')
-var url = require('url')
-var typos = require('./typos.json')
-
-var isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
-
-module.exports = {
-  // default warning function
-  warn: function () {},
-
-  fixRepositoryField: function (data) {
-    if (data.repositories) {
-      this.warn('repositories')
-      data.repository = data.repositories[0]
-    }
-    if (!data.repository) {
-      return this.warn('missingRepository')
-    }
-    if (typeof data.repository === 'string') {
-      data.repository = {
-        type: 'git',
-        url: data.repository,
-      }
-    }
-    var r = data.repository.url || ''
-    if (r) {
-      var hosted = hostedGitInfo.fromUrl(r)
-      if (hosted) {
-        r = data.repository.url
-          = hosted.getDefaultRepresentation() === 'shortcut' ? hosted.https() : hosted.toString()
-      }
-    }
-
-    if (r.match(/github.com\/[^/]+\/[^/]+\.git\.git$/)) {
-      this.warn('brokenGitUrl', r)
-    }
-  },
-
-  fixTypos: function (data) {
-    Object.keys(typos.topLevel).forEach(function (d) {
-      if (Object.prototype.hasOwnProperty.call(data, d)) {
-        this.warn('typo', d, typos.topLevel[d])
-      }
-    }, this)
-  },
-
-  fixScriptsField: function (data) {
-    if (!data.scripts) {
-      return
-    }
-    if (typeof data.scripts !== 'object') {
-      this.warn('nonObjectScripts')
-      delete data.scripts
-      return
-    }
-    Object.keys(data.scripts).forEach(function (k) {
-      if (typeof data.scripts[k] !== 'string') {
-        this.warn('nonStringScript')
-        delete data.scripts[k]
-      } else if (typos.script[k] && !data.scripts[typos.script[k]]) {
-        this.warn('typo', k, typos.script[k], 'scripts')
-      }
-    }, this)
-  },
-
-  fixFilesField: function (data) {
-    var files = data.files
-    if (files && !Array.isArray(files)) {
-      this.warn('nonArrayFiles')
-      delete data.files
-    } else if (data.files) {
-      data.files = data.files.filter(function (file) {
-        if (!file || typeof file !== 'string') {
-          this.warn('invalidFilename', file)
-          return false
-        } else {
-          return true
-        }
-      }, this)
-    }
-  },
-
-  fixBinField: function (data) {
-    if (!data.bin) {
-      return
-    }
-    if (typeof data.bin === 'string') {
-      var b = {}
-      var match
-      if (match = data.name.match(/^@[^/]+[/](.*)$/)) {
-        b[match[1]] = data.bin
-      } else {
-        b[data.name] = data.bin
-      }
-      data.bin = b
-    }
-  },
-
-  fixManField: function (data) {
-    if (!data.man) {
-      return
-    }
-    if (typeof data.man === 'string') {
-      data.man = [data.man]
-    }
-  },
-  fixBundleDependenciesField: function (data) {
-    var bdd = 'bundledDependencies'
-    var bd = 'bundleDependencies'
-    if (data[bdd] && !data[bd]) {
-      data[bd] = data[bdd]
-      delete data[bdd]
-    }
-    if (data[bd] && !Array.isArray(data[bd])) {
-      this.warn('nonArrayBundleDependencies')
-      delete data[bd]
-    } else if (data[bd]) {
-      data[bd] = data[bd].filter(function (filtered) {
-        if (!filtered || typeof filtered !== 'string') {
-          this.warn('nonStringBundleDependency', filtered)
-          return false
-        } else {
-          if (!data.dependencies) {
-            data.dependencies = {}
-          }
-          if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
-            this.warn('nonDependencyBundleDependency', filtered)
-            data.dependencies[filtered] = '*'
-          }
-          return true
-        }
-      }, this)
-    }
-  },
-
-  fixDependencies: function (data, strict) {
-    objectifyDeps(data, this.warn)
-    addOptionalDepsToDeps(data, this.warn)
-    this.fixBundleDependenciesField(data)
-
-    ;['dependencies', 'devDependencies'].forEach(function (deps) {
-      if (!(deps in data)) {
-        return
-      }
-      if (!data[deps] || typeof data[deps] !== 'object') {
-        this.warn('nonObjectDependencies', deps)
-        delete data[deps]
-        return
-      }
-      Object.keys(data[deps]).forEach(function (d) {
-        var r = data[deps][d]
-        if (typeof r !== 'string') {
-          this.warn('nonStringDependency', d, JSON.stringify(r))
-          delete data[deps][d]
-        }
-        var hosted = hostedGitInfo.fromUrl(data[deps][d])
-        if (hosted) {
-          data[deps][d] = hosted.toString()
-        }
-      }, this)
-    }, this)
-  },
-
-  fixModulesField: function (data) {
-    if (data.modules) {
-      this.warn('deprecatedModules')
-      delete data.modules
-    }
-  },
-
-  fixKeywordsField: function (data) {
-    if (typeof data.keywords === 'string') {
-      data.keywords = data.keywords.split(/,\s+/)
-    }
-    if (data.keywords && !Array.isArray(data.keywords)) {
-      delete data.keywords
-      this.warn('nonArrayKeywords')
-    } else if (data.keywords) {
-      data.keywords = data.keywords.filter(function (kw) {
-        if (typeof kw !== 'string' || !kw) {
-          this.warn('nonStringKeyword')
-          return false
-        } else {
-          return true
-        }
-      }, this)
-    }
-  },
-
-  fixVersionField: function (data, strict) {
-    // allow "loose" semver 1.0 versions in non-strict mode
-    // enforce strict semver 2.0 compliance in strict mode
-    var loose = !strict
-    if (!data.version) {
-      data.version = ''
-      return true
-    }
-    if (!isValidSemver(data.version, loose)) {
-      throw new Error('Invalid version: "' + data.version + '"')
-    }
-    data.version = cleanSemver(data.version, loose)
-    return true
-  },
-
-  fixPeople: function (data) {
-    modifyPeople(data, unParsePerson)
-    modifyPeople(data, parsePerson)
-  },
-
-  fixNameField: function (data, options) {
-    if (typeof options === 'boolean') {
-      options = { strict: options }
-    } else if (typeof options === 'undefined') {
-      options = {}
-    }
-    var strict = options.strict
-    if (!data.name && !strict) {
-      data.name = ''
-      return
-    }
-    if (typeof data.name !== 'string') {
-      throw new Error('name field must be a string.')
-    }
-    if (!strict) {
-      data.name = data.name.trim()
-    }
-    ensureValidName(data.name, strict, options.allowLegacyCase)
-    if (isBuiltinModule(data.name)) {
-      this.warn('conflictingName', data.name)
-    }
-  },
-
-  fixDescriptionField: function (data) {
-    if (data.description && typeof data.description !== 'string') {
-      this.warn('nonStringDescription')
-      delete data.description
-    }
-    if (data.readme && !data.description) {
-      data.description = extractDescription(data.readme)
-    }
-    if (data.description === undefined) {
-      delete data.description
-    }
-    if (!data.description) {
-      this.warn('missingDescription')
-    }
-  },
-
-  fixReadmeField: function (data) {
-    if (!data.readme) {
-      this.warn('missingReadme')
-      data.readme = 'ERROR: No README data found!'
-    }
-  },
-
-  fixBugsField: function (data) {
-    if (!data.bugs && data.repository && data.repository.url) {
-      var hosted = hostedGitInfo.fromUrl(data.repository.url)
-      if (hosted && hosted.bugs()) {
-        data.bugs = { url: hosted.bugs() }
-      }
-    } else if (data.bugs) {
-      if (typeof data.bugs === 'string') {
-        if (isEmail(data.bugs)) {
-          data.bugs = { email: data.bugs }
-        /* eslint-disable-next-line node/no-deprecated-api */
-        } else if (url.parse(data.bugs).protocol) {
-          data.bugs = { url: data.bugs }
-        } else {
-          this.warn('nonEmailUrlBugsString')
-        }
-      } else {
-        bugsTypos(data.bugs, this.warn)
-        var oldBugs = data.bugs
-        data.bugs = {}
-        if (oldBugs.url) {
-          /* eslint-disable-next-line node/no-deprecated-api */
-          if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
-            data.bugs.url = oldBugs.url
-          } else {
-            this.warn('nonUrlBugsUrlField')
-          }
-        }
-        if (oldBugs.email) {
-          if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
-            data.bugs.email = oldBugs.email
-          } else {
-            this.warn('nonEmailBugsEmailField')
-          }
-        }
-      }
-      if (!data.bugs.email && !data.bugs.url) {
-        delete data.bugs
-        this.warn('emptyNormalizedBugs')
-      }
-    }
-  },
-
-  fixHomepageField: function (data) {
-    if (!data.homepage && data.repository && data.repository.url) {
-      var hosted = hostedGitInfo.fromUrl(data.repository.url)
-      if (hosted && hosted.docs()) {
-        data.homepage = hosted.docs()
-      }
-    }
-    if (!data.homepage) {
-      return
-    }
-
-    if (typeof data.homepage !== 'string') {
-      this.warn('nonUrlHomepage')
-      return delete data.homepage
-    }
-    /* eslint-disable-next-line node/no-deprecated-api */
-    if (!url.parse(data.homepage).protocol) {
-      data.homepage = 'http://' + data.homepage
-    }
-  },
-
-  fixLicenseField: function (data) {
-    const license = data.license || data.licence
-    if (!license) {
-      return this.warn('missingLicense')
-    }
-    if (
-      typeof (license) !== 'string' ||
-      license.length < 1 ||
-      license.trim() === ''
-    ) {
-      return this.warn('invalidLicense')
-    }
-    if (!validateLicense(license).validForNewPackages) {
-      return this.warn('invalidLicense')
-    }
-  },
-}
-
-function isValidScopedPackageName (spec) {
-  if (spec.charAt(0) !== '@') {
-    return false
-  }
-
-  var rest = spec.slice(1).split('/')
-  if (rest.length !== 2) {
-    return false
-  }
-
-  return rest[0] && rest[1] &&
-    rest[0] === encodeURIComponent(rest[0]) &&
-    rest[1] === encodeURIComponent(rest[1])
-}
-
-function isCorrectlyEncodedName (spec) {
-  return !spec.match(/[/@\s+%:]/) &&
-    spec === encodeURIComponent(spec)
-}
-
-function ensureValidName (name, strict, allowLegacyCase) {
-  if (name.charAt(0) === '.' ||
-      !(isValidScopedPackageName(name) || isCorrectlyEncodedName(name)) ||
-      (strict && (!allowLegacyCase) && name !== name.toLowerCase()) ||
-      name.toLowerCase() === 'node_modules' ||
-      name.toLowerCase() === 'favicon.ico') {
-    throw new Error('Invalid name: ' + JSON.stringify(name))
-  }
-}
-
-function modifyPeople (data, fn) {
-  if (data.author) {
-    data.author = fn(data.author)
-  }['maintainers', 'contributors'].forEach(function (set) {
-    if (!Array.isArray(data[set])) {
-      return
-    }
-    data[set] = data[set].map(fn)
-  })
-  return data
-}
-
-function unParsePerson (person) {
-  if (typeof person === 'string') {
-    return person
-  }
-  var name = person.name || ''
-  var u = person.url || person.web
-  var wrappedUrl = u ? (' (' + u + ')') : ''
-  var e = person.email || person.mail
-  var wrappedEmail = e ? (' <' + e + '>') : ''
-  return name + wrappedEmail + wrappedUrl
-}
-
-function parsePerson (person) {
-  if (typeof person !== 'string') {
-    return person
-  }
-  var matchedName = person.match(/^([^(<]+)/)
-  var matchedUrl = person.match(/\(([^()]+)\)/)
-  var matchedEmail = person.match(/<([^<>]+)>/)
-  var obj = {}
-  if (matchedName && matchedName[0].trim()) {
-    obj.name = matchedName[0].trim()
-  }
-  if (matchedEmail) {
-    obj.email = matchedEmail[1]
-  }
-  if (matchedUrl) {
-    obj.url = matchedUrl[1]
-  }
-  return obj
-}
-
-function addOptionalDepsToDeps (data, warn) {
-  var o = data.optionalDependencies
-  if (!o) {
-    return
-  }
-  var d = data.dependencies || {}
-  Object.keys(o).forEach(function (k) {
-    d[k] = o[k]
-  })
-  data.dependencies = d
-}
-
-function depObjectify (deps, type, warn) {
-  if (!deps) {
-    return {}
-  }
-  if (typeof deps === 'string') {
-    deps = deps.trim().split(/[\n\r\s\t ,]+/)
-  }
-  if (!Array.isArray(deps)) {
-    return deps
-  }
-  warn('deprecatedArrayDependencies', type)
-  var o = {}
-  deps.filter(function (d) {
-    return typeof d === 'string'
-  }).forEach(function (d) {
-    d = d.trim().split(/(:?[@\s><=])/)
-    var dn = d.shift()
-    var dv = d.join('')
-    dv = dv.trim()
-    dv = dv.replace(/^@/, '')
-    o[dn] = dv
-  })
-  return o
-}
-
-function objectifyDeps (data, warn) {
-  depTypes.forEach(function (type) {
-    if (!data[type]) {
-      return
-    }
-    data[type] = depObjectify(data[type], type, warn)
-  })
-}
-
-function bugsTypos (bugs, warn) {
-  if (!bugs) {
-    return
-  }
-  Object.keys(bugs).forEach(function (k) {
-    if (typos.bugs[k]) {
-      warn('typo', k, typos.bugs[k], 'bugs')
-      bugs[typos.bugs[k]] = bugs[k]
-      delete bugs[k]
-    }
-  })
-}
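
The same fixer.js copy vendored under pacote is removed here. One more behavior worth noting from it: depObjectify converts the long-deprecated array form of a dependencies field into an object (an empty version string means "any"), warning as it goes. A minimal sketch via the exported fixer, with made-up dependency specs:

// Illustrative only; the dependency values are hypothetical examples.
const normalize = require('normalize-package-data')

const data = { dependencies: ['foo@^1.0.0', 'bar'] }
normalize.fixer.warn = console.warn // logs: deprecatedArrayDependencies dependencies
normalize.fixer.fixDependencies(data)
console.log(data.dependencies) // => { foo: '^1.0.0', bar: '' }
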
diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/make_warning.js b/node_modules/pacote/node_modules/normalize-package-data/lib/make_warning.js
deleted file mode 100644
index 3be9c86539952..0000000000000
--- a/node_modules/pacote/node_modules/normalize-package-data/lib/make_warning.js
+++ /dev/null
@@ -1,22 +0,0 @@
-var util = require('util')
-var messages = require('./warning_messages.json')
-
-module.exports = function () {
-  var args = Array.prototype.slice.call(arguments, 0)
-  var warningName = args.shift()
-  if (warningName === 'typo') {
-    return makeTypoWarning.apply(null, args)
-  } else {
-    var msgTemplate = messages[warningName] ? messages[warningName] : warningName + ": '%s'"
-    args.unshift(msgTemplate)
-    return util.format.apply(null, args)
-  }
-}
-
-function makeTypoWarning (providedName, probableName, field) {
-  if (field) {
-    providedName = field + "['" + providedName + "']"
-    probableName = field + "['" + probableName + "']"
-  }
-  return util.format(messages.typo, providedName, probableName)
-}
diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/normalize.js b/node_modules/pacote/node_modules/normalize-package-data/lib/normalize.js
deleted file mode 100644
index bf71d2c1e2235..0000000000000
--- a/node_modules/pacote/node_modules/normalize-package-data/lib/normalize.js
+++ /dev/null
@@ -1,48 +0,0 @@
-module.exports = normalize
-
-var fixer = require('./fixer')
-normalize.fixer = fixer
-
-var makeWarning = require('./make_warning')
-
-var fieldsToFix = ['name', 'version', 'description', 'repository', 'modules', 'scripts',
-  'files', 'bin', 'man', 'bugs', 'keywords', 'readme', 'homepage', 'license']
-var otherThingsToFix = ['dependencies', 'people', 'typos']
-
-var thingsToFix = fieldsToFix.map(function (fieldName) {
-  return ucFirst(fieldName) + 'Field'
-})
-// two ways to do this in CoffeeScript on only one line, sub-70 chars:
-// thingsToFix = fieldsToFix.map (name) -> ucFirst(name) + "Field"
-// thingsToFix = (ucFirst(name) + "Field" for name in fieldsToFix)
-thingsToFix = thingsToFix.concat(otherThingsToFix)
-
-function normalize (data, warn, strict) {
-  if (warn === true) {
-    warn = null
-    strict = true
-  }
-  if (!strict) {
-    strict = false
-  }
-  if (!warn || data.private) {
-    warn = function (msg) { /* noop */ }
-  }
-
-  if (data.scripts &&
-      data.scripts.install === 'node-gyp rebuild' &&
-      !data.scripts.preinstall) {
-    data.gypfile = true
-  }
-  fixer.warn = function () {
-    warn(makeWarning.apply(null, arguments))
-  }
-  thingsToFix.forEach(function (thingName) {
-    fixer['fix' + ucFirst(thingName)](data, strict)
-  })
-  data._id = data.name + '@' + data.version
-}
-
-function ucFirst (string) {
-  return string.charAt(0).toUpperCase() + string.slice(1)
-}
diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/safe_format.js b/node_modules/pacote/node_modules/normalize-package-data/lib/safe_format.js
deleted file mode 100644
index 5fc888e5450cd..0000000000000
--- a/node_modules/pacote/node_modules/normalize-package-data/lib/safe_format.js
+++ /dev/null
@@ -1,11 +0,0 @@
-var util = require('util')
-
-module.exports = function () {
-  var args = Array.prototype.slice.call(arguments, 0)
-  args.forEach(function (arg) {
-    if (!arg) {
-      throw new TypeError('Bad arguments.')
-    }
-  })
-  return util.format.apply(null, arguments)
-}
diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/typos.json b/node_modules/pacote/node_modules/normalize-package-data/lib/typos.json
deleted file mode 100644
index 7f9dd283b30ff..0000000000000
--- a/node_modules/pacote/node_modules/normalize-package-data/lib/typos.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
-  "topLevel": {
-    "dependancies": "dependencies"
-   ,"dependecies": "dependencies"
-   ,"depdenencies": "dependencies"
-   ,"devEependencies": "devDependencies"
-   ,"depends": "dependencies"
-   ,"dev-dependencies": "devDependencies"
-   ,"devDependences": "devDependencies"
-   ,"devDepenencies": "devDependencies"
-   ,"devdependencies": "devDependencies"
-   ,"repostitory": "repository"
-   ,"repo": "repository"
-   ,"prefereGlobal": "preferGlobal"
-   ,"hompage": "homepage"
-   ,"hampage": "homepage"
-   ,"autohr": "author"
-   ,"autor": "author"
-   ,"contributers": "contributors"
-   ,"publicationConfig": "publishConfig"
-   ,"script": "scripts"
-  },
-  "bugs": { "web": "url", "name": "url" },
-  "script": { "server": "start", "tests": "test" }
-}
diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/warning_messages.json b/node_modules/pacote/node_modules/normalize-package-data/lib/warning_messages.json
deleted file mode 100644
index 4890f506ed965..0000000000000
--- a/node_modules/pacote/node_modules/normalize-package-data/lib/warning_messages.json
+++ /dev/null
@@ -1,30 +0,0 @@
-{
-  "repositories": "'repositories' (plural) Not supported. Please pick one as the 'repository' field"
-  ,"missingRepository": "No repository field."
-  ,"brokenGitUrl": "Probably broken git url: %s"
-  ,"nonObjectScripts": "scripts must be an object"
-  ,"nonStringScript": "script values must be string commands"
-  ,"nonArrayFiles": "Invalid 'files' member"
-  ,"invalidFilename": "Invalid filename in 'files' list: %s"
-  ,"nonArrayBundleDependencies": "Invalid 'bundleDependencies' list. Must be array of package names"
-  ,"nonStringBundleDependency": "Invalid bundleDependencies member: %s"
-  ,"nonDependencyBundleDependency": "Non-dependency in bundleDependencies: %s"
-  ,"nonObjectDependencies": "%s field must be an object"
-  ,"nonStringDependency": "Invalid dependency: %s %s"
-  ,"deprecatedArrayDependencies": "specifying %s as array is deprecated"
-  ,"deprecatedModules": "modules field is deprecated"
-  ,"nonArrayKeywords": "keywords should be an array of strings"
-  ,"nonStringKeyword": "keywords should be an array of strings"
-  ,"conflictingName": "%s is also the name of a node core module."
-  ,"nonStringDescription": "'description' field should be a string"
-  ,"missingDescription": "No description"
-  ,"missingReadme": "No README data"
-  ,"missingLicense": "No license field."
-  ,"nonEmailUrlBugsString": "Bug string field must be url, email, or {email,url}"
-  ,"nonUrlBugsUrlField": "bugs.url field must be a string url. Deleted."
-  ,"nonEmailBugsEmailField": "bugs.email field must be a string email. Deleted."
-  ,"emptyNormalizedBugs": "Normalized value of bugs field is an empty object. Deleted."
-  ,"nonUrlHomepage": "homepage field must be a string url. Deleted."
-  ,"invalidLicense": "license should be a valid SPDX license expression"
-  ,"typo": "%s should probably be %s."
-}
diff --git a/node_modules/pacote/node_modules/normalize-package-data/package.json b/node_modules/pacote/node_modules/normalize-package-data/package.json
deleted file mode 100644
index 48d2371d4a66b..0000000000000
--- a/node_modules/pacote/node_modules/normalize-package-data/package.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
-  "name": "normalize-package-data",
-  "version": "6.0.0",
-  "author": "GitHub Inc.",
-  "description": "Normalizes data that can be found in package.json files.",
-  "license": "BSD-2-Clause",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/normalize-package-data.git"
-  },
-  "main": "lib/normalize.js",
-  "scripts": {
-    "test": "tap",
-    "npmclilint": "npmcli-lint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "dependencies": {
-    "hosted-git-info": "^7.0.0",
-    "is-core-module": "^2.8.1",
-    "semver": "^7.3.5",
-    "validate-npm-package-license": "^3.0.4"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.0.1"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^16.14.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.18.0",
-    "publish": "true",
-    "ciVersions": [
-      "16.14.0",
-      "16.x",
-      "18.0.0",
-      "18.x"
-    ]
-  },
-  "tap": {
-    "branches": 86,
-    "functions": 92,
-    "lines": 86,
-    "statements": 86,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/package-lock.json b/package-lock.json
index 9cec079822a5c..4ca183ef1110b 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -234,7 +234,7 @@
         "tap": "^16.3.4"
       },
       "engines": {
-        "node": "^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/@actions/core": {
@@ -2517,21 +2517,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/package-json/node_modules/normalize-package-data": {
-      "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.0.tgz",
-      "integrity": "sha512-UL7ELRVxYBHBgYEtZCXjxuD5vPxnmvMGq0jp/dGPKKrN7tfsBh2IY7TlJ15WWwdjRWD3RJbnsygUurTK3xkPkg==",
-      "inBundle": true,
-      "dependencies": {
-        "hosted-git-info": "^7.0.0",
-        "is-core-module": "^2.8.1",
-        "semver": "^7.3.5",
-        "validate-npm-package-license": "^3.0.4"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
     "node_modules/@npmcli/promise-spawn": {
       "version": "6.0.2",
       "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-6.0.2.tgz",
@@ -2675,6 +2660,21 @@
         "node": ">=12"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/normalize-package-data": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-5.0.0.tgz",
+      "integrity": "sha512-h9iPVIfrVZ9wVYQnxFgtw1ugSvGEMOlyPWWtm8BMJhnwyEL/FLbYbTY3V3PpjI/BUK67n9PEWDu6eHzu1fB15Q==",
+      "dev": true,
+      "dependencies": {
+        "hosted-git-info": "^6.0.0",
+        "is-core-module": "^2.8.1",
+        "semver": "^7.3.5",
+        "validate-npm-package-license": "^3.0.4"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/npm-package-arg": {
       "version": "10.1.0",
       "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
@@ -6745,21 +6745,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/init-package-json/node_modules/normalize-package-data": {
-      "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.0.tgz",
-      "integrity": "sha512-UL7ELRVxYBHBgYEtZCXjxuD5vPxnmvMGq0jp/dGPKKrN7tfsBh2IY7TlJ15WWwdjRWD3RJbnsygUurTK3xkPkg==",
-      "inBundle": true,
-      "dependencies": {
-        "hosted-git-info": "^7.0.0",
-        "is-core-module": "^2.8.1",
-        "semver": "^7.3.5",
-        "validate-npm-package-license": "^3.0.4"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
     "node_modules/init-package-json/node_modules/read-package-json": {
       "version": "7.0.0",
       "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-7.0.0.tgz",
@@ -9732,39 +9717,18 @@
       }
     },
     "node_modules/normalize-package-data": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-5.0.0.tgz",
-      "integrity": "sha512-h9iPVIfrVZ9wVYQnxFgtw1ugSvGEMOlyPWWtm8BMJhnwyEL/FLbYbTY3V3PpjI/BUK67n9PEWDu6eHzu1fB15Q==",
-      "dev": true,
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.0.tgz",
+      "integrity": "sha512-UL7ELRVxYBHBgYEtZCXjxuD5vPxnmvMGq0jp/dGPKKrN7tfsBh2IY7TlJ15WWwdjRWD3RJbnsygUurTK3xkPkg==",
+      "inBundle": true,
       "dependencies": {
-        "hosted-git-info": "^6.0.0",
+        "hosted-git-info": "^7.0.0",
         "is-core-module": "^2.8.1",
         "semver": "^7.3.5",
         "validate-npm-package-license": "^3.0.4"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/normalize-package-data/node_modules/hosted-git-info": {
-      "version": "6.1.1",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
-      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
-      "dev": true,
-      "dependencies": {
-        "lru-cache": "^7.5.1"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/normalize-package-data/node_modules/lru-cache": {
-      "version": "7.18.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
-      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
-      "dev": true,
-      "engines": {
-        "node": ">=12"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/normalize-path": {
@@ -10565,21 +10529,6 @@
         "node": ">=12"
       }
     },
-    "node_modules/pacote/node_modules/normalize-package-data": {
-      "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.0.tgz",
-      "integrity": "sha512-UL7ELRVxYBHBgYEtZCXjxuD5vPxnmvMGq0jp/dGPKKrN7tfsBh2IY7TlJ15WWwdjRWD3RJbnsygUurTK3xkPkg==",
-      "inBundle": true,
-      "dependencies": {
-        "hosted-git-info": "^7.0.0",
-        "is-core-module": "^2.8.1",
-        "semver": "^7.3.5",
-        "validate-npm-package-license": "^3.0.4"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
     "node_modules/pacote/node_modules/npm-pick-manifest": {
       "version": "8.0.2",
       "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz",
@@ -16243,7 +16192,7 @@
         "tcompare": "^5.0.6"
       },
       "engines": {
-        "node": "^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "workspaces/config": {
@@ -16285,7 +16234,7 @@
         "tap": "^16.3.4"
       },
       "engines": {
-        "node": "^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "workspaces/libnpmdiff": {
@@ -16308,7 +16257,7 @@
         "tap": "^16.3.4"
       },
       "engines": {
-        "node": "^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "workspaces/libnpmexec": {
@@ -16338,7 +16287,7 @@
         "tap": "^16.3.4"
       },
       "engines": {
-        "node": "^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "workspaces/libnpmfund": {
@@ -16370,7 +16319,7 @@
         "tap": "^16.3.4"
       },
       "engines": {
-        "node": "^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "workspaces/libnpmorg": {
@@ -16388,7 +16337,7 @@
         "tap": "^16.3.4"
       },
       "engines": {
-        "node": "^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "workspaces/libnpmpack": {
@@ -16408,7 +16357,7 @@
         "tap": "^16.3.4"
       },
       "engines": {
-        "node": "^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "workspaces/libnpmpublish": {
@@ -16432,20 +16381,6 @@
         "nock": "^13.3.0",
         "tap": "^16.3.4"
       },
-      "engines": {
-        "node": "^16.13.0 || >=18.0.0"
-      }
-    },
-    "workspaces/libnpmpublish/node_modules/normalize-package-data": {
-      "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.0.tgz",
-      "integrity": "sha512-UL7ELRVxYBHBgYEtZCXjxuD5vPxnmvMGq0jp/dGPKKrN7tfsBh2IY7TlJ15WWwdjRWD3RJbnsygUurTK3xkPkg==",
-      "dependencies": {
-        "hosted-git-info": "^7.0.0",
-        "is-core-module": "^2.8.1",
-        "semver": "^7.3.5",
-        "validate-npm-package-license": "^3.0.4"
-      },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
       }
@@ -16463,7 +16398,7 @@
         "tap": "^16.3.4"
       },
       "engines": {
-        "node": "^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "workspaces/libnpmteam": {
@@ -16480,7 +16415,7 @@
         "tap": "^16.3.4"
       },
       "engines": {
-        "node": "^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "workspaces/libnpmversion": {
@@ -16500,7 +16435,7 @@
         "tap": "^16.3.4"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     }
   }
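
Aside: the recurring engines edit in this patch drops Node 14 support and raises the 16.x floor to 16.14.0. A minimal sketch of how such a range is evaluated, assuming the semver package that npm itself depends on; the versions tested are illustrative:

const semver = require('semver')

// '^16.14.0 || >=18.0.0' accepts 16.14+ within major 16, or any 18+
console.log(semver.satisfies('16.13.2', '^16.14.0 || >=18.0.0')) // false
console.log(semver.satisfies('16.14.0', '^16.14.0 || >=18.0.0')) // true
console.log(semver.satisfies('18.17.1', '^16.14.0 || >=18.0.0')) // true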

From a87de290e200de45508f27f419dd581d257bf4a0 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 14:26:11 -0700
Subject: [PATCH 44/68] deps: hoist read-package-json@7.0.0

---
 DEPENDENCIES.md                               |   4 +-
 node_modules/.gitignore                       |   5 +-
 .../node_modules/read-package-json/LICENSE    |  15 -
 .../read-package-json/lib/read-json.js        | 589 ------------------
 .../read-package-json/package.json            |  65 --
 .../read-package-json/LICENSE                 |   0
 .../read-package-json/lib/read-json.js        |   0
 .../read-package-json/package.json            |   0
 package-lock.json                             |  45 +-
 9 files changed, 18 insertions(+), 705 deletions(-)
 delete mode 100644 node_modules/pacote/node_modules/read-package-json/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/read-package-json/lib/read-json.js
 delete mode 100644 node_modules/pacote/node_modules/read-package-json/package.json
 rename node_modules/{init-package-json/node_modules => }/read-package-json/LICENSE (100%)
 rename node_modules/{init-package-json/node_modules => }/read-package-json/lib/read-json.js (100%)
 rename node_modules/{init-package-json/node_modules => }/read-package-json/package.json (100%)

diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 5d58526871a86..46f0bfb3a4620 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -107,7 +107,6 @@ graph LR;
   npm-->libnpmversion;
   npm-->make-fetch-happen;
   npm-->nopt;
-  npm-->normalize-package-data;
   npm-->npm-audit-report;
   npm-->npm-install-checks;
   npm-->npm-package-arg;
@@ -134,6 +133,7 @@ graph LR;
   npm-->pacote;
   npm-->parse-conflict-json;
   npm-->proc-log;
+  npm-->read-package-json;
   npm-->read;
   npm-->semver;
   npm-->ssri;
@@ -533,7 +533,6 @@ graph LR;
   npm-->nock;
   npm-->node-gyp;
   npm-->nopt;
-  npm-->normalize-package-data;
   npm-->npm-audit-report;
   npm-->npm-install-checks;
   npm-->npm-package-arg;
@@ -562,6 +561,7 @@ graph LR;
   npm-->parse-conflict-json;
   npm-->proc-log;
   npm-->qrcode-terminal;
+  npm-->read-package-json;
   npm-->read;
   npm-->remark-gfm;
   npm-->remark-github;
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index bcdef42e99c83..25a9780a8417b 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -123,9 +123,6 @@
 !/inherits
 !/ini
 !/init-package-json
-!/init-package-json/node_modules/
-/init-package-json/node_modules/*
-!/init-package-json/node_modules/read-package-json
 !/ip-regex
 !/ip
 !/is-cidr
@@ -230,7 +227,6 @@
 /pacote/node_modules/npm-pick-manifest/node_modules/*
 !/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info
 !/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg
-!/pacote/node_modules/read-package-json
 !/parse-conflict-json
 !/path-is-absolute
 !/path-key
@@ -246,6 +242,7 @@
 !/qrcode-terminal
 !/read-cmd-shim
 !/read-package-json-fast
+!/read-package-json
 !/read
 !/readable-stream
 !/retry
diff --git a/node_modules/pacote/node_modules/read-package-json/LICENSE b/node_modules/pacote/node_modules/read-package-json/LICENSE
deleted file mode 100644
index 052085c436514..0000000000000
--- a/node_modules/pacote/node_modules/read-package-json/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/read-package-json/lib/read-json.js b/node_modules/pacote/node_modules/read-package-json/lib/read-json.js
deleted file mode 100644
index d35f09ebd208f..0000000000000
--- a/node_modules/pacote/node_modules/read-package-json/lib/read-json.js
+++ /dev/null
@@ -1,589 +0,0 @@
-var fs = require('fs')
-
-var path = require('path')
-
-var { glob } = require('glob')
-var normalizeData = require('normalize-package-data')
-var safeJSON = require('json-parse-even-better-errors')
-var util = require('util')
-var normalizePackageBin = require('npm-normalize-package-bin')
-
-module.exports = readJson
-
-// put more stuff on here to customize.
-readJson.extraSet = [
-  bundleDependencies,
-  gypfile,
-  serverjs,
-  scriptpath,
-  authors,
-  readme,
-  mans,
-  bins,
-  githead,
-  fillTypes,
-]
-
-var typoWarned = {}
-var cache = {}
-
-function readJson (file, log_, strict_, cb_) {
-  var log, strict, cb
-  for (var i = 1; i < arguments.length - 1; i++) {
-    if (typeof arguments[i] === 'boolean') {
-      strict = arguments[i]
-    } else if (typeof arguments[i] === 'function') {
-      log = arguments[i]
-    }
-  }
-
-  if (!log) {
-    log = function () {}
-  }
-  cb = arguments[arguments.length - 1]
-
-  readJson_(file, log, strict, cb)
-}
-
-function readJson_ (file, log, strict, cb) {
-  fs.readFile(file, 'utf8', function (er, d) {
-    parseJson(file, er, d, log, strict, cb)
-  })
-}
-
-function stripBOM (content) {
-  // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
-  // because the buffer-to-string conversion in `fs.readFileSync()`
-  // translates it to FEFF, the UTF-16 BOM.
-  if (content.charCodeAt(0) === 0xFEFF) {
-    content = content.slice(1)
-  }
-  return content
-}
-
-function jsonClone (obj) {
-  if (obj == null) {
-    return obj
-  } else if (Array.isArray(obj)) {
-    var newarr = new Array(obj.length)
-    for (var ii in obj) {
-      newarr[ii] = jsonClone(obj[ii])
-    }
-    return newarr
-  } else if (typeof obj === 'object') {
-    var newobj = {}
-    for (var kk in obj) {
-      newobj[kk] = jsonClone(obj[kk])
-    }
-    return newobj
-  } else {
-    return obj
-  }
-}
-
-function parseJson (file, er, d, log, strict, cb) {
-  if (er && er.code === 'ENOENT') {
-    return fs.stat(path.dirname(file), function (err, stat) {
-      if (!err && stat && !stat.isDirectory()) {
-        // ENOTDIR isn't used on Windows, but npm expects it.
-        er = Object.create(er)
-        er.code = 'ENOTDIR'
-        return cb(er)
-      } else {
-        return indexjs(file, er, log, strict, cb)
-      }
-    })
-  }
-  if (er) {
-    return cb(er)
-  }
-
-  if (cache[d]) {
-    return cb(null, jsonClone(cache[d]))
-  }
-
-  var data
-
-  try {
-    data = safeJSON(stripBOM(d))
-    for (var key in data) {
-      if (/^_/.test(key)) {
-        delete data[key]
-      }
-    }
-  } catch (jsonErr) {
-    data = parseIndex(d)
-    if (!data) {
-      return cb(parseError(jsonErr, file))
-    }
-  }
-  extrasCached(file, d, data, log, strict, cb)
-}
-
-function extrasCached (file, d, data, log, strict, cb) {
-  extras(file, data, log, strict, function (err, extrasData) {
-    if (!err) {
-      cache[d] = jsonClone(extrasData)
-    }
-    cb(err, extrasData)
-  })
-}
-
-function indexjs (file, er, log, strict, cb) {
-  if (path.basename(file) === 'index.js') {
-    return cb(er)
-  }
-
-  var index = path.resolve(path.dirname(file), 'index.js')
-  fs.readFile(index, 'utf8', function (er2, d) {
-    if (er2) {
-      return cb(er)
-    }
-
-    if (cache[d]) {
-      return cb(null, cache[d])
-    }
-
-    var data = parseIndex(d)
-    if (!data) {
-      return cb(er)
-    }
-
-    extrasCached(file, d, data, log, strict, cb)
-  })
-}
-
-readJson.extras = extras
-function extras (file, data, log_, strict_, cb_) {
-  var log, strict, cb
-  for (var i = 2; i < arguments.length - 1; i++) {
-    if (typeof arguments[i] === 'boolean') {
-      strict = arguments[i]
-    } else if (typeof arguments[i] === 'function') {
-      log = arguments[i]
-    }
-  }
-
-  if (!log) {
-    log = function () {}
-  }
-  cb = arguments[i]
-
-  var set = readJson.extraSet
-  var n = set.length
-  var errState = null
-  set.forEach(function (fn) {
-    fn(file, data, then)
-  })
-
-  function then (er) {
-    if (errState) {
-      return
-    }
-    if (er) {
-      return cb(errState = er)
-    }
-    if (--n > 0) {
-      return
-    }
-    final(file, data, log, strict, cb)
-  }
-}
-
-function scriptpath (file, data, cb) {
-  if (!data.scripts) {
-    return cb(null, data)
-  }
-  var k = Object.keys(data.scripts)
-  k.forEach(scriptpath_, data.scripts)
-  cb(null, data)
-}
-
-function scriptpath_ (key) {
-  var s = this[key]
-  // This is never allowed, and only causes problems
-  if (typeof s !== 'string') {
-    return delete this[key]
-  }
-
-  var spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
-  if (s.match(spre)) {
-    this[key] = this[key].replace(spre, '')
-  }
-}
-
-function gypfile (file, data, cb) {
-  var dir = path.dirname(file)
-  var s = data.scripts || {}
-  if (s.install || s.preinstall) {
-    return cb(null, data)
-  }
-
-  if (data.gypfile === false) {
-    return cb(null, data)
-  }
-  glob('*.gyp', { cwd: dir })
-    .then(files => gypfile_(file, data, files, cb))
-    .catch(er => cb(er))
-}
-
-function gypfile_ (file, data, files, cb) {
-  if (!files.length) {
-    return cb(null, data)
-  }
-  var s = data.scripts || {}
-  s.install = 'node-gyp rebuild'
-  data.scripts = s
-  data.gypfile = true
-  return cb(null, data)
-}
-
-function serverjs (file, data, cb) {
-  var dir = path.dirname(file)
-  var s = data.scripts || {}
-  if (s.start) {
-    return cb(null, data)
-  }
-  fs.access(path.join(dir, 'server.js'), (err) => {
-    if (!err) {
-      s.start = 'node server.js'
-      data.scripts = s
-    }
-    return cb(null, data)
-  })
-}
-
-function authors (file, data, cb) {
-  if (data.contributors) {
-    return cb(null, data)
-  }
-  var af = path.resolve(path.dirname(file), 'AUTHORS')
-  fs.readFile(af, 'utf8', function (er, ad) {
-    // ignore error.  just checking it.
-    if (er) {
-      return cb(null, data)
-    }
-    authors_(file, data, ad, cb)
-  })
-}
-
-function authors_ (file, data, ad, cb) {
-  ad = ad.split(/\r?\n/g).map(function (line) {
-    return line.replace(/^\s*#.*$/, '').trim()
-  }).filter(function (line) {
-    return line
-  })
-  data.contributors = ad
-  return cb(null, data)
-}
-
-function readme (file, data, cb) {
-  if (data.readme) {
-    return cb(null, data)
-  }
-  var dir = path.dirname(file)
-  var globOpts = { cwd: dir, nocase: true, mark: true }
-  glob('{README,README.*}', globOpts)
-    .then(files => {
-      // don't accept directories.
-      files = files.filter(function (filtered) {
-        return !filtered.match(/\/$/)
-      })
-      if (!files.length) {
-        return cb()
-      }
-      var fn = preferMarkdownReadme(files)
-      var rm = path.resolve(dir, fn)
-      return readme_(file, data, rm, cb)
-    })
-    .catch(er => cb(er))
-}
-
-function preferMarkdownReadme (files) {
-  var fallback = 0
-  var re = /\.m?a?r?k?d?o?w?n?$/i
-  for (var i = 0; i < files.length; i++) {
-    if (files[i].match(re)) {
-      return files[i]
-    } else if (files[i].match(/README$/)) {
-      fallback = i
-    }
-  }
-  // prefer README.md, followed by README; otherwise, return
-  // the first filename (which could be README)
-  return files[fallback]
-}
-
-function readme_ (file, data, rm, cb) {
-  var rmfn = path.basename(rm)
-  fs.readFile(rm, 'utf8', function (er, rmData) {
-    // maybe not readable, or something.
-    if (er) {
-      return cb()
-    }
-    data.readme = rmData
-    data.readmeFilename = rmfn
-    return cb(er, data)
-  })
-}
-
-function mans (file, data, cb) {
-  let cwd = data.directories && data.directories.man
-  if (data.man || !cwd) {
-    return cb(null, data)
-  }
-  const dirname = path.dirname(file)
-  cwd = path.resolve(path.dirname(file), cwd)
-  glob('**/*.[0-9]', { cwd })
-    .then(mansGlob => {
-      data.man = mansGlob.map(man =>
-        path.relative(dirname, path.join(cwd, man)).split(path.sep).join('/')
-      )
-      return cb(null, data)
-    })
-    .catch(er => cb(er))
-}
-
-function bins (file, data, cb) {
-  data = normalizePackageBin(data)
-
-  var m = data.directories && data.directories.bin
-  if (data.bin || !m) {
-    return cb(null, data)
-  }
-
-  m = path.resolve(path.dirname(file), path.join('.', path.join('/', m)))
-  glob('**', { cwd: m })
-    .then(binsGlob => bins_(file, data, binsGlob, cb))
-    .catch(er => cb(er))
-}
-
-function bins_ (file, data, binsGlob, cb) {
-  var m = (data.directories && data.directories.bin) || '.'
-  data.bin = binsGlob.reduce(function (acc, mf) {
-    if (mf && mf.charAt(0) !== '.') {
-      var f = path.basename(mf)
-      acc[f] = path.join(m, mf)
-    }
-    return acc
-  }, {})
-  return cb(null, normalizePackageBin(data))
-}
-
-function bundleDependencies (file, data, cb) {
-  var bd = 'bundleDependencies'
-  var bdd = 'bundledDependencies'
-  // normalize key name
-  if (data[bdd] !== undefined) {
-    if (data[bd] === undefined) {
-      data[bd] = data[bdd]
-    }
-    delete data[bdd]
-  }
-  if (data[bd] === false) {
-    delete data[bd]
-  } else if (data[bd] === true) {
-    data[bd] = Object.keys(data.dependencies || {})
-  } else if (data[bd] !== undefined && !Array.isArray(data[bd])) {
-    delete data[bd]
-  }
-  return cb(null, data)
-}
-
-function githead (file, data, cb) {
-  if (data.gitHead) {
-    return cb(null, data)
-  }
-  var dir = path.dirname(file)
-  var head = path.resolve(dir, '.git/HEAD')
-  fs.readFile(head, 'utf8', function (er, headData) {
-    if (er) {
-      var parent = path.dirname(dir)
-      if (parent === dir) {
-        return cb(null, data)
-      }
-      return githead(dir, data, cb)
-    }
-    githead_(data, dir, headData, cb)
-  })
-}
-
-function githead_ (data, dir, head, cb) {
-  if (!head.match(/^ref: /)) {
-    data.gitHead = head.trim()
-    return cb(null, data)
-  }
-  var headRef = head.replace(/^ref: /, '').trim()
-  var headFile = path.resolve(dir, '.git', headRef)
-  fs.readFile(headFile, 'utf8', function (er, headData) {
-    if (er || !headData) {
-      var packFile = path.resolve(dir, '.git/packed-refs')
-      return fs.readFile(packFile, 'utf8', function (readFileErr, refs) {
-        if (readFileErr || !refs) {
-          return cb(null, data)
-        }
-        refs = refs.split('\n')
-        for (var i = 0; i < refs.length; i++) {
-          var match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
-          if (match && match[2].trim() === headRef) {
-            data.gitHead = match[1]
-            break
-          }
-        }
-        return cb(null, data)
-      })
-    }
-    headData = headData.replace(/^ref: /, '').trim()
-    data.gitHead = headData
-    return cb(null, data)
-  })
-}
-
-/**
- * Warn if the bin references don't point to anything.  This might be better in
- * normalize-package-data if it had access to the file path.
- */
-function checkBinReferences_ (file, data, warn, cb) {
-  if (!(data.bin instanceof Object)) {
-    return cb()
-  }
-
-  var keys = Object.keys(data.bin)
-  var keysLeft = keys.length
-  if (!keysLeft) {
-    return cb()
-  }
-
-  function handleExists (relName, result) {
-    keysLeft--
-    if (!result) {
-      warn('No bin file found at ' + relName)
-    }
-    if (!keysLeft) {
-      cb()
-    }
-  }
-
-  keys.forEach(function (key) {
-    var dirName = path.dirname(file)
-    var relName = data.bin[key]
-    /* istanbul ignore if - impossible, bins have been normalized */
-    if (typeof relName !== 'string') {
-      var msg = 'Bin filename for ' + key +
-        ' is not a string: ' + util.inspect(relName)
-      warn(msg)
-      delete data.bin[key]
-      handleExists(relName, true)
-      return
-    }
-    var binPath = path.resolve(dirName, relName)
-    fs.stat(binPath, (err) => handleExists(relName, !err))
-  })
-}
-
-function final (file, data, log, strict, cb) {
-  var pId = makePackageId(data)
-
-  function warn (msg) {
-    if (typoWarned[pId]) {
-      return
-    }
-    if (log) {
-      log('package.json', pId, msg)
-    }
-  }
-
-  try {
-    normalizeData(data, warn, strict)
-  } catch (error) {
-    return cb(error)
-  }
-
-  checkBinReferences_(file, data, warn, function () {
-    typoWarned[pId] = true
-    cb(null, data)
-  })
-}
-
-function fillTypes (file, data, cb) {
-  var index = data.main || 'index.js'
-
-  if (typeof index !== 'string') {
-    return cb(new TypeError('The "main" attribute must be of type string.'))
-  }
-
-  // TODO exports is much more complicated than this in verbose format
-  // We need to support for instance
-
-  // "exports": {
-  //   ".": [
-  //     {
-  //       "default": "./lib/npm.js"
-  //     },
-  //     "./lib/npm.js"
-  //   ],
-  //   "./package.json": "./package.json"
-  // },
-  // as well as conditional exports
-
-  // if (data.exports && typeof data.exports === 'string') {
-  //   index = data.exports
-  // }
-
-  // if (data.exports && data.exports['.']) {
-  //   index = data.exports['.']
-  //   if (typeof index !== 'string') {
-  //   }
-  // }
-
-  var extless =
-    path.join(path.dirname(index), path.basename(index, path.extname(index)))
-  var dts = `./${extless}.d.ts`
-  var dtsPath = path.join(path.dirname(file), dts)
-  var hasDTSFields = 'types' in data || 'typings' in data
-  if (!hasDTSFields && fs.existsSync(dtsPath)) {
-    data.types = dts.split(path.sep).join('/')
-  }
-
-  cb(null, data)
-}
-
-function makePackageId (data) {
-  var name = cleanString(data.name)
-  var ver = cleanString(data.version)
-  return name + '@' + ver
-}
-
-function cleanString (str) {
-  return (!str || typeof (str) !== 'string') ? '' : str.trim()
-}
-
-// /**package { "name": "foo", "version": "1.2.3", ... } **/
-function parseIndex (data) {
-  data = data.split(/^\/\*\*package(?:\s|$)/m)
-
-  if (data.length < 2) {
-    return null
-  }
-  data = data[1]
-  data = data.split(/\*\*\/$/m)
-
-  if (data.length < 2) {
-    return null
-  }
-  data = data[0]
-  data = data.replace(/^\s*\*/mg, '')
-
-  try {
-    return safeJSON(data)
-  } catch (er) {
-    return null
-  }
-}
-
-function parseError (ex, file) {
-  var e = new Error('Failed to parse json\n' + ex.message)
-  e.code = 'EJSONPARSE'
-  e.path = file
-  return e
-}
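
Aside: the parseIndex helper deleted above supports npm's legacy single-file module format, where the package metadata is embedded in an index.js comment rather than a package.json. A minimal sketch of such a file, matching the /**package ... **/ pattern the regexes above expect (name and version are illustrative):

/**package
 * { "name": "example-single-file",
 *   "version": "1.0.0" }
 **/
module.exports = function () { return 'hi' }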
diff --git a/node_modules/pacote/node_modules/read-package-json/package.json b/node_modules/pacote/node_modules/read-package-json/package.json
deleted file mode 100644
index 01061f2bc2792..0000000000000
--- a/node_modules/pacote/node_modules/read-package-json/package.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
-  "name": "read-package-json",
-  "version": "7.0.0",
-  "author": "GitHub Inc.",
-  "description": "The thing npm uses to read package.json files with semantics and defaults and validation",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/read-package-json.git"
-  },
-  "main": "lib/read-json.js",
-  "scripts": {
-    "prerelease": "npm t",
-    "postrelease": "npm publish && git push --follow-tags",
-    "release": "standard-version -s",
-    "test": "tap",
-    "npmclilint": "npmcli-lint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "dependencies": {
-    "glob": "^10.2.2",
-    "json-parse-even-better-errors": "^3.0.0",
-    "normalize-package-data": "^6.0.0",
-    "npm-normalize-package-bin": "^3.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.0.1"
-  },
-  "license": "ISC",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^16.14.0 || >=18.0.0"
-  },
-  "tap": {
-    "branches": 73,
-    "functions": 77,
-    "lines": 77,
-    "statements": 77,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.18.0",
-    "publish": "true",
-    "ciVersions": [
-      "16.14.0",
-      "16.x",
-      "18.0.0",
-      "18.x"
-    ]
-  }
-}
diff --git a/node_modules/init-package-json/node_modules/read-package-json/LICENSE b/node_modules/read-package-json/LICENSE
similarity index 100%
rename from node_modules/init-package-json/node_modules/read-package-json/LICENSE
rename to node_modules/read-package-json/LICENSE
diff --git a/node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js b/node_modules/read-package-json/lib/read-json.js
similarity index 100%
rename from node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js
rename to node_modules/read-package-json/lib/read-json.js
diff --git a/node_modules/init-package-json/node_modules/read-package-json/package.json b/node_modules/read-package-json/package.json
similarity index 100%
rename from node_modules/init-package-json/node_modules/read-package-json/package.json
rename to node_modules/read-package-json/package.json
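
For orientation, the hoisted module keeps the callback API shown in lib/read-json.js above. A minimal usage sketch, assuming a package.json in the current directory (the path and log function are placeholders):

const readJson = require('read-package-json')

// readJson(file, [logFunction], [strict], callback)
// normalizes the manifest and fills in readme, bins, gitHead, etc.
readJson('./package.json', console.error, false, (er, data) => {
  if (er) {
    console.error('There was an error reading the file')
    return
  }
  console.log('the package data is', data)
})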
diff --git a/package-lock.json b/package-lock.json
index 4ca183ef1110b..77480935db1e3 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -6745,21 +6745,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/init-package-json/node_modules/read-package-json": {
-      "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-7.0.0.tgz",
-      "integrity": "sha512-uL4Z10OKV4p6vbdvIXB+OzhInYtIozl/VxUBPgNkBuUi2DeRonnuspmaVAMcrkmfjKGNmRndyQAbE7/AmzGwFg==",
-      "inBundle": true,
-      "dependencies": {
-        "glob": "^10.2.2",
-        "json-parse-even-better-errors": "^3.0.0",
-        "normalize-package-data": "^6.0.0",
-        "npm-normalize-package-bin": "^3.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
     "node_modules/internal-slot": {
       "version": "1.0.5",
       "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz",
@@ -10571,21 +10556,6 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/pacote/node_modules/read-package-json": {
-      "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-7.0.0.tgz",
-      "integrity": "sha512-uL4Z10OKV4p6vbdvIXB+OzhInYtIozl/VxUBPgNkBuUi2DeRonnuspmaVAMcrkmfjKGNmRndyQAbE7/AmzGwFg==",
-      "inBundle": true,
-      "dependencies": {
-        "glob": "^10.2.2",
-        "json-parse-even-better-errors": "^3.0.0",
-        "normalize-package-data": "^6.0.0",
-        "npm-normalize-package-bin": "^3.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
     "node_modules/parent-module": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -11047,6 +11017,21 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/read-package-json": {
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-7.0.0.tgz",
+      "integrity": "sha512-uL4Z10OKV4p6vbdvIXB+OzhInYtIozl/VxUBPgNkBuUi2DeRonnuspmaVAMcrkmfjKGNmRndyQAbE7/AmzGwFg==",
+      "inBundle": true,
+      "dependencies": {
+        "glob": "^10.2.2",
+        "json-parse-even-better-errors": "^3.0.0",
+        "normalize-package-data": "^6.0.0",
+        "npm-normalize-package-bin": "^3.0.0"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/read-package-json-fast": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-3.0.2.tgz",

From 10a6be45d037a6a588f22b976d952611079390e7 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 15:35:02 -0700
Subject: [PATCH 45/68] deps: pacote@17.0.1

---
 DEPENDENCIES.md                               |    2 -
 mock-registry/package.json                    |    2 +-
 node_modules/.gitignore                       |    7 +-
 .../pacote/node_modules/lru-cache/LICENSE     |   15 -
 .../pacote/node_modules/lru-cache/index.js    | 1227 -----------------
 .../pacote/node_modules/lru-cache/index.mjs   | 1227 -----------------
 .../node_modules/lru-cache/package.json       |   96 --
 .../node_modules/npm-pick-manifest/LICENSE.md |   16 -
 .../npm-pick-manifest/lib/index.js            |  218 ---
 .../node_modules/hosted-git-info/LICENSE      |   13 -
 .../hosted-git-info/lib/from-url.js           |  122 --
 .../node_modules/hosted-git-info/lib/hosts.js |  228 ---
 .../node_modules/hosted-git-info/lib/index.js |  179 ---
 .../hosted-git-info/lib/parse-url.js          |   78 --
 .../node_modules/hosted-git-info/package.json |   59 -
 .../node_modules/npm-package-arg/LICENSE      |   15 -
 .../node_modules/npm-package-arg/lib/npa.js   |  431 ------
 .../node_modules/npm-package-arg/package.json |   59 -
 .../npm-registry-fetch/LICENSE.md             |   20 +
 .../npm-registry-fetch/lib/auth.js            |  145 ++
 .../npm-registry-fetch/lib/check-response.js  |  100 ++
 .../npm-registry-fetch/lib/clean-url.js       |   27 +
 .../npm-registry-fetch/lib/default-opts.js    |   19 +
 .../npm-registry-fetch/lib/errors.js          |   80 ++
 .../npm-registry-fetch/lib/index.js           |  247 ++++
 .../package.json                              |   46 +-
 node_modules/pacote/package.json              |    6 +-
 package-lock.json                             |   79 +-
 package.json                                  |    2 +-
 workspaces/arborist/package.json              |    2 +-
 workspaces/libnpmdiff/package.json            |    2 +-
 workspaces/libnpmexec/package.json            |    2 +-
 workspaces/libnpmpack/package.json            |    2 +-
 33 files changed, 702 insertions(+), 4071 deletions(-)
 delete mode 100644 node_modules/pacote/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/lru-cache/index.js
 delete mode 100644 node_modules/pacote/node_modules/lru-cache/index.mjs
 delete mode 100644 node_modules/pacote/node_modules/lru-cache/package.json
 delete mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md
 delete mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js
 delete mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js
 delete mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js
 delete mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js
 delete mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js
 delete mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json
 delete mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
 delete mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
 create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md
 create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js
 create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js
 create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/clean-url.js
 create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js
 create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js
 create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js
 rename node_modules/pacote/node_modules/{npm-pick-manifest => npm-registry-fetch}/package.json (50%)

diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 46f0bfb3a4620..324dbb190ca34 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -133,7 +133,6 @@ graph LR;
   npm-->pacote;
   npm-->parse-conflict-json;
   npm-->proc-log;
-  npm-->read-package-json;
   npm-->read;
   npm-->semver;
   npm-->ssri;
@@ -561,7 +560,6 @@ graph LR;
   npm-->parse-conflict-json;
   npm-->proc-log;
   npm-->qrcode-terminal;
-  npm-->read-package-json;
   npm-->read;
   npm-->remark-gfm;
   npm-->remark-github;
diff --git a/mock-registry/package.json b/mock-registry/package.json
index d729881f52651..c88ddc45c3608 100644
--- a/mock-registry/package.json
+++ b/mock-registry/package.json
@@ -56,7 +56,7 @@
     "json-stringify-safe": "^5.0.1",
     "nock": "^13.3.0",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^17.0.0",
+    "pacote": "^17.0.1",
     "tap": "^16.3.4"
   }
 }
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 25a9780a8417b..aa04f66c550f2 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -221,12 +221,7 @@
 !/pacote
 !/pacote/node_modules/
 /pacote/node_modules/*
-!/pacote/node_modules/lru-cache
-!/pacote/node_modules/npm-pick-manifest
-!/pacote/node_modules/npm-pick-manifest/node_modules/
-/pacote/node_modules/npm-pick-manifest/node_modules/*
-!/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info
-!/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg
+!/pacote/node_modules/npm-registry-fetch
 !/parse-conflict-json
 !/path-is-absolute
 !/path-key
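
Aside: with this change pacote@17.0.1 carries its own nested copy of npm-registry-fetch (un-ignored above). A minimal sketch of that module's surface, assuming the default public registry; the package path queried is illustrative:

const fetch = require('npm-registry-fetch')

// fetch.json(path, opts) resolves with the parsed JSON registry response
fetch.json('/abbrev', { registry: 'https://registry.npmjs.org' })
  .then(packument => console.log(packument['dist-tags']))
  .catch(err => console.error(err.code, err.message))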
diff --git a/node_modules/pacote/node_modules/lru-cache/LICENSE b/node_modules/pacote/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/pacote/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
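
For context before the large deletion that follows: the removed copy is lru-cache 7.x, which pacote's nested dependencies no longer pull in. A minimal usage sketch of its classic API (max and ttl values are illustrative):

const LRUCache = require('lru-cache')

// the constructor below requires at least one of max, maxSize, or ttl
const cache = new LRUCache({ max: 100, ttl: 60 * 1000 })
cache.set('key', 'value')
console.log(cache.get('key'))     // 'value'
console.log(cache.has('missing')) // false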
diff --git a/node_modules/pacote/node_modules/lru-cache/index.js b/node_modules/pacote/node_modules/lru-cache/index.js
deleted file mode 100644
index 48e99fe5e5a70..0000000000000
--- a/node_modules/pacote/node_modules/lru-cache/index.js
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.now + status.ttl - status.start
-      }
-    }
-
-    // debounce calls to perf.now() to 1s so we're not hitting
-    // that costly call repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation return invalid (expect positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet
-        // it's not stale, which means this isn't a staleWhileRefetching.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-module.exports = LRUCache
diff --git a/node_modules/pacote/node_modules/lru-cache/index.mjs b/node_modules/pacote/node_modules/lru-cache/index.mjs
deleted file mode 100644
index 4a0b4813ec515..0000000000000
--- a/node_modules/pacote/node_modules/lru-cache/index.mjs
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.start + status.ttl - status.now
-      }
-    }
-
-    // debounce calls to perf.now() to 1s so we're not hitting
-    // that costly call repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation return invalid (expect positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet
-        // it's not stale, which means this isn't a staleWhileRefetching.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-export default LRUCache
diff --git a/node_modules/pacote/node_modules/lru-cache/package.json b/node_modules/pacote/node_modules/lru-cache/package.json
deleted file mode 100644
index 9684991727e7a..0000000000000
--- a/node_modules/pacote/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,96 +0,0 @@
-{
-  "name": "lru-cache",
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "7.18.3",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "node ./scripts/transpile-to-esm.js",
-    "size": "size-limit",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc ./index.d.ts"
-  },
-  "type": "commonjs",
-  "main": "./index.js",
-  "module": "./index.mjs",
-  "types": "./index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./index.d.ts",
-        "default": "./index.mjs"
-      },
-      "require": {
-        "types": "./index.d.ts",
-        "default": "./index.js"
-      }
-    },
-    "./package.json": "./package.json"
-  },
-  "repository": "git://github.com/isaacs/node-lru-cache.git",
-  "devDependencies": {
-    "@size-limit/preset-small-lib": "^7.0.8",
-    "@types/node": "^17.0.31",
-    "@types/tap": "^15.0.6",
-    "benchmark": "^2.1.4",
-    "c8": "^7.11.2",
-    "clock-mock": "^1.0.6",
-    "eslint-config-prettier": "^8.5.0",
-    "prettier": "^2.6.2",
-    "size-limit": "^7.0.8",
-    "tap": "^16.3.4",
-    "ts-node": "^10.7.0",
-    "tslib": "^2.4.0",
-    "typedoc": "^0.23.24",
-    "typescript": "^4.6.4"
-  },
-  "license": "ISC",
-  "files": [
-    "index.js",
-    "index.mjs",
-    "index.d.ts"
-  ],
-  "engines": {
-    "node": ">=12"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--include=index.js"
-    ],
-    "node-arg": [
-      "--expose-gc",
-      "--require",
-      "ts-node/register"
-    ],
-    "ts": false
-  },
-  "size-limit": [
-    {
-      "path": "./index.js"
-    }
-  ]
-}
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md b/node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md
deleted file mode 100644
index 8d28acf866d93..0000000000000
--- a/node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js b/node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js
deleted file mode 100644
index 8dbd2721c8996..0000000000000
--- a/node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js
+++ /dev/null
@@ -1,218 +0,0 @@
-'use strict'
-
-const npa = require('npm-package-arg')
-const semver = require('semver')
-const { checkEngine } = require('npm-install-checks')
-const normalizeBin = require('npm-normalize-package-bin')
-
-const engineOk = (manifest, npmVersion, nodeVersion) => {
-  try {
-    checkEngine(manifest, npmVersion, nodeVersion)
-    return true
-  } catch (_) {
-    return false
-  }
-}
-
-const isBefore = (verTimes, ver, time) =>
-  !verTimes || !verTimes[ver] || Date.parse(verTimes[ver]) <= time
-
-const avoidSemverOpt = { includePrerelease: true, loose: true }
-const shouldAvoid = (ver, avoid) =>
-  avoid && semver.satisfies(ver, avoid, avoidSemverOpt)
-
-const decorateAvoid = (result, avoid) =>
-  result && shouldAvoid(result.version, avoid)
-    ? { ...result, _shouldAvoid: true }
-    : result
-
-const pickManifest = (packument, wanted, opts) => {
-  const {
-    defaultTag = 'latest',
-    before = null,
-    nodeVersion = process.version,
-    npmVersion = null,
-    includeStaged = false,
-    avoid = null,
-    avoidStrict = false,
-  } = opts
-
-  const { name, time: verTimes } = packument
-  const versions = packument.versions || {}
-
-  if (avoidStrict) {
-    const looseOpts = {
-      ...opts,
-      avoidStrict: false,
-    }
-
-    const result = pickManifest(packument, wanted, looseOpts)
-    if (!result || !result._shouldAvoid) {
-      return result
-    }
-
-    const caret = pickManifest(packument, `^${result.version}`, looseOpts)
-    if (!caret || !caret._shouldAvoid) {
-      return {
-        ...caret,
-        _outsideDependencyRange: true,
-        _isSemVerMajor: false,
-      }
-    }
-
-    const star = pickManifest(packument, '*', looseOpts)
-    if (!star || !star._shouldAvoid) {
-      return {
-        ...star,
-        _outsideDependencyRange: true,
-        _isSemVerMajor: true,
-      }
-    }
-
-    throw Object.assign(new Error(`No avoidable versions for ${name}`), {
-      code: 'ETARGET',
-      name,
-      wanted,
-      avoid,
-      before,
-      versions: Object.keys(versions),
-    })
-  }
-
-  const staged = (includeStaged && packument.stagedVersions &&
-    packument.stagedVersions.versions) || {}
-  const restricted = (packument.policyRestrictions &&
-    packument.policyRestrictions.versions) || {}
-
-  const time = before && verTimes ? +(new Date(before)) : Infinity
-  const spec = npa.resolve(name, wanted || defaultTag)
-  const type = spec.type
-  const distTags = packument['dist-tags'] || {}
-
-  if (type !== 'tag' && type !== 'version' && type !== 'range') {
-    throw new Error('Only tag, version, and range are supported')
-  }
-
-  // if the type is 'tag', and not just the implicit default, then it must
-  // be that exactly, or nothing else will do.
-  if (wanted && type === 'tag') {
-    const ver = distTags[wanted]
-    // if the version in the dist-tags is before the before date, then
-    // we use that.  Otherwise, we get the highest precedence version
-    // prior to the dist-tag.
-    if (isBefore(verTimes, ver, time)) {
-      return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
-    } else {
-      return pickManifest(packument, `<=${ver}`, opts)
-    }
-  }
-
-  // similarly, if a specific version, then only that version will do
-  if (wanted && type === 'version') {
-    const ver = semver.clean(wanted, { loose: true })
-    const mani = versions[ver] || staged[ver] || restricted[ver]
-    return isBefore(verTimes, ver, time) ? decorateAvoid(mani, avoid) : null
-  }
-
-  // ok, sort based on our heuristics, and pick the best fit
-  const range = type === 'range' ? wanted : '*'
-
-  // if the range is *, then we prefer the 'latest' if available
-  // but skip this if it should be avoided, in that case we have
-  // to try a little harder.
-  const defaultVer = distTags[defaultTag]
-  if (defaultVer &&
-      (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
-      !shouldAvoid(defaultVer, avoid)) {
-    const mani = versions[defaultVer]
-    if (mani && isBefore(verTimes, defaultVer, time)) {
-      return mani
-    }
-  }
-
-  // ok, actually have to sort the list and take the winner
-  const allEntries = Object.entries(versions)
-    .concat(Object.entries(staged))
-    .concat(Object.entries(restricted))
-    .filter(([ver, mani]) => isBefore(verTimes, ver, time))
-
-  if (!allEntries.length) {
-    throw Object.assign(new Error(`No versions available for ${name}`), {
-      code: 'ENOVERSIONS',
-      name,
-      type,
-      wanted,
-      before,
-      versions: Object.keys(versions),
-    })
-  }
-
-  const sortSemverOpt = { loose: true }
-  const entries = allEntries.filter(([ver, mani]) =>
-    semver.satisfies(ver, range, { loose: true }))
-    .sort((a, b) => {
-      const [vera, mania] = a
-      const [verb, manib] = b
-      const notavoida = !shouldAvoid(vera, avoid)
-      const notavoidb = !shouldAvoid(verb, avoid)
-      const notrestra = !restricted[a]
-      const notrestrb = !restricted[b]
-      const notstagea = !staged[a]
-      const notstageb = !staged[b]
-      const notdepra = !mania.deprecated
-      const notdeprb = !manib.deprecated
-      const enginea = engineOk(mania, npmVersion, nodeVersion)
-      const engineb = engineOk(manib, npmVersion, nodeVersion)
-      // sort by:
-      // - not an avoided version
-      // - not restricted
-      // - not staged
-      // - not deprecated and engine ok
-      // - engine ok
-      // - not deprecated
-      // - semver
-      return (notavoidb - notavoida) ||
-        (notrestrb - notrestra) ||
-        (notstageb - notstagea) ||
-        ((notdeprb && engineb) - (notdepra && enginea)) ||
-        (engineb - enginea) ||
-        (notdeprb - notdepra) ||
-        semver.rcompare(vera, verb, sortSemverOpt)
-    })
-
-  return decorateAvoid(entries[0] && entries[0][1], avoid)
-}
-
-module.exports = (packument, wanted, opts = {}) => {
-  const mani = pickManifest(packument, wanted, opts)
-  const picked = mani && normalizeBin(mani)
-  const policyRestrictions = packument.policyRestrictions
-  const restricted = (policyRestrictions && policyRestrictions.versions) || {}
-
-  if (picked && !restricted[picked.version]) {
-    return picked
-  }
-
-  const { before = null, defaultTag = 'latest' } = opts
-  const bstr = before ? new Date(before).toLocaleString() : ''
-  const { name } = packument
-  const pckg = `${name}@${wanted}` +
-    (before ? ` with a date before ${bstr}` : '')
-
-  const isForbidden = picked && !!restricted[picked.version]
-  const polMsg = isForbidden ? policyRestrictions.message : ''
-
-  const msg = !isForbidden ? `No matching version found for ${pckg}.`
-    : `Could not download ${pckg} due to policy violations:\n${polMsg}`
-
-  const code = isForbidden ? 'E403' : 'ETARGET'
-  throw Object.assign(new Error(msg), {
-    code,
-    type: npa.resolve(packument.name, wanted).type,
-    wanted,
-    versions: Object.keys(packument.versions ?? {}),
-    name,
-    distTags: packument['dist-tags'],
-    defaultTag,
-  })
-}
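For reference, a minimal usage sketch of the module whose tail is deleted above; this nested copy goes away, presumably deduped against the top-level npm-pick-manifest, which keeps the same public API. The packument below is a hypothetical fixture.

const pickManifest = require('npm-pick-manifest')

// hypothetical minimal packument, just enough shape for the picker
const packument = {
  name: 'example',
  'dist-tags': { latest: '1.1.0' },
  versions: {
    '1.0.0': { name: 'example', version: '1.0.0' },
    '1.1.0': { name: 'example', version: '1.1.0' },
  },
}

pickManifest(packument, '^1.0.0').version // '1.1.0' (the default tag wins for ranges)
pickManifest(packument, '1.0.0').version  // '1.0.0' (exact version lookup)
pickManifest(packument, '^2.0.0')         // throws with code 'ETARGET'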
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE
deleted file mode 100644
index 45055763dc838..0000000000000
--- a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright (c) 2015, Rebecca Turner
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js
deleted file mode 100644
index efc1247d59d12..0000000000000
--- a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js
+++ /dev/null
@@ -1,122 +0,0 @@
-'use strict'
-
-const parseUrl = require('./parse-url')
-
-// look for github shorthand inputs, such as npm/cli
-const isGitHubShorthand = (arg) => {
-  // it cannot contain whitespace before the first #
-  // it cannot start with a / because that's probably an absolute file path
-  // but it must include a slash since repos are username/repository
-  // it cannot start with a . because that's probably a relative file path
-  // it cannot start with an @ because that's a scoped package if it passes the other tests
-  // it cannot contain a : before a # because that tells us that there's a protocol
-  // a second / may not exist before a #
-  const firstHash = arg.indexOf('#')
-  const firstSlash = arg.indexOf('/')
-  const secondSlash = arg.indexOf('/', firstSlash + 1)
-  const firstColon = arg.indexOf(':')
-  const firstSpace = /\s/.exec(arg)
-  const firstAt = arg.indexOf('@')
-
-  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
-  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
-  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
-  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
-  const hasSlash = firstSlash > 0
-  // if a # is found, what we really want to know is that the character
-  // immediately before # is not a /
-  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
-  const doesNotStartWithDot = !arg.startsWith('.')
-
-  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
-    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
-    secondSlashOnlyAfterHash
-}
-
-module.exports = (giturl, opts, { gitHosts, protocols }) => {
-  if (!giturl) {
-    return
-  }
-
-  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
-  const parsed = parseUrl(correctedUrl, protocols)
-  if (!parsed) {
-    return
-  }
-
-  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
-  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
-    ? parsed.hostname.slice(4)
-    : parsed.hostname]
-  const gitHostName = gitHostShortcut || gitHostDomain
-  if (!gitHostName) {
-    return
-  }
-
-  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
-  let auth = null
-  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
-    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
-  }
-
-  let committish = null
-  let user = null
-  let project = null
-  let defaultRepresentation = null
-
-  try {
-    if (gitHostShortcut) {
-      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
-      const firstAt = pathname.indexOf('@')
-      // we ignore auth for shortcuts, so just trim it out
-      if (firstAt > -1) {
-        pathname = pathname.slice(firstAt + 1)
-      }
-
-      const lastSlash = pathname.lastIndexOf('/')
-      if (lastSlash > -1) {
-        user = decodeURIComponent(pathname.slice(0, lastSlash))
-        // we want nulls only, never empty strings
-        if (!user) {
-          user = null
-        }
-        project = decodeURIComponent(pathname.slice(lastSlash + 1))
-      } else {
-        project = decodeURIComponent(pathname)
-      }
-
-      if (project.endsWith('.git')) {
-        project = project.slice(0, -4)
-      }
-
-      if (parsed.hash) {
-        committish = decodeURIComponent(parsed.hash.slice(1))
-      }
-
-      defaultRepresentation = 'shortcut'
-    } else {
-      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
-        return
-      }
-
-      const segments = gitHostInfo.extract(parsed)
-      if (!segments) {
-        return
-      }
-
-      user = segments.user && decodeURIComponent(segments.user)
-      project = decodeURIComponent(segments.project)
-      committish = decodeURIComponent(segments.committish)
-      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
-    }
-  } catch (err) {
-    /* istanbul ignore else */
-    if (err instanceof URIError) {
-      return
-    } else {
-      throw err
-    }
-  }
-
-  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
-}
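The shorthand rules enumerated in the comment block above are easiest to see from the public entry point. A small sketch (isGitHubShorthand itself is not exported):

const hostedGitInfo = require('hosted-git-info')

hostedGitInfo.fromUrl('npm/cli').shortcut() // 'github:npm/cli'
hostedGitInfo.fromUrl('./npm/cli')          // undefined: a leading . means a file path
hostedGitInfo.fromUrl('npm/cli/extra')      // undefined: second / before any #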
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js
deleted file mode 100644
index 013712b7842c8..0000000000000
--- a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js
+++ /dev/null
@@ -1,228 +0,0 @@
-/* eslint-disable max-len */
-
-'use strict'
-
-const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
-const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
-const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-')
-
-const defaults = {
-  sshtemplate: ({ domain, user, project, committish }) =>
-    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, user, project, committish }) =>
-    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
-  browsetemplate: ({ domain, user, project, committish, treepath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  docstemplate: ({ domain, user, project, treepath, committish }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
-  shortcuttemplate: ({ type, user, project, committish }) =>
-    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ user, project, committish }) =>
-    `${user}/${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, user, project }) =>
-    `https://${domain}/${user}/${project}/issues`,
-  hashformat: formatHashFragment,
-}
-
-const hosts = {}
-hosts.github = {
-  // First two are insecure and generally shouldn't be used any more, but
-  // they are still supported.
-  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'github.com',
-  treepath: 'tree',
-  blobpath: 'blob',
-  editpath: 'edit',
-  filetemplate: ({ auth, user, project, committish, path }) =>
-    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
-  gittemplate: ({ auth, domain, user, project, committish }) =>
-    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, type, committish] = url.pathname.split('/', 5)
-    if (type && type !== 'tree') {
-      return
-    }
-
-    if (!type) {
-      committish = url.hash.slice(1)
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish }
-  },
-}
-
-hosts.bitbucket = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'bitbucket.org',
-  treepath: 'src',
-  blobpath: 'src',
-  editpath: '?mode=edit',
-  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (['get'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gitlab = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gitlab.com',
-  treepath: 'tree',
-  blobpath: 'tree',
-  editpath: '-/edit',
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    const path = url.pathname.slice(1)
-    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
-      return
-    }
-
-    const segments = path.split('/')
-    let project = segments.pop()
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    const user = segments.join('/')
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gist = {
-  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gist.github.com',
-  editpath: 'edit',
-  sshtemplate: ({ domain, project, committish }) =>
-    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, project, committish }) =>
-    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
-  browsetemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  docstemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  httpstemplate: ({ domain, project, committish }) =>
-    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ user, project, committish, path }) =>
-    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
-  shortcuttemplate: ({ type, project, committish }) =>
-    `${type}:${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ project, committish }) =>
-    `${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, project }) =>
-    `https://${domain}/${project}`,
-  gittemplate: ({ domain, project, committish }) =>
-    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ project, committish }) =>
-    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (aux === 'raw') {
-      return
-    }
-
-    if (!project) {
-      if (!user) {
-        return
-      }
-
-      project = user
-      user = null
-    }
-
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-  hashformat: function (fragment) {
-    return fragment && 'file-' + formatHashFragment(fragment)
-  },
-}
-
-hosts.sourcehut = {
-  protocols: ['git+ssh:', 'https:'],
-  domain: 'git.sr.ht',
-  treepath: 'tree',
-  blobpath: 'tree',
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
-  httpstemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
-  bugstemplate: ({ user, project }) =>
-    `https://todo.sr.ht/${user}/${project}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-
-    // tarball url
-    if (['archive'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  hosts[name] = Object.assign({}, defaults, host)
-}
-
-module.exports = hosts
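Every host entry above is merged over the shared defaults, so each host exposes the same template functions. A sketch of how the tarball templates diverge per host (the values follow directly from the templates in this file):

const hosts = require('./hosts.js')
const ctx = { user: 'npm', project: 'cli', committish: 'v10.0.0' }

hosts.github.tarballtemplate({ ...ctx, domain: hosts.github.domain })
// 'https://codeload.github.com/npm/cli/tar.gz/v10.0.0'
hosts.bitbucket.tarballtemplate({ ...ctx, domain: hosts.bitbucket.domain })
// 'https://bitbucket.org/npm/cli/get/v10.0.0.tar.gz'
hosts.gitlab.tarballtemplate({ ...ctx, domain: hosts.gitlab.domain })
// 'https://gitlab.com/npm/cli/repository/archive.tar.gz?ref=v10.0.0'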
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js
deleted file mode 100644
index a7339c217e9a3..0000000000000
--- a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js
+++ /dev/null
@@ -1,179 +0,0 @@
-'use strict'
-
-const LRU = require('lru-cache')
-const hosts = require('./hosts.js')
-const fromUrl = require('./from-url.js')
-const parseUrl = require('./parse-url.js')
-
-const cache = new LRU({ max: 1000 })
-
-class GitHost {
-  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
-    Object.assign(this, GitHost.#gitHosts[type], {
-      type,
-      user,
-      auth,
-      project,
-      committish,
-      default: defaultRepresentation,
-      opts,
-    })
-  }
-
-  static #gitHosts = { byShortcut: {}, byDomain: {} }
-  static #protocols = {
-    'git+ssh:': { name: 'sshurl' },
-    'ssh:': { name: 'sshurl' },
-    'git+https:': { name: 'https', auth: true },
-    'git:': { auth: true },
-    'http:': { auth: true },
-    'https:': { auth: true },
-    'git+http:': { auth: true },
-  }
-
-  static addHost (name, host) {
-    GitHost.#gitHosts[name] = host
-    GitHost.#gitHosts.byDomain[host.domain] = name
-    GitHost.#gitHosts.byShortcut[`${name}:`] = name
-    GitHost.#protocols[`${name}:`] = { name }
-  }
-
-  static fromUrl (giturl, opts) {
-    if (typeof giturl !== 'string') {
-      return
-    }
-
-    const key = giturl + JSON.stringify(opts || {})
-
-    if (!cache.has(key)) {
-      const hostArgs = fromUrl(giturl, opts, {
-        gitHosts: GitHost.#gitHosts,
-        protocols: GitHost.#protocols,
-      })
-      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
-    }
-
-    return cache.get(key)
-  }
-
-  static parseUrl (url) {
-    return parseUrl(url)
-  }
-
-  #fill (template, opts) {
-    if (typeof template !== 'function') {
-      return null
-    }
-
-    const options = { ...this, ...this.opts, ...opts }
-
-    // the path should always be set so we don't end up with 'undefined' in urls
-    if (!options.path) {
-      options.path = ''
-    }
-
-    // template functions will insert the leading slash themselves
-    if (options.path.startsWith('/')) {
-      options.path = options.path.slice(1)
-    }
-
-    if (options.noCommittish) {
-      options.committish = null
-    }
-
-    const result = template(options)
-    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
-  }
-
-  hash () {
-    return this.committish ? `#${this.committish}` : ''
-  }
-
-  ssh (opts) {
-    return this.#fill(this.sshtemplate, opts)
-  }
-
-  sshurl (opts) {
-    return this.#fill(this.sshurltemplate, opts)
-  }
-
-  browse (path, ...args) {
-    // not a string, treat path as opts
-    if (typeof path !== 'string') {
-      return this.#fill(this.browsetemplate, path)
-    }
-
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browsetreetemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  // If the path is known to be a file, then browseFile should be used. For some hosts
-  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
-  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
-  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
-  // does not redirect to a different commit.
-  browseFile (path, ...args) {
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browseblobtemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  docs (opts) {
-    return this.#fill(this.docstemplate, opts)
-  }
-
-  bugs (opts) {
-    return this.#fill(this.bugstemplate, opts)
-  }
-
-  https (opts) {
-    return this.#fill(this.httpstemplate, opts)
-  }
-
-  git (opts) {
-    return this.#fill(this.gittemplate, opts)
-  }
-
-  shortcut (opts) {
-    return this.#fill(this.shortcuttemplate, opts)
-  }
-
-  path (opts) {
-    return this.#fill(this.pathtemplate, opts)
-  }
-
-  tarball (opts) {
-    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
-  }
-
-  file (path, opts) {
-    return this.#fill(this.filetemplate, { ...opts, path })
-  }
-
-  edit (path, opts) {
-    return this.#fill(this.edittemplate, { ...opts, path })
-  }
-
-  getDefaultRepresentation () {
-    return this.default
-  }
-
-  toString (opts) {
-    if (this.default && typeof this[this.default] === 'function') {
-      return this[this.default](opts)
-    }
-
-    return this.sshurl(opts)
-  }
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  GitHost.addHost(name, host)
-}
-
-module.exports = GitHost
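For orientation, GitHost.#fill above merges the instance over the per-host templates; a usage sketch of the public API (unchanged in the surviving copy of hosted-git-info):

const GitHost = require('hosted-git-info')

const info = GitHost.fromUrl('git+ssh://git@github.com/npm/cli.git#v10.0.0')
info.type       // 'github'
info.ssh()      // 'git@github.com:npm/cli.git#v10.0.0'
info.https()    // 'git+https://github.com/npm/cli.git#v10.0.0'
info.tarball()  // 'https://codeload.github.com/npm/cli/tar.gz/v10.0.0'
info.toString() // 'git+ssh://git@github.com/npm/cli.git#v10.0.0' (default is 'sshurl' here)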
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js
deleted file mode 100644
index 7d5489c008ab4..0000000000000
--- a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const url = require('url')
-
-const lastIndexOfBefore = (str, char, beforeChar) => {
-  const startPosition = str.indexOf(beforeChar)
-  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
-}
-
-const safeUrl = (u) => {
-  try {
-    return new url.URL(u)
-  } catch {
-    // this fn should never throw
-  }
-}
-
-// accepts input like git:github.com:user/repo and inserts the // after the first :
-const correctProtocol = (arg, protocols) => {
-  const firstColon = arg.indexOf(':')
-  const proto = arg.slice(0, firstColon + 1)
-  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
-    return arg
-  }
-
-  const firstAt = arg.indexOf('@')
-  if (firstAt > -1) {
-    if (firstAt > firstColon) {
-      return `git+ssh://${arg}`
-    } else {
-      return arg
-    }
-  }
-
-  const doubleSlash = arg.indexOf('//')
-  if (doubleSlash === firstColon + 1) {
-    return arg
-  }
-
-  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
-}
-
-// attempt to correct an scp style url so that it will parse with `new URL()`
-const correctUrl = (giturl) => {
-  // ignore any @ that comes after the first hash, since that denotes the start
-  // of a committish, which can contain @ characters
-  const firstAt = lastIndexOfBefore(giturl, '@', '#')
-  // ignore colons that come after the hash since that could include colons such as:
-  // git@github.com:user/package-2#semver:^1.0.0
-  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
-
-  if (lastColonBeforeHash > firstAt) {
-    // the last : comes after the first @ (or there is no @)
-    // like it would in:
-    // proto://hostname.com:user/repo
-    // username@hostname.com:user/repo
-    // :password@hostname.com:user/repo
-    // username:password@hostname.com:user/repo
-    // proto://username@hostname.com:user/repo
-    // proto://:password@hostname.com:user/repo
-    // proto://username:password@hostname.com:user/repo
-    // then we replace the last : with a / to create a valid path
-    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
-  }
-
-  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
-    // we have no : at all
-    // as it would be in:
-    // username@hostname.com/user/repo
-    // then we prepend a protocol
-    giturl = `git+ssh://${giturl}`
-  }
-
-  return giturl
-}
-
-module.exports = (giturl, protocols) => {
-  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
-  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
-}
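A sketch of the scp-style correction above; this module is internal to hosted-git-info, so the relative require path is only valid from inside its lib directory:

const parseUrl = require('./parse-url.js')

const u = parseUrl('git@github.com:npm/cli.git#v10.0.0')
// correctUrl() rewrote the last pre-hash ':' to '/' and prepended git+ssh://,
// so this parsed as git+ssh://git@github.com/npm/cli.git#v10.0.0
u.protocol // 'git+ssh:'
u.pathname // '/npm/cli.git'
u.hash     // '#v10.0.0'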
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json
deleted file mode 100644
index 612259948afe7..0000000000000
--- a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json
+++ /dev/null
@@ -1,59 +0,0 @@
-{
-  "name": "hosted-git-info",
-  "version": "6.1.1",
-  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
-  "main": "./lib/index.js",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/hosted-git-info.git"
-  },
-  "keywords": [
-    "git",
-    "github",
-    "bitbucket",
-    "gitlab"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/hosted-git-info/issues"
-  },
-  "homepage": "https://github.com/npm/hosted-git-info",
-  "scripts": {
-    "posttest": "npm run lint",
-    "snap": "tap",
-    "test": "tap",
-    "test:coverage": "tap --coverage-report=html",
-    "lint": "eslint \"**/*.js\"",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run lint -- --fix",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "dependencies": {
-    "lru-cache": "^7.5.1"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.7.1",
-    "tap": "^16.0.1"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "color": 1,
-    "coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.7.1"
-  }
-}
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
deleted file mode 100644
index 19cec97b18468..0000000000000
--- a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
deleted file mode 100644
index 36bd18cd9f9a6..0000000000000
--- a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
+++ /dev/null
@@ -1,431 +0,0 @@
-'use strict'
-module.exports = npa
-module.exports.resolve = resolve
-module.exports.toPurl = toPurl
-module.exports.Result = Result
-
-const url = require('url')
-const HostedGit = require('hosted-git-info')
-const semver = require('semver')
-const path = global.FAKE_WINDOWS ? require('path').win32 : require('path')
-const validatePackageName = require('validate-npm-package-name')
-const { homedir } = require('os')
-const log = require('proc-log')
-
-const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
-const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
-const isURL = /^(?:git[+])?[a-z]+:/i
-const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
-const isFilename = /[.](?:tgz|tar.gz|tar)$/i
-
-function npa (arg, where) {
-  let name
-  let spec
-  if (typeof arg === 'object') {
-    if (arg instanceof Result && (!where || where === arg.where)) {
-      return arg
-    } else if (arg.name && arg.rawSpec) {
-      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
-    } else {
-      return npa(arg.raw, where || arg.where)
-    }
-  }
-  const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@')
-  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
-  if (isURL.test(arg)) {
-    spec = arg
-  } else if (isGit.test(arg)) {
-    spec = `git+ssh://${arg}`
-  } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) {
-    spec = arg
-  } else if (nameEndsAt > 0) {
-    name = namePart
-    spec = arg.slice(nameEndsAt + 1) || '*'
-  } else {
-    const valid = validatePackageName(arg)
-    if (valid.validForOldPackages) {
-      name = arg
-      spec = '*'
-    } else {
-      spec = arg
-    }
-  }
-  return resolve(name, spec, where, arg)
-}
-
-const isFilespec = isWindows ? /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
-
-function resolve (name, spec, where, arg) {
-  const res = new Result({
-    raw: arg,
-    name: name,
-    rawSpec: spec,
-    fromArgument: arg != null,
-  })
-
-  if (name) {
-    res.setName(name)
-  }
-
-  if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) {
-    return fromFile(res, where)
-  } else if (spec && /^npm:/i.test(spec)) {
-    return fromAlias(res, where)
-  }
-
-  const hosted = HostedGit.fromUrl(spec, {
-    noGitPlus: true,
-    noCommittish: true,
-  })
-  if (hosted) {
-    return fromHostedGit(res, hosted)
-  } else if (spec && isURL.test(spec)) {
-    return fromURL(res)
-  } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) {
-    return fromFile(res, where)
-  } else {
-    return fromRegistry(res)
-  }
-}
-
-const defaultRegistry = 'https://registry.npmjs.org'
-
-function toPurl (arg, reg = defaultRegistry) {
-  const res = npa(arg)
-
-  if (res.type !== 'version') {
-    throw invalidPurlType(res.type, res.raw)
-  }
-
-  // URI-encode leading @ of scoped packages
-  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
-  if (reg !== defaultRegistry) {
-    purl += '?repository_url=' + reg
-  }
-
-  return purl
-}
-
-function invalidPackageName (name, valid, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
-  err.code = 'EINVALIDPACKAGENAME'
-  return err
-}
-
-function invalidTagName (name, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
-  err.code = 'EINVALIDTAGNAME'
-  return err
-}
-
-function invalidPurlType (type, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
-  err.code = 'EINVALIDPURLTYPE'
-  return err
-}
-
-function Result (opts) {
-  this.type = opts.type
-  this.registry = opts.registry
-  this.where = opts.where
-  if (opts.raw == null) {
-    this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec
-  } else {
-    this.raw = opts.raw
-  }
-
-  this.name = undefined
-  this.escapedName = undefined
-  this.scope = undefined
-  this.rawSpec = opts.rawSpec || ''
-  this.saveSpec = opts.saveSpec
-  this.fetchSpec = opts.fetchSpec
-  if (opts.name) {
-    this.setName(opts.name)
-  }
-  this.gitRange = opts.gitRange
-  this.gitCommittish = opts.gitCommittish
-  this.gitSubdir = opts.gitSubdir
-  this.hosted = opts.hosted
-}
-
-Result.prototype.setName = function (name) {
-  const valid = validatePackageName(name)
-  if (!valid.validForOldPackages) {
-    throw invalidPackageName(name, valid, this.raw)
-  }
-
-  this.name = name
-  this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
-  // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
-  this.escapedName = name.replace('/', '%2f')
-  return this
-}
-
-Result.prototype.toString = function () {
-  const full = []
-  if (this.name != null && this.name !== '') {
-    full.push(this.name)
-  }
-  const spec = this.saveSpec || this.fetchSpec || this.rawSpec
-  if (spec != null && spec !== '') {
-    full.push(spec)
-  }
-  return full.length ? full.join('@') : this.raw
-}
-
-Result.prototype.toJSON = function () {
-  const result = Object.assign({}, this)
-  delete result.hosted
-  return result
-}
-
-function setGitCommittish (res, committish) {
-  if (!committish) {
-    res.gitCommittish = null
-    return res
-  }
-
-  // for each :: separated item:
-  for (const part of committish.split('::')) {
-    // if the item has no : then it is a commit-ish
-    if (!part.includes(':')) {
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a committish')
-      }
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a second committish')
-      }
-      res.gitCommittish = part
-      continue
-    }
-    // split on name:value
-    const [name, value] = part.split(':')
-    // if name is semver do semver lookup of ref or tag
-    if (name === 'semver') {
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a semver range')
-      }
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a second semver range')
-      }
-      res.gitRange = decodeURIComponent(value)
-      continue
-    }
-    if (name === 'path') {
-      if (res.gitSubdir) {
-        throw new Error('cannot override existing path with a second path')
-      }
-      res.gitSubdir = `/${value}`
-      continue
-    }
-    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
-  }
-
-  return res
-}
-
-function fromFile (res, where) {
-  if (!where) {
-    where = process.cwd()
-  }
-  res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory'
-  res.where = where
-
-  // always put the '/' on where when resolving urls, or else
-  // file:foo from /path/to/bar goes to /path/to/foo, when we want
-  // it to be /path/to/bar/foo
-
-  let specUrl
-  let resolvedUrl
-  const prefix = (!/^file:/.test(res.rawSpec) ? 'file:' : '')
-  const rawWithPrefix = prefix + res.rawSpec
-  let rawNoPrefix = rawWithPrefix.replace(/^file:/, '')
-  try {
-    resolvedUrl = new url.URL(rawWithPrefix, `file://${path.resolve(where)}/`)
-    specUrl = new url.URL(rawWithPrefix)
-  } catch (originalError) {
-    const er = new Error('Invalid file: URL, must comply with RFC 8909')
-    throw Object.assign(er, {
-      raw: res.rawSpec,
-      spec: res,
-      where,
-      originalError,
-    })
-  }
-
-  // environment switch for testing
-  if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') {
-    // XXX backwards compatibility: lack of compliance with RFC 8909
-    // Remove when we want a breaking change to come into RFC compliance.
-    if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
-      const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
-      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
-      specUrl = new url.URL(rawSpec)
-      rawNoPrefix = rawSpec.replace(/^file:/, '')
-    }
-    // turn file:/../foo into file:../foo
-    // for 1, 2 or 3 leading slashes since we attempted
-    // in the previous step to make it a file protocol url with a leading slash
-    if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
-      const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
-      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
-      specUrl = new url.URL(rawSpec)
-      rawNoPrefix = rawSpec.replace(/^file:/, '')
-    }
-    // XXX end 8909 violation backwards compatibility section
-  }
-
-  // file:foo - relative url to ./foo
-  // file:/foo - absolute path /foo
-  // file:///foo - absolute path to /foo, no authority host
-  // file://localhost/foo - absolute path to /foo, on localhost
-  // file://foo - absolute path to / on foo host (error!)
-  if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
-    const msg = `Invalid file: URL, must be absolute if // present`
-    throw Object.assign(new Error(msg), {
-      raw: res.rawSpec,
-      parsed: resolvedUrl,
-    })
-  }
-
-  // turn /C:/blah into just C:/blah on windows
-  let specPath = decodeURIComponent(specUrl.pathname)
-  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
-  if (isWindows) {
-    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
-    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
-  }
-
-  // replace ~ with homedir, but keep the ~ in the saveSpec
-  // otherwise, make it relative to where param
-  if (/^\/~(\/|$)/.test(specPath)) {
-    res.saveSpec = `file:${specPath.substr(1)}`
-    resolvedPath = path.resolve(homedir(), specPath.substr(3))
-  } else if (!path.isAbsolute(rawNoPrefix)) {
-    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
-  } else {
-    res.saveSpec = `file:${path.resolve(resolvedPath)}`
-  }
-
-  res.fetchSpec = path.resolve(where, resolvedPath)
-  return res
-}
-
-function fromHostedGit (res, hosted) {
-  res.type = 'git'
-  res.hosted = hosted
-  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
-  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
-  return setGitCommittish(res, hosted.committish)
-}
-
-function unsupportedURLType (protocol, spec) {
-  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
-  err.code = 'EUNSUPPORTEDPROTOCOL'
-  return err
-}
-
-function matchGitScp (spec) {
-  // git ssh specifiers are overloaded to also use scp-style git
-  // specifiers, so we have to parse those out and treat them special.
-  // They are NOT true URIs, so we can't hand them to `url.parse`.
-  //
-  // This regex looks for things that look like:
-  // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
-  //
-  // ...and various combinations. The username in the beginning is *required*.
-  const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
-  return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && {
-    fetchSpec: matched[1],
-    gitCommittish: matched[2] == null ? null : matched[2],
-  }
-}
-
-function fromURL (res) {
-  // eslint-disable-next-line node/no-deprecated-api
-  const urlparse = url.parse(res.rawSpec)
-  res.saveSpec = res.rawSpec
-  // check the protocol, and then see if it's git or not
-  switch (urlparse.protocol) {
-    case 'git:':
-    case 'git+http:':
-    case 'git+https:':
-    case 'git+rsync:':
-    case 'git+ftp:':
-    case 'git+file:':
-    case 'git+ssh:': {
-      res.type = 'git'
-      const match = urlparse.protocol === 'git+ssh:' ? matchGitScp(res.rawSpec)
-        : null
-      if (match) {
-        setGitCommittish(res, match.gitCommittish)
-        res.fetchSpec = match.fetchSpec
-      } else {
-        setGitCommittish(res, urlparse.hash != null ? urlparse.hash.slice(1) : '')
-        urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '')
-        if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) {
-          // keep the drive letter : on windows file paths
-          urlparse.host += ':'
-          urlparse.hostname += ':'
-        }
-        delete urlparse.hash
-        res.fetchSpec = url.format(urlparse)
-      }
-      break
-    }
-    case 'http:':
-    case 'https:':
-      res.type = 'remote'
-      res.fetchSpec = res.saveSpec
-      break
-
-    default:
-      throw unsupportedURLType(urlparse.protocol, res.rawSpec)
-  }
-
-  return res
-}
-
-function fromAlias (res, where) {
-  const subSpec = npa(res.rawSpec.substr(4), where)
-  if (subSpec.type === 'alias') {
-    throw new Error('nested aliases not supported')
-  }
-
-  if (!subSpec.registry) {
-    throw new Error('aliases only work for registry deps')
-  }
-
-  res.subSpec = subSpec
-  res.registry = true
-  res.type = 'alias'
-  res.saveSpec = null
-  res.fetchSpec = null
-  return res
-}
-
-function fromRegistry (res) {
-  res.registry = true
-  const spec = res.rawSpec.trim()
-  // no save spec for registry components, as we save based on the fetched
-  // version, not on the argument, so it can't be computed here.
-  res.saveSpec = null
-  res.fetchSpec = spec
-  const version = semver.valid(spec, true)
-  const range = semver.validRange(spec, true)
-  if (version) {
-    res.type = 'version'
-  } else if (range) {
-    res.type = 'range'
-  } else {
-    if (encodeURIComponent(spec) !== spec) {
-      throw invalidTagName(spec, res.raw)
-    }
-    res.type = 'tag'
-  }
-  return res
-}
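A quick sketch of the spec types the parser above produces (the top-level npm-package-arg copy keeps the same API):

const npa = require('npm-package-arg')

npa('foo@^1.2.3').type         // 'range'
npa('foo@1.2.3').type          // 'version'
npa('foo@beta').type           // 'tag'
npa('npm/cli').type            // 'git' (GitHub shorthand)
npa('file:./local-pkg').type   // 'directory'
npa('bar@npm:foo@^1.0.0').type // 'alias'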
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
deleted file mode 100644
index bb9e71b258a93..0000000000000
--- a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
+++ /dev/null
@@ -1,59 +0,0 @@
-{
-  "name": "npm-package-arg",
-  "version": "10.1.0",
-  "description": "Parse the things that can be arguments to `npm install`",
-  "main": "./lib/npa.js",
-  "directories": {
-    "test": "test"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "dependencies": {
-    "hosted-git-info": "^6.0.0",
-    "proc-log": "^3.0.0",
-    "semver": "^7.3.5",
-    "validate-npm-package-name": "^5.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.10.0",
-    "tap": "^16.0.1"
-  },
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "npmclilint": "npmcli-lint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/npm-package-arg.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/npm-package-arg/issues"
-  },
-  "homepage": "https://github.com/npm/npm-package-arg",
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "branches": 97,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.10.0"
-  }
-}
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md
new file mode 100644
index 0000000000000..5fc208ff122e0
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md
@@ -0,0 +1,20 @@
+
+
+ISC License
+
+Copyright npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this
+software for any purpose with or without fee is hereby
+granted, provided that the above copyright notice and this
+permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
+EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js
new file mode 100644
index 0000000000000..870ce0d923cd0
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js
@@ -0,0 +1,145 @@
+'use strict'
+const fs = require('fs')
+const npa = require('npm-package-arg')
+const { URL } = require('url')
+
+// Find the longest registry key that is used for some kind of auth
+// in the options.
+const regKeyFromURI = (uri, opts) => {
+  const parsed = new URL(uri)
+  // try to find a config key indicating we have auth for this registry
+  // can be one of :_authToken, :_auth, :_password and :username, or
+  // :certfile and :keyfile
+  // We walk up the "path" until we're left with just //<host>[:<port>],
+  // stopping when we reach '//'.
+  let regKey = `//${parsed.host}${parsed.pathname}`
+  while (regKey.length > '//'.length) {
+    // got some auth for this URI
+    if (hasAuth(regKey, opts)) {
+      return regKey
+    }
+
+    // can be either //host/some/path/:_auth or //host/some/path:_auth
+    // walk up by removing EITHER what's after the slash OR the slash itself
+    regKey = regKey.replace(/([^/]+|\/)$/, '')
+  }
+}
+
+const hasAuth = (regKey, opts) => (
+  opts[`${regKey}:_authToken`] ||
+  opts[`${regKey}:_auth`] ||
+  opts[`${regKey}:username`] && opts[`${regKey}:_password`] ||
+  opts[`${regKey}:certfile`] && opts[`${regKey}:keyfile`]
+)
+
+const sameHost = (a, b) => {
+  const parsedA = new URL(a)
+  const parsedB = new URL(b)
+  return parsedA.host === parsedB.host
+}
+
+const getRegistry = opts => {
+  const { spec } = opts
+  const { scope: specScope, subSpec } = spec ? npa(spec) : {}
+  const subSpecScope = subSpec && subSpec.scope
+  const scope = subSpec ? subSpecScope : specScope
+  const scopeReg = scope && opts[`${scope}:registry`]
+  return scopeReg || opts.registry
+}
+
+const maybeReadFile = file => {
+  try {
+    return fs.readFileSync(file, 'utf8')
+  } catch (er) {
+    if (er.code !== 'ENOENT') {
+      throw er
+    }
+    return null
+  }
+}
+
+const getAuth = (uri, opts = {}) => {
+  const { forceAuth } = opts
+  if (!uri) {
+    throw new Error('URI is required')
+  }
+  const regKey = regKeyFromURI(uri, forceAuth || opts)
+
+  // we are only allowed to use what's in forceAuth if specified
+  if (forceAuth && !regKey) {
+    return new Auth({
+      scopeAuthKey: null,
+      token: forceAuth._authToken || forceAuth.token,
+      username: forceAuth.username,
+      password: forceAuth._password || forceAuth.password,
+      auth: forceAuth._auth || forceAuth.auth,
+      certfile: forceAuth.certfile,
+      keyfile: forceAuth.keyfile,
+    })
+  }
+
+  // no auth for this URI, but might have it for the registry
+  if (!regKey) {
+    const registry = getRegistry(opts)
+    if (registry && uri !== registry && sameHost(uri, registry)) {
+      return getAuth(registry, opts)
+    } else if (registry !== opts.registry) {
+      // If making a tarball request to a different base URI than the
+      // registry where we logged in, but the same auth SHOULD be sent
+      // to that artifact host, then we track where it was coming in from,
+      // and warn the user if we get a 4xx error on it.
+      const scopeAuthKey = regKeyFromURI(registry, opts)
+      return new Auth({ scopeAuthKey })
+    }
+  }
+
+  const {
+    [`${regKey}:_authToken`]: token,
+    [`${regKey}:username`]: username,
+    [`${regKey}:_password`]: password,
+    [`${regKey}:_auth`]: auth,
+    [`${regKey}:certfile`]: certfile,
+    [`${regKey}:keyfile`]: keyfile,
+  } = opts
+
+  return new Auth({
+    scopeAuthKey: null,
+    token,
+    auth,
+    username,
+    password,
+    certfile,
+    keyfile,
+  })
+}
+
+class Auth {
+  constructor ({ token, auth, username, password, scopeAuthKey, certfile, keyfile }) {
+    this.scopeAuthKey = scopeAuthKey
+    this.token = null
+    this.auth = null
+    this.isBasicAuth = false
+    this.cert = null
+    this.key = null
+    if (token) {
+      this.token = token
+    } else if (auth) {
+      this.auth = auth
+    } else if (username && password) {
+      const p = Buffer.from(password, 'base64').toString('utf8')
+      this.auth = Buffer.from(`${username}:${p}`, 'utf8').toString('base64')
+      this.isBasicAuth = true
+    }
+    // mTLS may be used in conjunction with another auth method above
+    if (certfile && keyfile) {
+      const cert = maybeReadFile(certfile, 'utf-8')
+      const key = maybeReadFile(keyfile, 'utf-8')
+      if (cert && key) {
+        this.cert = cert
+        this.key = key
+      }
+    }
+  }
+}
+
+module.exports = getAuth
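A sketch of how the walk in regKeyFromURI() above resolves nerf-darted config keys; the token value here is hypothetical:

const getAuth = require('./auth.js')

const auth = getAuth('https://registry.npmjs.org/-/whoami', {
  // hypothetical credential; real keys come from the npm config
  '//registry.npmjs.org/:_authToken': 'npm_exampletoken',
})
auth.token       // 'npm_exampletoken'
auth.isBasicAuth // false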
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js
new file mode 100644
index 0000000000000..066ac3c32420f
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js
@@ -0,0 +1,100 @@
+'use strict'
+
+const errors = require('./errors.js')
+const { Response } = require('minipass-fetch')
+const defaultOpts = require('./default-opts.js')
+const log = require('proc-log')
+const cleanUrl = require('./clean-url.js')
+
+/* eslint-disable-next-line max-len */
+const moreInfoUrl = 'https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry'
+const checkResponse =
+  async ({ method, uri, res, startTime, auth, opts }) => {
+    opts = { ...defaultOpts, ...opts }
+    if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) {
+      log.notice('', res.headers.get('npm-notice'))
+    }
+
+    if (res.status >= 400) {
+      logRequest(method, res, startTime)
+      if (auth && auth.scopeAuthKey && !auth.token && !auth.auth) {
+      // we didn't have auth for THIS request, but we do have auth for
+      // requests to the registry indicated by the spec's scope value.
+      // Warn the user.
+        log.warn('registry', `No auth for URI, but auth present for scoped registry.
+
+URI: ${uri}
+Scoped Registry Key: ${auth.scopeAuthKey}
+
+More info here: ${moreInfoUrl}`)
+      }
+      return checkErrors(method, res, startTime, opts)
+    } else {
+      res.body.on('end', () => logRequest(method, res, startTime, opts))
+      if (opts.ignoreBody) {
+        res.body.resume()
+        return new Response(null, res)
+      }
+      return res
+    }
+  }
+module.exports = checkResponse
+
+function logRequest (method, res, startTime) {
+  const elapsedTime = Date.now() - startTime
+  const attempt = res.headers.get('x-fetch-attempts')
+  const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : ''
+  const cacheStatus = res.headers.get('x-local-cache-status')
+  const cacheStr = cacheStatus ? ` (cache ${cacheStatus})` : ''
+  const urlStr = cleanUrl(res.url)
+
+  log.http(
+    'fetch',
+    `${method.toUpperCase()} ${res.status} ${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}`
+  )
+}
+
+function checkErrors (method, res, startTime, opts) {
+  return res.buffer()
+    .catch(() => null)
+    .then(body => {
+      let parsed = body
+      try {
+        parsed = JSON.parse(body.toString('utf8'))
+      } catch {
+        // ignore errors
+      }
+      if (res.status === 401 && res.headers.get('www-authenticate')) {
+        const auth = res.headers.get('www-authenticate')
+          .split(/,\s*/)
+          .map(s => s.toLowerCase())
+        if (auth.indexOf('ipaddress') !== -1) {
+          throw new errors.HttpErrorAuthIPAddress(
+            method, res, parsed, opts.spec
+          )
+        } else if (auth.indexOf('otp') !== -1) {
+          throw new errors.HttpErrorAuthOTP(
+            method, res, parsed, opts.spec
+          )
+        } else {
+          throw new errors.HttpErrorAuthUnknown(
+            method, res, parsed, opts.spec
+          )
+        }
+      } else if (
+        res.status === 401 &&
+        body != null &&
+        /one-time pass/.test(body.toString('utf8'))
+      ) {
+        // Heuristic for malformed OTP responses that don't include the
+        // www-authenticate header.
+        throw new errors.HttpErrorAuthOTP(
+          method, res, parsed, opts.spec
+        )
+      } else {
+        throw new errors.HttpErrorGeneral(
+          method, res, parsed, opts.spec
+        )
+      }
+    })
+}
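The 401 routing above is the subtle part. A standalone sketch of the same decision table, assuming plain values in place of a minipass-fetch response:

// mirrors checkErrors(): the www-authenticate header wins, then the body heuristic
const classify = (status, wwwAuthenticate, bodyText = '') => {
  if (status === 401 && wwwAuthenticate) {
    const auth = wwwAuthenticate.split(/,\s*/).map(s => s.toLowerCase())
    if (auth.includes('ipaddress')) return 'EAUTHIP'
    if (auth.includes('otp')) return 'EOTP'
    return 'E401' // HttpErrorAuthUnknown keeps the generic E<status> code
  }
  if (status === 401 && /one-time pass/.test(bodyText)) return 'EOTP'
  return `E${status}` // HttpErrorGeneral
}

classify(401, 'OTP')                            // 'EOTP'
classify(401, null, 'please use one-time pass') // 'EOTP' (malformed-response heuristic)
classify(403, null)                             // 'E403'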
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/clean-url.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/clean-url.js
new file mode 100644
index 0000000000000..0c2656b5653a0
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/clean-url.js
@@ -0,0 +1,27 @@
+const { URL } = require('url')
+
+const replace = '***'
+const tokenRegex = /\bnpm_[a-zA-Z0-9]{36}\b/g
+const guidRegex = /\b[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\b/g
+
+const cleanUrl = (str) => {
+  if (typeof str !== 'string' || !str) {
+    return str
+  }
+
+  try {
+    const url = new URL(str)
+    if (url.password) {
+      url.password = replace
+      str = url.toString()
+    }
+  } catch {
+    // ignore errors
+  }
+
+  return str
+    .replace(tokenRegex, `npm_${replace}`)
+    .replace(guidRegex, `npm_${replace}`)
+}
+
+module.exports = cleanUrl
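Behavior sketch for the redaction above (example URLs, not real credentials):

const cleanUrl = require('./clean-url.js')

cleanUrl('https://user:hunter2@registry.example.com/pkg')
// 'https://user:***@registry.example.com/pkg'
cleanUrl('https://registry.example.com/?token=npm_' + 'a'.repeat(36))
// 'https://registry.example.com/?token=npm_***'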
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js
new file mode 100644
index 0000000000000..f0847f0b507e2
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js
@@ -0,0 +1,19 @@
+const pkg = require('../package.json')
+module.exports = {
+  maxSockets: 12,
+  method: 'GET',
+  registry: 'https://registry.npmjs.org/',
+  timeout: 5 * 60 * 1000, // 5 minutes
+  strictSSL: true,
+  noProxy: process.env.NOPROXY,
+  userAgent: `${pkg.name
+    }@${
+      pkg.version
+    }/node@${
+      process.version
+    }+${
+      process.arch
+    } (${
+      process.platform
+    })`,
+}
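The user agent above renders to a single line; for example (exact values depend on the installed package version and runtime):

const defaultOpts = require('./default-opts.js')
console.log(defaultOpts.userAgent)
// e.g. 'npm-registry-fetch@<version>/node@v20.5.0+x64 (linux)'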
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js
new file mode 100644
index 0000000000000..cf5ddba6f300c
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js
@@ -0,0 +1,80 @@
+'use strict'
+
+const url = require('url')
+
+function packageName (href) {
+  try {
+    let basePath = new url.URL(href).pathname.slice(1)
+    if (!basePath.match(/^-/)) {
+      basePath = basePath.split('/')
+      var index = basePath.indexOf('_rewrite')
+      if (index === -1) {
+        index = basePath.length - 1
+      } else {
+        index++
+      }
+      return decodeURIComponent(basePath[index])
+    }
+  } catch (_) {
+    // this is ok
+  }
+}
+
+class HttpErrorBase extends Error {
+  constructor (method, res, body, spec) {
+    super()
+    this.name = this.constructor.name
+    this.headers = res.headers.raw()
+    this.statusCode = res.status
+    this.code = `E${res.status}`
+    this.method = method
+    this.uri = res.url
+    this.body = body
+    this.pkgid = spec ? spec.toString() : packageName(res.url)
+  }
+}
+module.exports.HttpErrorBase = HttpErrorBase
+
+class HttpErrorGeneral extends HttpErrorBase {
+  constructor (method, res, body, spec) {
+    super(method, res, body, spec)
+    this.message = `${res.status} ${res.statusText} - ${
+      this.method.toUpperCase()
+    } ${
+      this.spec || this.uri
+    }${
+      (body && body.error) ? ' - ' + body.error : ''
+    }`
+    Error.captureStackTrace(this, HttpErrorGeneral)
+  }
+}
+module.exports.HttpErrorGeneral = HttpErrorGeneral
+
+class HttpErrorAuthOTP extends HttpErrorBase {
+  constructor (method, res, body, spec) {
+    super(method, res, body, spec)
+    this.message = 'OTP required for authentication'
+    this.code = 'EOTP'
+    Error.captureStackTrace(this, HttpErrorAuthOTP)
+  }
+}
+module.exports.HttpErrorAuthOTP = HttpErrorAuthOTP
+
+class HttpErrorAuthIPAddress extends HttpErrorBase {
+  constructor (method, res, body, spec) {
+    super(method, res, body, spec)
+    this.message = 'Login is not allowed from your IP address'
+    this.code = 'EAUTHIP'
+    Error.captureStackTrace(this, HttpErrorAuthIPAddress)
+  }
+}
+module.exports.HttpErrorAuthIPAddress = HttpErrorAuthIPAddress
+
+class HttpErrorAuthUnknown extends HttpErrorBase {
+  constructor (method, res, body, spec) {
+    super(method, res, body, spec)
+    this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate')
+    Error.captureStackTrace(this, HttpErrorAuthUnknown)
+  }
+}
+module.exports.HttpErrorAuthUnknown = HttpErrorAuthUnknown
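Consumer-side sketch: callers of npm-registry-fetch typically branch on the code property these classes set (the endpoint and opts below are illustrative):

const regFetch = require('npm-registry-fetch')
const opts = { /* registry and auth config */ }

regFetch.json('/-/whoami', opts).catch(er => {
  if (er.code === 'EOTP') {
    // prompt for a one-time password and retry with opts.otp set
  } else if (er.code === 'EAUTHIP') {
    // login is not allowed from this IP address
  } else {
    throw er
  }
})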
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js
new file mode 100644
index 0000000000000..23e349c5c5b96
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js
@@ -0,0 +1,247 @@
+'use strict'
+
+const { HttpErrorAuthOTP } = require('./errors.js')
+const checkResponse = require('./check-response.js')
+const getAuth = require('./auth.js')
+const fetch = require('make-fetch-happen')
+const JSONStream = require('minipass-json-stream')
+const npa = require('npm-package-arg')
+const qs = require('querystring')
+const url = require('url')
+const zlib = require('minizlib')
+const { Minipass } = require('minipass')
+
+const defaultOpts = require('./default-opts.js')
+
+// WhatWG URL throws if it's not fully resolved
+const urlIsValid = u => {
+  try {
+    return !!new url.URL(u)
+  } catch (_) {
+    return false
+  }
+}
+
+module.exports = regFetch
+function regFetch (uri, /* istanbul ignore next */ opts_ = {}) {
+  const opts = {
+    ...defaultOpts,
+    ...opts_,
+  }
+
+  // if we did not get a fully qualified URI, then we look at the registry
+  // config or relevant scope to resolve it.
+  const uriValid = urlIsValid(uri)
+  let registry = opts.registry || defaultOpts.registry
+  if (!uriValid) {
+    registry = opts.registry = (
+      (opts.spec && pickRegistry(opts.spec, opts)) ||
+      opts.registry ||
+      registry
+    )
+    uri = `${
+      registry.trim().replace(/\/?$/g, '')
+    }/${
+      uri.trim().replace(/^\//, '')
+    }`
+    // asserts that this is now valid
+    new url.URL(uri)
+  }
+
+  const method = opts.method || 'GET'
+
+  // resolve the auth and headers for this request, taking the scope and the registry prefix of `uri` into account
+  const startTime = Date.now()
+  const auth = getAuth(uri, opts)
+  const headers = getHeaders(uri, auth, opts)
+  let body = opts.body
+  const bodyIsStream = Minipass.isStream(body)
+  const bodyIsPromise = body &&
+    typeof body === 'object' &&
+    typeof body.then === 'function'
+
+  if (
+    body && !bodyIsStream && !bodyIsPromise && typeof body !== 'string' && !Buffer.isBuffer(body)
+  ) {
+    headers['content-type'] = headers['content-type'] || 'application/json'
+    body = JSON.stringify(body)
+  } else if (body && !headers['content-type']) {
+    headers['content-type'] = 'application/octet-stream'
+  }
+
+  if (opts.gzip) {
+    headers['content-encoding'] = 'gzip'
+    if (bodyIsStream) {
+      const gz = new zlib.Gzip()
+      body.on('error', /* istanbul ignore next: unlikely and hard to test */
+        err => gz.emit('error', err))
+      body = body.pipe(gz)
+    } else if (!bodyIsPromise) {
+      body = new zlib.Gzip().end(body).concat()
+    }
+  }
+
+  const parsed = new url.URL(uri)
+
+  if (opts.query) {
+    const q = typeof opts.query === 'string' ? qs.parse(opts.query)
+      : opts.query
+
+    Object.keys(q).forEach(key => {
+      if (q[key] !== undefined) {
+        parsed.searchParams.set(key, q[key])
+      }
+    })
+    uri = url.format(parsed)
+  }
+
+  if (parsed.searchParams.get('write') === 'true' && method === 'GET') {
+    // do not cache, because this GET is fetching a rev that will be
+    // used for a subsequent PUT or DELETE, so we need to conditionally
+    // update cache.
+    opts.offline = false
+    opts.preferOffline = false
+    opts.preferOnline = true
+  }
+
+  const doFetch = async fetchBody => {
+    const p = fetch(uri, {
+      agent: opts.agent,
+      algorithms: opts.algorithms,
+      body: fetchBody,
+      cache: getCacheMode(opts),
+      cachePath: opts.cache,
+      ca: opts.ca,
+      cert: auth.cert || opts.cert,
+      headers,
+      integrity: opts.integrity,
+      key: auth.key || opts.key,
+      localAddress: opts.localAddress,
+      maxSockets: opts.maxSockets,
+      memoize: opts.memoize,
+      method: method,
+      noProxy: opts.noProxy,
+      proxy: opts.httpsProxy || opts.proxy,
+      retry: opts.retry ? opts.retry : {
+        retries: opts.fetchRetries,
+        factor: opts.fetchRetryFactor,
+        minTimeout: opts.fetchRetryMintimeout,
+        maxTimeout: opts.fetchRetryMaxtimeout,
+      },
+      strictSSL: opts.strictSSL,
+      timeout: opts.timeout || 30 * 1000,
+    }).then(res => checkResponse({
+      method,
+      uri,
+      res,
+      registry,
+      startTime,
+      auth,
+      opts,
+    }))
+
+    if (typeof opts.otpPrompt === 'function') {
+      return p.catch(async er => {
+        if (er instanceof HttpErrorAuthOTP) {
+          let otp
+          // if otp fails to complete, we fail with that failure
+          try {
+            otp = await opts.otpPrompt()
+          } catch (_) {
+            // ignore this error
+          }
+          // if no otp provided, or otpPrompt errored, throw the original HTTP error
+          if (!otp) {
+            throw er
+          }
+          return regFetch(uri, { ...opts, otp })
+        }
+        throw er
+      })
+    } else {
+      return p
+    }
+  }
+
+  return Promise.resolve(body).then(doFetch)
+}
+
+module.exports.json = fetchJSON
+function fetchJSON (uri, opts) {
+  return regFetch(uri, opts).then(res => res.json())
+}
+
+module.exports.json.stream = fetchJSONStream
+function fetchJSONStream (uri, jsonPath,
+  /* istanbul ignore next */ opts_ = {}) {
+  const opts = { ...defaultOpts, ...opts_ }
+  const parser = JSONStream.parse(jsonPath, opts.mapJSON)
+  regFetch(uri, opts).then(res =>
+    res.body.on('error',
+      /* istanbul ignore next: unlikely and difficult to test */
+      er => parser.emit('error', er)).pipe(parser)
+  ).catch(er => parser.emit('error', er))
+  return parser
+}
+
+module.exports.pickRegistry = pickRegistry
+function pickRegistry (spec, opts = {}) {
+  spec = npa(spec)
+  let registry = spec.scope &&
+    opts[spec.scope.replace(/^@?/, '@') + ':registry']
+
+  if (!registry && opts.scope) {
+    registry = opts[opts.scope.replace(/^@?/, '@') + ':registry']
+  }
+
+  if (!registry) {
+    registry = opts.registry || defaultOpts.registry
+  }
+
+  return registry
+}
+
+function getCacheMode (opts) {
+  return opts.offline ? 'only-if-cached'
+    : opts.preferOffline ? 'force-cache'
+    : opts.preferOnline ? 'no-cache'
+    : 'default'
+}
+
+function getHeaders (uri, auth, opts) {
+  const headers = Object.assign({
+    'user-agent': opts.userAgent,
+  }, opts.headers || {})
+
+  if (opts.authType) {
+    headers['npm-auth-type'] = opts.authType
+  }
+
+  if (opts.scope) {
+    headers['npm-scope'] = opts.scope
+  }
+
+  if (opts.npmSession) {
+    headers['npm-session'] = opts.npmSession
+  }
+
+  if (opts.npmCommand) {
+    headers['npm-command'] = opts.npmCommand
+  }
+
+  // If a tarball is hosted in a different place than the manifest, only send
+  // credentials on `alwaysAuth`
+  if (auth.token) {
+    headers.authorization = `Bearer ${auth.token}`
+  } else if (auth.auth) {
+    headers.authorization = `Basic ${auth.auth}`
+  }
+
+  if (opts.otp) {
+    headers['npm-otp'] = opts.otp
+  }
+
+  return headers
+}
+
+module.exports.cleanUrl = require('./clean-url.js')
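
For context (not part of the patch), a usage sketch for the API above; the scope and registry values are assumptions. `pickRegistry()` resolves a scoped registry from config-style options, and relative URIs are then resolved against it:

    const regFetch = require('npm-registry-fetch')

    const registry = regFetch.pickRegistry('@myscope/pkg', {
      '@myscope:registry': 'https://registry.example.com/',
    }) // -> 'https://registry.example.com/'

    // fetchJSON resolves the relative path against `registry`; note that a
    // write=true query on a GET forces preferOnline per the code above.
    regFetch.json('/-/ping', { registry })
      .then(res => console.log(res))
      .catch(er => console.error(er.code))
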
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/package.json b/node_modules/pacote/node_modules/npm-registry-fetch/package.json
similarity index 50%
rename from node_modules/pacote/node_modules/npm-pick-manifest/package.json
rename to node_modules/pacote/node_modules/npm-registry-fetch/package.json
index feff81f5b2fee..2afadf939743b 100644
--- a/node_modules/pacote/node_modules/npm-pick-manifest/package.json
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/package.json
@@ -1,57 +1,73 @@
 {
-  "name": "npm-pick-manifest",
-  "version": "8.0.2",
-  "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
-  "main": "./lib",
+  "name": "npm-registry-fetch",
+  "version": "16.0.0",
+  "description": "Fetch-based http client for use with npm registry APIs",
+  "main": "lib",
   "files": [
     "bin/",
     "lib/"
   ],
   "scripts": {
-    "coverage": "tap",
+    "eslint": "eslint",
     "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
     "test": "tap",
     "posttest": "npm run lint",
+    "npmclilint": "npmcli-lint",
+    "postsnap": "npm run lintfix --",
     "postlint": "template-oss-check",
-    "lintfix": "npm run lint -- --fix",
     "snap": "tap",
     "template-oss-apply": "template-oss-apply --force"
   },
   "repository": {
     "type": "git",
-    "url": "https://github.com/npm/npm-pick-manifest.git"
+    "url": "https://github.com/npm/npm-registry-fetch.git"
   },
   "keywords": [
     "npm",
-    "semver",
-    "package manager"
+    "registry",
+    "fetch"
   ],
   "author": "GitHub Inc.",
   "license": "ISC",
   "dependencies": {
-    "npm-install-checks": "^6.0.0",
-    "npm-normalize-package-bin": "^3.0.0",
-    "npm-package-arg": "^10.0.0",
-    "semver": "^7.3.5"
+    "make-fetch-happen": "^13.0.0",
+    "minipass": "^7.0.2",
+    "minipass-fetch": "^3.0.0",
+    "minipass-json-stream": "^1.0.1",
+    "minizlib": "^2.1.2",
+    "npm-package-arg": "^11.0.0",
+    "proc-log": "^3.0.0"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
+    "cacache": "^18.0.0",
+    "nock": "^13.2.4",
+    "require-inject": "^1.4.4",
+    "ssri": "^10.0.0",
     "tap": "^16.0.1"
   },
   "tap": {
     "check-coverage": true,
+    "test-ignore": "test[\\\\/](util|cache)[\\\\/]",
     "nyc-arg": [
       "--exclude",
       "tap-snapshots/**"
     ]
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "version": "4.18.0",
-    "publish": true
+    "publish": "true",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
   }
 }
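
For context (not part of the patch): the resulting engines range drops Node 14 and raises the 16.x floor to 16.14.0. A quick illustration with the `semver` package, which this repo already depends on elsewhere:

    const semver = require('semver')

    semver.satisfies('16.13.2', '^16.14.0 || >=18.0.0') // false (below new floor)
    semver.satisfies('16.14.0', '^16.14.0 || >=18.0.0') // true
    semver.satisfies('20.5.0',  '^16.14.0 || >=18.0.0') // true
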
diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json
index 987cbccded90b..62864ae4500a2 100644
--- a/node_modules/pacote/package.json
+++ b/node_modules/pacote/package.json
@@ -1,6 +1,6 @@
 {
   "name": "pacote",
-  "version": "17.0.0",
+  "version": "17.0.1",
   "description": "JavaScript package downloader",
   "author": "GitHub Inc.",
   "bin": {
@@ -53,8 +53,8 @@
     "minipass": "^7.0.2",
     "npm-package-arg": "^11.0.0",
     "npm-packlist": "^7.0.0",
-    "npm-pick-manifest": "^8.0.0",
-    "npm-registry-fetch": "^15.0.0",
+    "npm-pick-manifest": "^9.0.0",
+    "npm-registry-fetch": "^16.0.0",
     "proc-log": "^3.0.0",
     "promise-retry": "^2.0.1",
     "read-package-json": "^7.0.0",
diff --git a/package-lock.json b/package-lock.json
index 77480935db1e3..0961d74d56a4e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -138,7 +138,7 @@
         "npm-user-validate": "^2.0.0",
         "npmlog": "^7.0.1",
         "p-map": "^4.0.0",
-        "pacote": "^17.0.0",
+        "pacote": "^17.0.1",
         "parse-conflict-json": "^3.0.1",
         "proc-log": "^3.0.0",
         "qrcode-terminal": "^0.12.0",
@@ -230,7 +230,7 @@
         "json-stringify-safe": "^5.0.1",
         "nock": "^13.3.0",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^17.0.0",
+        "pacote": "^17.0.1",
         "tap": "^16.3.4"
       },
       "engines": {
@@ -10474,9 +10474,9 @@
       }
     },
     "node_modules/pacote": {
-      "version": "17.0.0",
-      "resolved": "https://registry.npmjs.org/pacote/-/pacote-17.0.0.tgz",
-      "integrity": "sha512-ho3CUJW0Eh/z6qus9YfPE7lbLoIr97T9KkvrcWcqmykNMuvF1bGL2IXJ0U+hoe7rAamLCX6CXl6xp0aPvyoPag==",
+      "version": "17.0.1",
+      "resolved": "https://registry.npmjs.org/pacote/-/pacote-17.0.1.tgz",
+      "integrity": "sha512-rZzq8E6l+rputgZnhDd/t9rpp47oVnz4SPaKKBRPb7zmrM/QJLub7iJGge4UglWfpxeRynQLUI2Tj2MYaiRqCg==",
       "inBundle": true,
       "dependencies": {
         "@npmcli/git": "^5.0.0",
@@ -10488,8 +10488,8 @@
         "minipass": "^7.0.2",
         "npm-package-arg": "^11.0.0",
         "npm-packlist": "^7.0.0",
-        "npm-pick-manifest": "^8.0.0",
-        "npm-registry-fetch": "^15.0.0",
+        "npm-pick-manifest": "^9.0.0",
+        "npm-registry-fetch": "^16.0.0",
         "proc-log": "^3.0.0",
         "promise-retry": "^2.0.1",
         "read-package-json": "^7.0.0",
@@ -10505,55 +10505,22 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/pacote/node_modules/lru-cache": {
-      "version": "7.18.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
-      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
-      "inBundle": true,
-      "engines": {
-        "node": ">=12"
-      }
-    },
-    "node_modules/pacote/node_modules/npm-pick-manifest": {
-      "version": "8.0.2",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz",
-      "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==",
+    "node_modules/pacote/node_modules/npm-registry-fetch": {
+      "version": "16.0.0",
+      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-16.0.0.tgz",
+      "integrity": "sha512-JFCpAPUpvpwfSydv99u85yhP68rNIxSFmDpNbNnRWKSe3gpjHnWL8v320gATwRzjtgmZ9Jfe37+ZPOLZPwz6BQ==",
       "inBundle": true,
       "dependencies": {
-        "npm-install-checks": "^6.0.0",
-        "npm-normalize-package-bin": "^3.0.0",
-        "npm-package-arg": "^10.0.0",
-        "semver": "^7.3.5"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info": {
-      "version": "6.1.1",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
-      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
-      "inBundle": true,
-      "dependencies": {
-        "lru-cache": "^7.5.1"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg": {
-      "version": "10.1.0",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
-      "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
-      "inBundle": true,
-      "dependencies": {
-        "hosted-git-info": "^6.0.0",
-        "proc-log": "^3.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^5.0.0"
+        "make-fetch-happen": "^13.0.0",
+        "minipass": "^7.0.2",
+        "minipass-fetch": "^3.0.0",
+        "minipass-json-stream": "^1.0.1",
+        "minizlib": "^2.1.2",
+        "npm-package-arg": "^11.0.0",
+        "proc-log": "^3.0.0"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/parent-module": {
@@ -16152,7 +16119,7 @@
         "npm-pick-manifest": "^9.0.0",
         "npm-registry-fetch": "^15.0.0",
         "npmlog": "^7.0.1",
-        "pacote": "^17.0.0",
+        "pacote": "^17.0.1",
         "parse-conflict-json": "^3.0.0",
         "proc-log": "^3.0.0",
         "promise-all-reject-late": "^1.0.0",
@@ -16233,7 +16200,7 @@
         "diff": "^5.1.0",
         "minimatch": "^9.0.0",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^17.0.0",
+        "pacote": "^17.0.1",
         "tar": "^6.1.13"
       },
       "devDependencies": {
@@ -16254,7 +16221,7 @@
         "ci-info": "^3.7.1",
         "npm-package-arg": "^11.0.0",
         "npmlog": "^7.0.1",
-        "pacote": "^17.0.0",
+        "pacote": "^17.0.1",
         "proc-log": "^3.0.0",
         "read": "^2.0.0",
         "read-package-json-fast": "^3.0.2",
@@ -16332,7 +16299,7 @@
         "@npmcli/arborist": "^6.3.0",
         "@npmcli/run-script": "^6.0.0",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^17.0.0"
+        "pacote": "^17.0.1"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
diff --git a/package.json b/package.json
index 5c3cedfa0c64a..f8a14c77a1663 100644
--- a/package.json
+++ b/package.json
@@ -103,7 +103,7 @@
     "npm-user-validate": "^2.0.0",
     "npmlog": "^7.0.1",
     "p-map": "^4.0.0",
-    "pacote": "^17.0.0",
+    "pacote": "^17.0.1",
     "parse-conflict-json": "^3.0.1",
     "proc-log": "^3.0.0",
     "qrcode-terminal": "^0.12.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 0c7f52344ed4b..86605fcfd6fbc 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -26,7 +26,7 @@
     "npm-pick-manifest": "^9.0.0",
     "npm-registry-fetch": "^15.0.0",
     "npmlog": "^7.0.1",
-    "pacote": "^17.0.0",
+    "pacote": "^17.0.1",
     "parse-conflict-json": "^3.0.0",
     "proc-log": "^3.0.0",
     "promise-all-reject-late": "^1.0.0",
diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json
index 723fe876c5459..8f19809199c8c 100644
--- a/workspaces/libnpmdiff/package.json
+++ b/workspaces/libnpmdiff/package.json
@@ -53,7 +53,7 @@
     "diff": "^5.1.0",
     "minimatch": "^9.0.0",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^17.0.0",
+    "pacote": "^17.0.1",
     "tar": "^6.1.13"
   },
   "templateOSS": {
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index e749a8a0b4e6a..9a35feb28f0dd 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -64,7 +64,7 @@
     "ci-info": "^3.7.1",
     "npm-package-arg": "^11.0.0",
     "npmlog": "^7.0.1",
-    "pacote": "^17.0.0",
+    "pacote": "^17.0.1",
     "proc-log": "^3.0.0",
     "read": "^2.0.0",
     "read-package-json-fast": "^3.0.2",
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index fa252913f865f..f461bf76c783b 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -39,7 +39,7 @@
     "@npmcli/arborist": "^6.3.0",
     "@npmcli/run-script": "^6.0.0",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^17.0.0"
+    "pacote": "^17.0.1"
   },
   "engines": {
     "node": "^16.14.0 || >=18.0.0"

From 228b224cd328805e45e6e733429c5a5573b4c745 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 15:40:34 -0700
Subject: [PATCH 46/68] deps: npm-registry-fetch@16.0.0

---
 node_modules/.gitignore                       |  24 ++---
 .../node_modules/@npmcli/agent/lib/dns.js     |   0
 .../node_modules/@npmcli/agent/lib/errors.js  |   0
 .../node_modules/@npmcli/agent/lib/http.js    |   0
 .../node_modules/@npmcli/agent/lib/https.js   |   0
 .../node_modules/@npmcli/agent/lib/index.js   |   0
 .../@npmcli/agent/lib/proxy/http.js           |   0
 .../@npmcli/agent/lib/proxy/index.js          |   0
 .../@npmcli/agent/lib/proxy/null.js           |   0
 .../@npmcli/agent/lib/proxy/socks.js          |   0
 .../node_modules/@npmcli/agent/lib/util.js    |   0
 .../node_modules/@npmcli/agent/package.json   |   0
 .../node_modules/cacache/LICENSE.md           |   0
 .../node_modules/cacache/lib/content/path.js  |   0
 .../node_modules/cacache/lib/content/read.js  |   0
 .../node_modules/cacache/lib/content/rm.js    |   0
 .../node_modules/cacache/lib/content/write.js |   0
 .../node_modules/cacache/lib/entry-index.js   |   0
 .../node_modules/cacache/lib/get.js           |   0
 .../node_modules/cacache/lib/index.js         |   0
 .../node_modules/cacache/lib/memoization.js   |   0
 .../node_modules/cacache/lib/put.js           |   0
 .../node_modules/cacache/lib/rm.js            |   0
 .../node_modules/cacache/lib/util/glob.js     |   0
 .../cacache/lib/util/hash-to-segments.js      |   0
 .../node_modules/cacache/lib/util/tmp.js      |   0
 .../node_modules/cacache/lib/verify.js        |   0
 .../node_modules/cacache/package.json         |   0
 .../node_modules/hosted-git-info/LICENSE      |   0
 .../hosted-git-info/lib/from-url.js           |   0
 .../node_modules/hosted-git-info/lib/hosts.js |   0
 .../node_modules/hosted-git-info/lib/index.js |   0
 .../hosted-git-info/lib/parse-url.js          |   0
 .../node_modules/hosted-git-info/package.json |   0
 .../node_modules/lru-cache/LICENSE            |   0
 .../node_modules/lru-cache/index.js           |   0
 .../node_modules/lru-cache/index.mjs          |   0
 .../node_modules/lru-cache/package.json       |   0
 .../node_modules/make-fetch-happen/LICENSE    |   0
 .../make-fetch-happen/lib/cache/entry.js      |   0
 .../make-fetch-happen/lib/cache/errors.js     |   0
 .../make-fetch-happen/lib/cache/index.js      |   0
 .../make-fetch-happen/lib/cache/key.js        |   0
 .../make-fetch-happen/lib/cache/policy.js     |   0
 .../make-fetch-happen/lib/fetch.js            |   0
 .../make-fetch-happen/lib/index.js            |   0
 .../make-fetch-happen/lib/options.js          |   0
 .../make-fetch-happen/lib/pipeline.js         |   0
 .../make-fetch-happen/lib/remote.js           |   0
 .../make-fetch-happen/package.json            |   0
 .../node_modules/npm-package-arg/LICENSE      |   0
 .../node_modules/npm-package-arg/lib/npa.js   |   0
 .../node_modules/npm-package-arg/package.json |   0
 .../npm-registry-fetch/LICENSE.md             |   0
 .../npm-registry-fetch/lib/auth.js            |   0
 .../npm-registry-fetch/lib/check-response.js  |   0
 .../npm-registry-fetch/lib/clean-url.js       |   0
 .../npm-registry-fetch/lib/default-opts.js    |   0
 .../npm-registry-fetch/lib/errors.js          |   0
 .../npm-registry-fetch/lib/index.js           |   0
 .../npm-registry-fetch/package.json           |  18 ++--
 node_modules/npm-registry-fetch/package.json  |  18 ++--
 package-lock.json                             | 100 +++++++++---------
 package.json                                  |   2 +-
 workspaces/arborist/package.json              |   2 +-
 workspaces/libnpmaccess/package.json          |   2 +-
 workspaces/libnpmhook/package.json            |   2 +-
 workspaces/libnpmorg/package.json             |   2 +-
 workspaces/libnpmpublish/package.json         |   2 +-
 workspaces/libnpmsearch/package.json          |   2 +-
 workspaces/libnpmteam/package.json            |   2 +-
 71 files changed, 87 insertions(+), 89 deletions(-)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/@npmcli/agent/lib/dns.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/@npmcli/agent/lib/errors.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/@npmcli/agent/lib/http.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/@npmcli/agent/lib/https.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/@npmcli/agent/lib/index.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/@npmcli/agent/lib/proxy/http.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/@npmcli/agent/lib/proxy/index.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/@npmcli/agent/lib/proxy/null.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/@npmcli/agent/lib/proxy/socks.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/@npmcli/agent/lib/util.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/@npmcli/agent/package.json (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/LICENSE.md (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/lib/content/path.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/lib/content/read.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/lib/content/rm.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/lib/content/write.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/lib/entry-index.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/lib/get.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/lib/index.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/lib/memoization.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/lib/put.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/lib/rm.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/lib/util/glob.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/lib/util/hash-to-segments.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/lib/util/tmp.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/lib/verify.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/cacache/package.json (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/hosted-git-info/LICENSE (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/hosted-git-info/lib/from-url.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/hosted-git-info/lib/hosts.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/hosted-git-info/lib/index.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/hosted-git-info/lib/parse-url.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/hosted-git-info/package.json (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/lru-cache/LICENSE (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/lru-cache/index.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/lru-cache/index.mjs (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/lru-cache/package.json (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/make-fetch-happen/LICENSE (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/make-fetch-happen/lib/cache/entry.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/make-fetch-happen/lib/cache/errors.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/make-fetch-happen/lib/cache/index.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/make-fetch-happen/lib/cache/key.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/make-fetch-happen/lib/cache/policy.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/make-fetch-happen/lib/fetch.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/make-fetch-happen/lib/index.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/make-fetch-happen/lib/options.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/make-fetch-happen/lib/pipeline.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/make-fetch-happen/lib/remote.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/make-fetch-happen/package.json (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/npm-package-arg/LICENSE (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/npm-package-arg/lib/npa.js (100%)
 rename node_modules/{npm-registry-fetch => npm-profile}/node_modules/npm-package-arg/package.json (100%)
 rename node_modules/{pacote => npm-profile}/node_modules/npm-registry-fetch/LICENSE.md (100%)
 rename node_modules/{pacote => npm-profile}/node_modules/npm-registry-fetch/lib/auth.js (100%)
 rename node_modules/{pacote => npm-profile}/node_modules/npm-registry-fetch/lib/check-response.js (100%)
 rename node_modules/{pacote => npm-profile}/node_modules/npm-registry-fetch/lib/clean-url.js (100%)
 rename node_modules/{pacote => npm-profile}/node_modules/npm-registry-fetch/lib/default-opts.js (100%)
 rename node_modules/{pacote => npm-profile}/node_modules/npm-registry-fetch/lib/errors.js (100%)
 rename node_modules/{pacote => npm-profile}/node_modules/npm-registry-fetch/lib/index.js (100%)
 rename node_modules/{pacote => npm-profile}/node_modules/npm-registry-fetch/package.json (88%)

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index aa04f66c550f2..19178b9efa92a 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -203,25 +203,23 @@
 !/npm-packlist
 !/npm-pick-manifest
 !/npm-profile
+!/npm-profile/node_modules/
+/npm-profile/node_modules/*
+!/npm-profile/node_modules/@npmcli/
+/npm-profile/node_modules/@npmcli/*
+!/npm-profile/node_modules/@npmcli/agent
+!/npm-profile/node_modules/cacache
+!/npm-profile/node_modules/hosted-git-info
+!/npm-profile/node_modules/lru-cache
+!/npm-profile/node_modules/make-fetch-happen
+!/npm-profile/node_modules/npm-package-arg
+!/npm-profile/node_modules/npm-registry-fetch
 !/npm-registry-fetch
-!/npm-registry-fetch/node_modules/
-/npm-registry-fetch/node_modules/*
-!/npm-registry-fetch/node_modules/@npmcli/
-/npm-registry-fetch/node_modules/@npmcli/*
-!/npm-registry-fetch/node_modules/@npmcli/agent
-!/npm-registry-fetch/node_modules/cacache
-!/npm-registry-fetch/node_modules/hosted-git-info
-!/npm-registry-fetch/node_modules/lru-cache
-!/npm-registry-fetch/node_modules/make-fetch-happen
-!/npm-registry-fetch/node_modules/npm-package-arg
 !/npm-user-validate
 !/npmlog
 !/once
 !/p-map
 !/pacote
-!/pacote/node_modules/
-/pacote/node_modules/*
-!/pacote/node_modules/npm-registry-fetch
 !/parse-conflict-json
 !/path-is-absolute
 !/path-key
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/dns.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/dns.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/dns.js
rename to node_modules/npm-profile/node_modules/@npmcli/agent/lib/dns.js
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/errors.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/errors.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/errors.js
rename to node_modules/npm-profile/node_modules/@npmcli/agent/lib/errors.js
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/http.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/http.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/http.js
rename to node_modules/npm-profile/node_modules/@npmcli/agent/lib/http.js
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/https.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/https.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/https.js
rename to node_modules/npm-profile/node_modules/@npmcli/agent/lib/https.js
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/index.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/index.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/index.js
rename to node_modules/npm-profile/node_modules/@npmcli/agent/lib/index.js
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/http.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/http.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/http.js
rename to node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/http.js
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/index.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/index.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/index.js
rename to node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/index.js
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/null.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/null.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/null.js
rename to node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/null.js
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/socks.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/socks.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/proxy/socks.js
rename to node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/socks.js
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/util.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/util.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/@npmcli/agent/lib/util.js
rename to node_modules/npm-profile/node_modules/@npmcli/agent/lib/util.js
diff --git a/node_modules/npm-registry-fetch/node_modules/@npmcli/agent/package.json b/node_modules/npm-profile/node_modules/@npmcli/agent/package.json
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/@npmcli/agent/package.json
rename to node_modules/npm-profile/node_modules/@npmcli/agent/package.json
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/LICENSE.md b/node_modules/npm-profile/node_modules/cacache/LICENSE.md
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/LICENSE.md
rename to node_modules/npm-profile/node_modules/cacache/LICENSE.md
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/path.js b/node_modules/npm-profile/node_modules/cacache/lib/content/path.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/lib/content/path.js
rename to node_modules/npm-profile/node_modules/cacache/lib/content/path.js
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/read.js b/node_modules/npm-profile/node_modules/cacache/lib/content/read.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/lib/content/read.js
rename to node_modules/npm-profile/node_modules/cacache/lib/content/read.js
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/rm.js b/node_modules/npm-profile/node_modules/cacache/lib/content/rm.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/lib/content/rm.js
rename to node_modules/npm-profile/node_modules/cacache/lib/content/rm.js
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/write.js b/node_modules/npm-profile/node_modules/cacache/lib/content/write.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/lib/content/write.js
rename to node_modules/npm-profile/node_modules/cacache/lib/content/write.js
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/lib/entry-index.js b/node_modules/npm-profile/node_modules/cacache/lib/entry-index.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/lib/entry-index.js
rename to node_modules/npm-profile/node_modules/cacache/lib/entry-index.js
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/lib/get.js b/node_modules/npm-profile/node_modules/cacache/lib/get.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/lib/get.js
rename to node_modules/npm-profile/node_modules/cacache/lib/get.js
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/lib/index.js b/node_modules/npm-profile/node_modules/cacache/lib/index.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/lib/index.js
rename to node_modules/npm-profile/node_modules/cacache/lib/index.js
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/lib/memoization.js b/node_modules/npm-profile/node_modules/cacache/lib/memoization.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/lib/memoization.js
rename to node_modules/npm-profile/node_modules/cacache/lib/memoization.js
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/lib/put.js b/node_modules/npm-profile/node_modules/cacache/lib/put.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/lib/put.js
rename to node_modules/npm-profile/node_modules/cacache/lib/put.js
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/lib/rm.js b/node_modules/npm-profile/node_modules/cacache/lib/rm.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/lib/rm.js
rename to node_modules/npm-profile/node_modules/cacache/lib/rm.js
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/glob.js b/node_modules/npm-profile/node_modules/cacache/lib/util/glob.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/lib/util/glob.js
rename to node_modules/npm-profile/node_modules/cacache/lib/util/glob.js
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/npm-profile/node_modules/cacache/lib/util/hash-to-segments.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/lib/util/hash-to-segments.js
rename to node_modules/npm-profile/node_modules/cacache/lib/util/hash-to-segments.js
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/tmp.js b/node_modules/npm-profile/node_modules/cacache/lib/util/tmp.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/lib/util/tmp.js
rename to node_modules/npm-profile/node_modules/cacache/lib/util/tmp.js
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/lib/verify.js b/node_modules/npm-profile/node_modules/cacache/lib/verify.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/lib/verify.js
rename to node_modules/npm-profile/node_modules/cacache/lib/verify.js
diff --git a/node_modules/npm-registry-fetch/node_modules/cacache/package.json b/node_modules/npm-profile/node_modules/cacache/package.json
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/cacache/package.json
rename to node_modules/npm-profile/node_modules/cacache/package.json
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/LICENSE b/node_modules/npm-profile/node_modules/hosted-git-info/LICENSE
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/hosted-git-info/LICENSE
rename to node_modules/npm-profile/node_modules/hosted-git-info/LICENSE
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/from-url.js b/node_modules/npm-profile/node_modules/hosted-git-info/lib/from-url.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/from-url.js
rename to node_modules/npm-profile/node_modules/hosted-git-info/lib/from-url.js
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/hosts.js b/node_modules/npm-profile/node_modules/hosted-git-info/lib/hosts.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/hosts.js
rename to node_modules/npm-profile/node_modules/hosted-git-info/lib/hosts.js
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/index.js b/node_modules/npm-profile/node_modules/hosted-git-info/lib/index.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/index.js
rename to node_modules/npm-profile/node_modules/hosted-git-info/lib/index.js
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/npm-profile/node_modules/hosted-git-info/lib/parse-url.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/parse-url.js
rename to node_modules/npm-profile/node_modules/hosted-git-info/lib/parse-url.js
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/package.json b/node_modules/npm-profile/node_modules/hosted-git-info/package.json
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/hosted-git-info/package.json
rename to node_modules/npm-profile/node_modules/hosted-git-info/package.json
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE b/node_modules/npm-profile/node_modules/lru-cache/LICENSE
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE
rename to node_modules/npm-profile/node_modules/lru-cache/LICENSE
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/index.js b/node_modules/npm-profile/node_modules/lru-cache/index.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/lru-cache/index.js
rename to node_modules/npm-profile/node_modules/lru-cache/index.js
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/index.mjs b/node_modules/npm-profile/node_modules/lru-cache/index.mjs
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/lru-cache/index.mjs
rename to node_modules/npm-profile/node_modules/lru-cache/index.mjs
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/package.json b/node_modules/npm-profile/node_modules/lru-cache/package.json
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/lru-cache/package.json
rename to node_modules/npm-profile/node_modules/lru-cache/package.json
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE b/node_modules/npm-profile/node_modules/make-fetch-happen/LICENSE
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE
rename to node_modules/npm-profile/node_modules/make-fetch-happen/LICENSE
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/entry.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js
rename to node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/entry.js
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/errors.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js
rename to node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/errors.js
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/index.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js
rename to node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/index.js
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/key.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js
rename to node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/key.js
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/policy.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js
rename to node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/policy.js
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/fetch.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js
rename to node_modules/npm-profile/node_modules/make-fetch-happen/lib/fetch.js
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/index.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js
rename to node_modules/npm-profile/node_modules/make-fetch-happen/lib/index.js
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/options.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js
rename to node_modules/npm-profile/node_modules/make-fetch-happen/lib/options.js
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/pipeline.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js
rename to node_modules/npm-profile/node_modules/make-fetch-happen/lib/pipeline.js
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/remote.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js
rename to node_modules/npm-profile/node_modules/make-fetch-happen/lib/remote.js
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json b/node_modules/npm-profile/node_modules/make-fetch-happen/package.json
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json
rename to node_modules/npm-profile/node_modules/make-fetch-happen/package.json
diff --git a/node_modules/npm-registry-fetch/node_modules/npm-package-arg/LICENSE b/node_modules/npm-profile/node_modules/npm-package-arg/LICENSE
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/npm-package-arg/LICENSE
rename to node_modules/npm-profile/node_modules/npm-package-arg/LICENSE
diff --git a/node_modules/npm-registry-fetch/node_modules/npm-package-arg/lib/npa.js b/node_modules/npm-profile/node_modules/npm-package-arg/lib/npa.js
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/npm-package-arg/lib/npa.js
rename to node_modules/npm-profile/node_modules/npm-package-arg/lib/npa.js
diff --git a/node_modules/npm-registry-fetch/node_modules/npm-package-arg/package.json b/node_modules/npm-profile/node_modules/npm-package-arg/package.json
similarity index 100%
rename from node_modules/npm-registry-fetch/node_modules/npm-package-arg/package.json
rename to node_modules/npm-profile/node_modules/npm-package-arg/package.json
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md
similarity index 100%
rename from node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md
rename to node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js
similarity index 100%
rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js
rename to node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js
similarity index 100%
rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js
rename to node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/clean-url.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/clean-url.js
similarity index 100%
rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/clean-url.js
rename to node_modules/npm-profile/node_modules/npm-registry-fetch/lib/clean-url.js
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js
similarity index 100%
rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js
rename to node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js
similarity index 100%
rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js
rename to node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js
rename to node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/package.json b/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json
similarity index 88%
rename from node_modules/pacote/node_modules/npm-registry-fetch/package.json
rename to node_modules/npm-profile/node_modules/npm-registry-fetch/package.json
index 2afadf939743b..8832c8a2e95d3 100644
--- a/node_modules/pacote/node_modules/npm-registry-fetch/package.json
+++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-registry-fetch",
-  "version": "16.0.0",
+  "version": "15.0.0",
   "description": "Fetch-based http client for use with npm registry APIs",
   "main": "lib",
   "files": [
@@ -31,18 +31,18 @@
   "author": "GitHub Inc.",
   "license": "ISC",
   "dependencies": {
-    "make-fetch-happen": "^13.0.0",
+    "make-fetch-happen": "^12.0.0",
     "minipass": "^7.0.2",
     "minipass-fetch": "^3.0.0",
     "minipass-json-stream": "^1.0.1",
     "minizlib": "^2.1.2",
-    "npm-package-arg": "^11.0.0",
+    "npm-package-arg": "^10.0.0",
     "proc-log": "^3.0.0"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "cacache": "^18.0.0",
+    "cacache": "^17.0.0",
     "nock": "^13.2.4",
     "require-inject": "^1.4.4",
     "ssri": "^10.0.0",
@@ -57,17 +57,17 @@
     ]
   },
   "engines": {
-    "node": "^16.14.0 || >=18.0.0"
+    "node": "^16.13.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.18.0",
-    "publish": "true",
     "ciVersions": [
-      "16.14.0",
+      "16.13.0",
       "16.x",
       "18.0.0",
       "18.x"
-    ]
+    ],
+    "version": "4.18.0",
+    "publish": "true"
   }
 }
diff --git a/node_modules/npm-registry-fetch/package.json b/node_modules/npm-registry-fetch/package.json
index 8832c8a2e95d3..2afadf939743b 100644
--- a/node_modules/npm-registry-fetch/package.json
+++ b/node_modules/npm-registry-fetch/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-registry-fetch",
-  "version": "15.0.0",
+  "version": "16.0.0",
   "description": "Fetch-based http client for use with npm registry APIs",
   "main": "lib",
   "files": [
@@ -31,18 +31,18 @@
   "author": "GitHub Inc.",
   "license": "ISC",
   "dependencies": {
-    "make-fetch-happen": "^12.0.0",
+    "make-fetch-happen": "^13.0.0",
     "minipass": "^7.0.2",
     "minipass-fetch": "^3.0.0",
     "minipass-json-stream": "^1.0.1",
     "minizlib": "^2.1.2",
-    "npm-package-arg": "^10.0.0",
+    "npm-package-arg": "^11.0.0",
     "proc-log": "^3.0.0"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "cacache": "^17.0.0",
+    "cacache": "^18.0.0",
     "nock": "^13.2.4",
     "require-inject": "^1.4.4",
     "ssri": "^10.0.0",
@@ -57,17 +57,17 @@
     ]
   },
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.18.0",
+    "publish": "true",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
-    ],
-    "version": "4.18.0",
-    "publish": "true"
+    ]
   }
 }
diff --git a/package-lock.json b/package-lock.json
index 0961d74d56a4e..5ebe3251dcd84 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -134,7 +134,7 @@
         "npm-package-arg": "^11.0.0",
         "npm-pick-manifest": "^9.0.0",
         "npm-profile": "^8.0.0",
-        "npm-registry-fetch": "^15.0.0",
+        "npm-registry-fetch": "^16.0.0",
         "npm-user-validate": "^2.0.0",
         "npmlog": "^7.0.1",
         "p-map": "^4.0.0",
@@ -9828,25 +9828,7 @@
         "node": "^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/npm-registry-fetch": {
-      "version": "15.0.0",
-      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-15.0.0.tgz",
-      "integrity": "sha512-CMFzk0HMDQ3fmFZ4v62C05g6eBwoU3PxpzFf4QiE360vfmtKZJkj+iCpgLx+I4oJT6Kx8g67Coyk729Q27M2JQ==",
-      "inBundle": true,
-      "dependencies": {
-        "make-fetch-happen": "^12.0.0",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^3.0.0",
-        "minipass-json-stream": "^1.0.1",
-        "minizlib": "^2.1.2",
-        "npm-package-arg": "^10.0.0",
-        "proc-log": "^3.0.0"
-      },
-      "engines": {
-        "node": "^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/npm-registry-fetch/node_modules/@npmcli/agent": {
+    "node_modules/npm-profile/node_modules/@npmcli/agent": {
       "version": "1.1.0",
       "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-1.1.0.tgz",
       "integrity": "sha512-I9g/2XFOkflxm5IDrGSjCcR2d12Jmic0di9w/WpJBbzYuSXmfgoL+WwEV7zY/ajxzQr7o4vSkEJh6piyFLYtuQ==",
@@ -9859,7 +9841,7 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/npm-registry-fetch/node_modules/cacache": {
+    "node_modules/npm-profile/node_modules/cacache": {
       "version": "17.1.4",
       "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
       "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
@@ -9882,7 +9864,7 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/npm-registry-fetch/node_modules/hosted-git-info": {
+    "node_modules/npm-profile/node_modules/hosted-git-info": {
       "version": "6.1.1",
       "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
       "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
@@ -9894,7 +9876,7 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/npm-registry-fetch/node_modules/lru-cache": {
+    "node_modules/npm-profile/node_modules/lru-cache": {
       "version": "7.18.3",
       "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
       "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
@@ -9903,7 +9885,7 @@
         "node": ">=12"
       }
     },
-    "node_modules/npm-registry-fetch/node_modules/make-fetch-happen": {
+    "node_modules/npm-profile/node_modules/make-fetch-happen": {
       "version": "12.0.0",
       "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-12.0.0.tgz",
       "integrity": "sha512-xpuA2kA8Z66uGQjaSXd7rffqJOv60iYpP8X0TsZl3uwXlqxUVmHETImjM71JOPA694TlcX37GhlaCsl6z6fNVg==",
@@ -9925,7 +9907,7 @@
         "node": "^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/npm-registry-fetch/node_modules/npm-package-arg": {
+    "node_modules/npm-profile/node_modules/npm-package-arg": {
       "version": "10.1.0",
       "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
       "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
@@ -9940,6 +9922,42 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/npm-profile/node_modules/npm-registry-fetch": {
+      "version": "15.0.0",
+      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-15.0.0.tgz",
+      "integrity": "sha512-CMFzk0HMDQ3fmFZ4v62C05g6eBwoU3PxpzFf4QiE360vfmtKZJkj+iCpgLx+I4oJT6Kx8g67Coyk729Q27M2JQ==",
+      "inBundle": true,
+      "dependencies": {
+        "make-fetch-happen": "^12.0.0",
+        "minipass": "^7.0.2",
+        "minipass-fetch": "^3.0.0",
+        "minipass-json-stream": "^1.0.1",
+        "minizlib": "^2.1.2",
+        "npm-package-arg": "^10.0.0",
+        "proc-log": "^3.0.0"
+      },
+      "engines": {
+        "node": "^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/npm-registry-fetch": {
+      "version": "16.0.0",
+      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-16.0.0.tgz",
+      "integrity": "sha512-JFCpAPUpvpwfSydv99u85yhP68rNIxSFmDpNbNnRWKSe3gpjHnWL8v320gATwRzjtgmZ9Jfe37+ZPOLZPwz6BQ==",
+      "inBundle": true,
+      "dependencies": {
+        "make-fetch-happen": "^13.0.0",
+        "minipass": "^7.0.2",
+        "minipass-fetch": "^3.0.0",
+        "minipass-json-stream": "^1.0.1",
+        "minizlib": "^2.1.2",
+        "npm-package-arg": "^11.0.0",
+        "proc-log": "^3.0.0"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/npm-run-path": {
       "version": "4.0.1",
       "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz",
@@ -10505,24 +10523,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/pacote/node_modules/npm-registry-fetch": {
-      "version": "16.0.0",
-      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-16.0.0.tgz",
-      "integrity": "sha512-JFCpAPUpvpwfSydv99u85yhP68rNIxSFmDpNbNnRWKSe3gpjHnWL8v320gATwRzjtgmZ9Jfe37+ZPOLZPwz6BQ==",
-      "inBundle": true,
-      "dependencies": {
-        "make-fetch-happen": "^13.0.0",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^3.0.0",
-        "minipass-json-stream": "^1.0.1",
-        "minizlib": "^2.1.2",
-        "npm-package-arg": "^11.0.0",
-        "proc-log": "^3.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
     "node_modules/parent-module": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -16117,7 +16117,7 @@
         "npm-install-checks": "^6.2.0",
         "npm-package-arg": "^11.0.0",
         "npm-pick-manifest": "^9.0.0",
-        "npm-registry-fetch": "^15.0.0",
+        "npm-registry-fetch": "^16.0.0",
         "npmlog": "^7.0.1",
         "pacote": "^17.0.1",
         "parse-conflict-json": "^3.0.0",
@@ -16176,7 +16176,7 @@
       "license": "ISC",
       "dependencies": {
         "npm-package-arg": "^11.0.0",
-        "npm-registry-fetch": "^15.0.0"
+        "npm-registry-fetch": "^16.0.0"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
@@ -16262,7 +16262,7 @@
       "license": "ISC",
       "dependencies": {
         "aproba": "^2.0.0",
-        "npm-registry-fetch": "^15.0.0"
+        "npm-registry-fetch": "^16.0.0"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
@@ -16279,7 +16279,7 @@
       "license": "ISC",
       "dependencies": {
         "aproba": "^2.0.0",
-        "npm-registry-fetch": "^15.0.0"
+        "npm-registry-fetch": "^16.0.0"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
@@ -16319,7 +16319,7 @@
         "ci-info": "^3.6.1",
         "normalize-package-data": "^6.0.0",
         "npm-package-arg": "^11.0.0",
-        "npm-registry-fetch": "^15.0.0",
+        "npm-registry-fetch": "^16.0.0",
         "proc-log": "^3.0.0",
         "semver": "^7.3.7",
         "sigstore": "^1.4.0",
@@ -16341,7 +16341,7 @@
       "version": "6.0.2",
       "license": "ISC",
       "dependencies": {
-        "npm-registry-fetch": "^15.0.0"
+        "npm-registry-fetch": "^16.0.0"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
@@ -16358,7 +16358,7 @@
       "license": "ISC",
       "dependencies": {
         "aproba": "^2.0.0",
-        "npm-registry-fetch": "^15.0.0"
+        "npm-registry-fetch": "^16.0.0"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
diff --git a/package.json b/package.json
index f8a14c77a1663..8078dde05b6bb 100644
--- a/package.json
+++ b/package.json
@@ -99,7 +99,7 @@
     "npm-package-arg": "^11.0.0",
     "npm-pick-manifest": "^9.0.0",
     "npm-profile": "^8.0.0",
-    "npm-registry-fetch": "^15.0.0",
+    "npm-registry-fetch": "^16.0.0",
     "npm-user-validate": "^2.0.0",
     "npmlog": "^7.0.1",
     "p-map": "^4.0.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 86605fcfd6fbc..b3f740adeb018 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -24,7 +24,7 @@
     "npm-install-checks": "^6.2.0",
     "npm-package-arg": "^11.0.0",
     "npm-pick-manifest": "^9.0.0",
-    "npm-registry-fetch": "^15.0.0",
+    "npm-registry-fetch": "^16.0.0",
     "npmlog": "^7.0.1",
     "pacote": "^17.0.1",
     "parse-conflict-json": "^3.0.0",
diff --git a/workspaces/libnpmaccess/package.json b/workspaces/libnpmaccess/package.json
index 292bffe371ba8..87a0921c198bb 100644
--- a/workspaces/libnpmaccess/package.json
+++ b/workspaces/libnpmaccess/package.json
@@ -30,7 +30,7 @@
   "homepage": "https://npmjs.com/package/libnpmaccess",
   "dependencies": {
     "npm-package-arg": "^11.0.0",
-    "npm-registry-fetch": "^15.0.0"
+    "npm-registry-fetch": "^16.0.0"
   },
   "engines": {
     "node": "^16.14.0 || >=18.0.0"
diff --git a/workspaces/libnpmhook/package.json b/workspaces/libnpmhook/package.json
index c97047e644812..6ba8ae8cce833 100644
--- a/workspaces/libnpmhook/package.json
+++ b/workspaces/libnpmhook/package.json
@@ -31,7 +31,7 @@
   "license": "ISC",
   "dependencies": {
     "aproba": "^2.0.0",
-    "npm-registry-fetch": "^15.0.0"
+    "npm-registry-fetch": "^16.0.0"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
diff --git a/workspaces/libnpmorg/package.json b/workspaces/libnpmorg/package.json
index 584f113ed10c0..ae6d61c480859 100644
--- a/workspaces/libnpmorg/package.json
+++ b/workspaces/libnpmorg/package.json
@@ -42,7 +42,7 @@
   "homepage": "https://npmjs.com/package/libnpmorg",
   "dependencies": {
     "aproba": "^2.0.0",
-    "npm-registry-fetch": "^15.0.0"
+    "npm-registry-fetch": "^16.0.0"
   },
   "engines": {
     "node": "^16.14.0 || >=18.0.0"
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index 583d8a86fb840..fb5daa47c8d93 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -41,7 +41,7 @@
     "ci-info": "^3.6.1",
     "normalize-package-data": "^6.0.0",
     "npm-package-arg": "^11.0.0",
-    "npm-registry-fetch": "^15.0.0",
+    "npm-registry-fetch": "^16.0.0",
     "proc-log": "^3.0.0",
     "semver": "^7.3.7",
     "sigstore": "^1.4.0",
diff --git a/workspaces/libnpmsearch/package.json b/workspaces/libnpmsearch/package.json
index ab0f73882b023..fc2df01ee15f5 100644
--- a/workspaces/libnpmsearch/package.json
+++ b/workspaces/libnpmsearch/package.json
@@ -38,7 +38,7 @@
   "bugs": "https://github.com/npm/libnpmsearch/issues",
   "homepage": "https://npmjs.com/package/libnpmsearch",
   "dependencies": {
-    "npm-registry-fetch": "^15.0.0"
+    "npm-registry-fetch": "^16.0.0"
   },
   "engines": {
     "node": "^16.14.0 || >=18.0.0"
diff --git a/workspaces/libnpmteam/package.json b/workspaces/libnpmteam/package.json
index 860c525afa4a5..38632f66bf4aa 100644
--- a/workspaces/libnpmteam/package.json
+++ b/workspaces/libnpmteam/package.json
@@ -32,7 +32,7 @@
   "homepage": "https://npmjs.com/package/libnpmteam",
   "dependencies": {
     "aproba": "^2.0.0",
-    "npm-registry-fetch": "^15.0.0"
+    "npm-registry-fetch": "^16.0.0"
   },
   "engines": {
     "node": "^16.14.0 || >=18.0.0"

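Per the lock entries above, the jump from npm-registry-fetch 15 to 16 only raises the node engine floor (^16.13.0 to ^16.14.0) and the make-fetch-happen and npm-package-arg majors; none of the workspace call sites change in this patch, which suggests the exported API is unchanged across the bump. A minimal consumer sketch (unscoped package name, not part of the patch):

    'use strict'
    const fetch = require('npm-registry-fetch')

    async function latest (name) {
      // fetch.json() resolves a registry-relative path against opts.registry
      // and parses the JSON body; the call is identical on 15.x and 16.x
      const packument = await fetch.json(`/${name}`, {
        registry: 'https://registry.npmjs.org/',
      })
      return packument['dist-tags'].latest
    }

    latest('npm-registry-fetch').then(console.log, console.error)
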
From 0e1fdffd10cff801b680d664e6b1cffdaab8dc1f Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 16:07:58 -0700
Subject: [PATCH 47/68] deps: npm-profile@9.0.0

---
 node_modules/.gitignore                       |   11 -
 .../node_modules/@npmcli/agent/lib/dns.js     |   51 -
 .../node_modules/@npmcli/agent/lib/errors.js  |   71 -
 .../node_modules/@npmcli/agent/lib/http.js    |   33 -
 .../node_modules/@npmcli/agent/lib/https.js   |   33 -
 .../node_modules/@npmcli/agent/lib/index.js   |  135 --
 .../@npmcli/agent/lib/proxy/http.js           |  146 --
 .../@npmcli/agent/lib/proxy/index.js          |   25 -
 .../@npmcli/agent/lib/proxy/null.js           |   97 --
 .../@npmcli/agent/lib/proxy/socks.js          |  153 --
 .../node_modules/@npmcli/agent/lib/util.js    |   33 -
 .../node_modules/@npmcli/agent/package.json   |   56 -
 .../node_modules/cacache/LICENSE.md           |   16 -
 .../node_modules/cacache/lib/content/path.js  |   29 -
 .../node_modules/cacache/lib/content/read.js  |  166 ---
 .../node_modules/cacache/lib/content/rm.js    |   18 -
 .../node_modules/cacache/lib/content/write.js |  205 ---
 .../node_modules/cacache/lib/entry-index.js   |  330 -----
 .../node_modules/cacache/lib/get.js           |  170 ---
 .../node_modules/cacache/lib/index.js         |   42 -
 .../node_modules/cacache/lib/memoization.js   |   72 -
 .../node_modules/cacache/lib/put.js           |   80 --
 .../node_modules/cacache/lib/rm.js            |   31 -
 .../node_modules/cacache/lib/util/glob.js     |    7 -
 .../cacache/lib/util/hash-to-segments.js      |    7 -
 .../node_modules/cacache/lib/util/tmp.js      |   26 -
 .../node_modules/cacache/lib/verify.js        |  257 ----
 .../node_modules/cacache/package.json         |   82 --
 .../node_modules/hosted-git-info/LICENSE      |   13 -
 .../hosted-git-info/lib/from-url.js           |  122 --
 .../node_modules/hosted-git-info/lib/hosts.js |  228 ---
 .../node_modules/hosted-git-info/lib/index.js |  179 ---
 .../hosted-git-info/lib/parse-url.js          |   78 --
 .../node_modules/hosted-git-info/package.json |   59 -
 .../node_modules/lru-cache/LICENSE            |   15 -
 .../node_modules/lru-cache/index.js           | 1227 -----------------
 .../node_modules/lru-cache/index.mjs          | 1227 -----------------
 .../node_modules/lru-cache/package.json       |   96 --
 .../node_modules/make-fetch-happen/LICENSE    |   16 -
 .../make-fetch-happen/lib/cache/entry.js      |  469 -------
 .../make-fetch-happen/lib/cache/errors.js     |   11 -
 .../make-fetch-happen/lib/cache/index.js      |   49 -
 .../make-fetch-happen/lib/cache/key.js        |   17 -
 .../make-fetch-happen/lib/cache/policy.js     |  161 ---
 .../make-fetch-happen/lib/fetch.js            |  118 --
 .../make-fetch-happen/lib/index.js            |   41 -
 .../make-fetch-happen/lib/options.js          |   54 -
 .../make-fetch-happen/lib/pipeline.js         |   41 -
 .../make-fetch-happen/lib/remote.js           |  127 --
 .../make-fetch-happen/package.json            |   80 --
 .../node_modules/npm-package-arg/LICENSE      |   15 -
 .../node_modules/npm-package-arg/lib/npa.js   |  431 ------
 .../node_modules/npm-package-arg/package.json |   59 -
 .../npm-registry-fetch/LICENSE.md             |   20 -
 .../npm-registry-fetch/lib/auth.js            |  145 --
 .../npm-registry-fetch/lib/check-response.js  |  100 --
 .../npm-registry-fetch/lib/clean-url.js       |   27 -
 .../npm-registry-fetch/lib/default-opts.js    |   19 -
 .../npm-registry-fetch/lib/errors.js          |   80 --
 .../npm-registry-fetch/lib/index.js           |  247 ----
 .../npm-registry-fetch/package.json           |   73 -
 node_modules/npm-profile/package.json         |    8 +-
 package-lock.json                             |  124 +-
 package.json                                  |    2 +-
 64 files changed, 11 insertions(+), 8149 deletions(-)
 delete mode 100644 node_modules/npm-profile/node_modules/@npmcli/agent/lib/dns.js
 delete mode 100644 node_modules/npm-profile/node_modules/@npmcli/agent/lib/errors.js
 delete mode 100644 node_modules/npm-profile/node_modules/@npmcli/agent/lib/http.js
 delete mode 100644 node_modules/npm-profile/node_modules/@npmcli/agent/lib/https.js
 delete mode 100644 node_modules/npm-profile/node_modules/@npmcli/agent/lib/index.js
 delete mode 100644 node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/http.js
 delete mode 100644 node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/index.js
 delete mode 100644 node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/null.js
 delete mode 100644 node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/socks.js
 delete mode 100644 node_modules/npm-profile/node_modules/@npmcli/agent/lib/util.js
 delete mode 100644 node_modules/npm-profile/node_modules/@npmcli/agent/package.json
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/LICENSE.md
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/lib/content/path.js
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/lib/content/read.js
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/lib/content/rm.js
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/lib/content/write.js
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/lib/entry-index.js
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/lib/get.js
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/lib/index.js
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/lib/memoization.js
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/lib/put.js
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/lib/rm.js
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/lib/util/glob.js
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/lib/util/hash-to-segments.js
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/lib/util/tmp.js
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/lib/verify.js
 delete mode 100644 node_modules/npm-profile/node_modules/cacache/package.json
 delete mode 100644 node_modules/npm-profile/node_modules/hosted-git-info/LICENSE
 delete mode 100644 node_modules/npm-profile/node_modules/hosted-git-info/lib/from-url.js
 delete mode 100644 node_modules/npm-profile/node_modules/hosted-git-info/lib/hosts.js
 delete mode 100644 node_modules/npm-profile/node_modules/hosted-git-info/lib/index.js
 delete mode 100644 node_modules/npm-profile/node_modules/hosted-git-info/lib/parse-url.js
 delete mode 100644 node_modules/npm-profile/node_modules/hosted-git-info/package.json
 delete mode 100644 node_modules/npm-profile/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/npm-profile/node_modules/lru-cache/index.js
 delete mode 100644 node_modules/npm-profile/node_modules/lru-cache/index.mjs
 delete mode 100644 node_modules/npm-profile/node_modules/lru-cache/package.json
 delete mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/LICENSE
 delete mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/entry.js
 delete mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/errors.js
 delete mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/index.js
 delete mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/key.js
 delete mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/policy.js
 delete mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/fetch.js
 delete mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/index.js
 delete mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/options.js
 delete mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/pipeline.js
 delete mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/remote.js
 delete mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/package.json
 delete mode 100644 node_modules/npm-profile/node_modules/npm-package-arg/LICENSE
 delete mode 100644 node_modules/npm-profile/node_modules/npm-package-arg/lib/npa.js
 delete mode 100644 node_modules/npm-profile/node_modules/npm-package-arg/package.json
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/clean-url.js
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 19178b9efa92a..56c6317d7d0e4 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -203,17 +203,6 @@
 !/npm-packlist
 !/npm-pick-manifest
 !/npm-profile
-!/npm-profile/node_modules/
-/npm-profile/node_modules/*
-!/npm-profile/node_modules/@npmcli/
-/npm-profile/node_modules/@npmcli/*
-!/npm-profile/node_modules/@npmcli/agent
-!/npm-profile/node_modules/cacache
-!/npm-profile/node_modules/hosted-git-info
-!/npm-profile/node_modules/lru-cache
-!/npm-profile/node_modules/make-fetch-happen
-!/npm-profile/node_modules/npm-package-arg
-!/npm-profile/node_modules/npm-registry-fetch
 !/npm-registry-fetch
 !/npm-user-validate
 !/npmlog
diff --git a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/dns.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/dns.js
deleted file mode 100644
index 10dcb8d471d10..0000000000000
--- a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/dns.js
+++ /dev/null
@@ -1,51 +0,0 @@
-'use strict'
-
-const LRUCache = require('lru-cache')
-const dns = require('dns')
-
-const defaultOptions = exports.defaultOptions = {
-  family: undefined,
-  hints: dns.ADDRCONFIG,
-  all: false,
-  verbatim: undefined,
-}
-
-const lookupCache = exports.lookupCache = new LRUCache({ max: 50 })
-
-// this is a factory so that each request can have its own opts (i.e. ttl)
-// while still sharing the cache across all requests
-exports.getLookup = (dnsOptions) => {
-  return (hostname, options, callback) => {
-    if (typeof options === 'function') {
-      callback = options
-      options = null
-    } else if (typeof options === 'number') {
-      options = { family: options }
-    }
-
-    options = { ...defaultOptions, ...options }
-
-    const key = JSON.stringify({
-      hostname,
-      family: options.family,
-      hints: options.hints,
-      all: options.all,
-      verbatim: options.verbatim,
-    })
-
-    if (lookupCache.has(key)) {
-      const [address, family] = lookupCache.get(key)
-      process.nextTick(callback, null, address, family)
-      return
-    }
-
-    dnsOptions.lookup(hostname, options, (err, address, family) => {
-      if (err) {
-        return callback(err)
-      }
-
-      lookupCache.set(key, [address, family], { ttl: dnsOptions.ttl })
-      return callback(null, address, family)
-    })
-  }
-}
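
The dns.js just removed is a compact pattern: a factory that hands each request its own dns.lookup-compatible function (so each can carry its own TTL) while every instance shares one LRU cache keyed on the lookup options. A wiring sketch under the v1 layout being deleted here ("main" is lib/index.js with no "exports" map, so the deep path resolves); nothing below is part of the patch:

    'use strict'
    const dns = require('dns')
    const net = require('net')
    const { getLookup } = require('@npmcli/agent/lib/dns.js')

    // one factory call per request; all returned functions share lookupCache
    const lookup = getLookup({ lookup: dns.lookup, ttl: 5 * 60 * 1000 })

    // net.connect accepts a custom `lookup`, so repeat connections to the same
    // hostname are answered from the cache instead of the resolver
    const socket = net.connect({ host: 'registry.npmjs.org', port: 443, lookup })
    socket.once('connect', () => socket.end())
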
diff --git a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/errors.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/errors.js
deleted file mode 100644
index 9c664aeb39757..0000000000000
--- a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/errors.js
+++ /dev/null
@@ -1,71 +0,0 @@
-'use strict'
-
-class InvalidProxyProtocolError extends Error {
-  constructor (url) {
-    super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``)
-    this.code = 'EINVALIDPROXY'
-    this.proxy = url
-  }
-}
-
-class InvalidProxyResponseError extends Error {
-  constructor (url, status) {
-    super(`Invalid status code \`${status}\` connecting to proxy \`${url.host}\``)
-    this.code = 'EINVALIDRESPONSE'
-    this.proxy = url
-    this.status = status
-  }
-}
-
-class ConnectionTimeoutError extends Error {
-  constructor (host) {
-    super(`Timeout connecting to host \`${host}\``)
-    this.code = 'ECONNECTIONTIMEOUT'
-    this.host = host
-  }
-}
-
-class IdleTimeoutError extends Error {
-  constructor (host) {
-    super(`Idle timeout reached for host \`${host}\``)
-    this.code = 'EIDLETIMEOUT'
-    this.host = host
-  }
-}
-
-class ResponseTimeoutError extends Error {
-  constructor (proxy, request) {
-    let msg = 'Response timeout '
-    if (proxy.url) {
-      msg += `from proxy \`${proxy.url.host}\` `
-    }
-    msg += `connecting to host \`${request.host}\``
-    super(msg)
-    this.code = 'ERESPONSETIMEOUT'
-    this.proxy = proxy.url
-    this.request = request
-  }
-}
-
-class TransferTimeoutError extends Error {
-  constructor (proxy, request) {
-    let msg = 'Transfer timeout '
-    if (proxy.url) {
-      msg += `from proxy \`${proxy.url.host}\` `
-    }
-    msg += `for \`${request.host}\``
-    super(msg)
-    this.code = 'ETRANSFERTIMEOUT'
-    this.proxy = proxy.url
-    this.request = request
-  }
-}
-
-module.exports = {
-  InvalidProxyProtocolError,
-  InvalidProxyResponseError,
-  ConnectionTimeoutError,
-  IdleTimeoutError,
-  ResponseTimeoutError,
-  TransferTimeoutError,
-}
diff --git a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/http.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/http.js
deleted file mode 100644
index 23512393caf3f..0000000000000
--- a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/http.js
+++ /dev/null
@@ -1,33 +0,0 @@
-'use strict'
-
-const http = require('http')
-
-const { getLookup } = require('./dns.js')
-const { normalizeOptions } = require('./util.js')
-const createProxy = require('./proxy/index.js')
-
-class HttpAgent extends http.Agent {
-  constructor (_options = {}) {
-    const options = normalizeOptions(_options)
-    super(options)
-    this.proxy = createProxy({
-      agent: this,
-      lookup: getLookup(options.dns),
-      proxy: options.proxy,
-      secure: false,
-    })
-  }
-
-  createConnection (_options, callback) {
-    const options = normalizeOptions(_options)
-    return this.proxy.createConnection(options, callback)
-  }
-
-  addRequest (request, _options) {
-    const options = normalizeOptions(_options)
-    super.addRequest(request, _options)
-    return this.proxy.addRequest(request, options)
-  }
-}
-
-module.exports = HttpAgent
diff --git a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/https.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/https.js
deleted file mode 100644
index b544614d7f47f..0000000000000
--- a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/https.js
+++ /dev/null
@@ -1,33 +0,0 @@
-'use strict'
-
-const https = require('https')
-
-const { getLookup } = require('./dns.js')
-const { normalizeOptions } = require('./util.js')
-const createProxy = require('./proxy/index.js')
-
-class HttpsAgent extends https.Agent {
-  constructor (_options) {
-    const options = normalizeOptions(_options)
-    super(options)
-    this.proxy = createProxy({
-      agent: this,
-      lookup: getLookup(options.dns),
-      proxy: options.proxy,
-      secure: true,
-    })
-  }
-
-  createConnection (_options, callback) {
-    const options = normalizeOptions(_options)
-    return this.proxy.createConnection(options, callback)
-  }
-
-  addRequest (request, _options) {
-    const options = normalizeOptions(_options)
-    super.addRequest(request, options)
-    return this.proxy.addRequest(request, options)
-  }
-}
-
-module.exports = HttpsAgent
diff --git a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/index.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/index.js
deleted file mode 100644
index a6f556964d86d..0000000000000
--- a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/index.js
+++ /dev/null
@@ -1,135 +0,0 @@
-'use strict'
-
-const { normalizeOptions } = require('./util.js')
-const HttpAgent = require('./http.js')
-const HttpsAgent = require('./https.js')
-
-const AgentCache = new Map()
-
-const proxyEnv = {}
-for (const [key, value] of Object.entries(process.env)) {
-  const lowerKey = key.toLowerCase()
-  if (['https_proxy', 'http_proxy', 'proxy', 'no_proxy'].includes(lowerKey)) {
-    proxyEnv[lowerKey] = value
-  }
-}
-
-const getAgent = (url, options) => {
-  url = new URL(url)
-  options = normalizeOptions(options)
-
-  // false has meaning so this can't be a simple truthiness check
-  if (options.agent != null) {
-    return options.agent
-  }
-
-  const isHttps = url.protocol === 'https:'
-
-  let proxy = options.proxy
-  if (!proxy) {
-    proxy = isHttps
-      ? proxyEnv.https_proxy
-      : (proxyEnv.https_proxy || proxyEnv.http_proxy || proxyEnv.proxy)
-  }
-
-  if (proxy) {
-    proxy = new URL(proxy)
-    let noProxy = options.noProxy || proxyEnv.no_proxy
-    if (typeof noProxy === 'string') {
-      noProxy = noProxy.split(',').map((p) => p.trim())
-    }
-
-    if (noProxy) {
-      const hostSegments = url.hostname.split('.').reverse()
-      const matches = noProxy.some((no) => {
-        const noSegments = no.split('.').filter(Boolean).reverse()
-        if (!noSegments.length) {
-          return false
-        }
-
-        for (let i = 0; i < noSegments.length; ++i) {
-          if (hostSegments[i] !== noSegments[i]) {
-            return false
-          }
-        }
-
-        return true
-      })
-
-      if (matches) {
-        proxy = ''
-      }
-    }
-  }
-
-  const timeouts = [
-    options.timeouts.connection || 0,
-    options.timeouts.idle || 0,
-    options.timeouts.response || 0,
-    options.timeouts.transfer || 0,
-  ].join('.')
-
-  const maxSockets = options.maxSockets || 15
-
-  let proxyDescriptor = 'proxy:'
-  if (!proxy) {
-    proxyDescriptor += 'null'
-  } else {
-    proxyDescriptor += `${proxy.protocol}//`
-    let auth = ''
-
-    if (proxy.username) {
-      auth += proxy.username
-    }
-
-    if (proxy.password) {
-      auth += `:${proxy.password}`
-    }
-
-    if (auth) {
-      proxyDescriptor += `${auth}@`
-    }
-
-    proxyDescriptor += proxy.host
-  }
-
-  const key = [
-    `https:${isHttps}`,
-    proxyDescriptor,
-    `local-address:${options.localAddress || 'null'}`,
-    `strict-ssl:${isHttps ? options.rejectUnauthorized : 'false'}`,
-    `ca:${isHttps && options.ca || 'null'}`,
-    `cert:${isHttps && options.cert || 'null'}`,
-    `key:${isHttps && options.key || 'null'}`,
-    `timeouts:${timeouts}`,
-    `maxSockets:${maxSockets}`,
-  ].join(':')
-
-  if (AgentCache.has(key)) {
-    return AgentCache.get(key)
-  }
-
-  const agentOptions = {
-    ca: options.ca,
-    cert: options.cert,
-    key: options.key,
-    rejectUnauthorized: options.rejectUnauthorized,
-    maxSockets,
-    timeouts: options.timeouts,
-    localAddress: options.localAddress,
-    proxy,
-  }
-
-  const agent = isHttps
-    ? new HttpsAgent(agentOptions)
-    : new HttpAgent(agentOptions)
-
-  AgentCache.set(key, agent)
-  return agent
-}
-
-module.exports = {
-  getAgent,
-  HttpAgent,
-  HttpsAgent,
-}
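
Note how getAgent() above folds every connection-relevant option (proxy descriptor, TLS material, timeouts, maxSockets) into a string key for AgentCache. A consumer sketch, assuming the v1 exports shown in this deleted file; identical option sets return the very same Agent instance:

    'use strict'
    const https = require('https')
    const { getAgent } = require('@npmcli/agent')

    const opts = { timeouts: { connection: 10000, idle: 30000 }, maxSockets: 15 }
    const a = getAgent('https://registry.npmjs.org', opts)
    const b = getAgent('https://registry.npmjs.org', opts)
    console.log(a === b) // true: second call is served from AgentCache

    // the npm registry exposes /-/ping as a cheap liveness endpoint
    https.get('https://registry.npmjs.org/-/ping', { agent: a }, (res) => {
      console.log(res.statusCode)
      res.resume()
    })
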
diff --git a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/http.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/http.js
deleted file mode 100644
index 8d092e963c084..0000000000000
--- a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/http.js
+++ /dev/null
@@ -1,146 +0,0 @@
-'use strict'
-
-const http = require('http')
-const https = require('https')
-const net = require('net')
-const tls = require('tls')
-
-const {
-  ConnectionTimeoutError,
-  IdleTimeoutError,
-  InvalidProxyResponseError,
-  ResponseTimeoutError,
-  TransferTimeoutError,
-} = require('../errors.js')
-
-// this proxy class uses the http CONNECT method
-class HttpProxy {
-  constructor ({ agent, lookup, url, secure }) {
-    this.agent = agent
-    this.lookup = lookup
-    this.url = url
-    this.secure = secure
-  }
-
-  createConnection (options, callback) {
-    const requestOptions = {
-      // pass createConnection so this request doesn't go through an agent
-      createConnection: (opts, cb) => {
-        // delete the path first, otherwise (net|tls).connect will try to open a unix socket
-        delete opts.path
-        // we also delete the timeout since we control it ourselves
-        delete opts.timeout
-        opts.family = this.agent.options.family
-        opts.lookup = this.lookup
-
-        if (this.url.protocol === 'https:') {
-          return tls.connect(opts, cb)
-        }
-
-        return net.connect(opts, cb)
-      },
-      method: 'CONNECT',
-      host: this.url.hostname,
-      port: this.url.port,
-      servername: this.url.hostname,
-      path: `${options.host}:${options.port}`,
-      setHost: false,
-      timeout: options.timeout,
-      headers: {
-        connection: this.agent.keepAlive ? 'keep-alive' : 'close',
-        host: `${options.host}:${options.port}`,
-      },
-      rejectUnauthorized: options.rejectUnauthorized,
-    }
-
-    if (this.url.username || this.url.password) {
-      const username = decodeURIComponent(this.url.username)
-      const password = decodeURIComponent(this.url.password)
-      requestOptions.headers['proxy-authentication'] =
-        Buffer.from(`${username}:${password}`).toString('base64')
-    }
-
-    let connectionTimeout
-
-    const onConnect = (res, socket) => {
-      clearTimeout(connectionTimeout)
-      req.removeListener('error', onError)
-
-      if (res.statusCode !== 200) {
-        return callback(new InvalidProxyResponseError(this.url, res.statusCode))
-      }
-
-      if (this.secure) {
-        socket = tls.connect({ ...options, socket })
-      }
-
-      socket.setKeepAlive(this.agent.keepAlive, this.agent.keepAliveMsecs)
-      socket.setNoDelay(this.agent.keepAlive)
-
-      if (options.timeouts.idle) {
-        socket.setTimeout(options.timeouts.idle)
-        socket.once('timeout', () => {
-          socket.destroy(new IdleTimeoutError(this.url.host))
-        })
-      }
-
-      return callback(null, socket)
-    }
-
-    const onError = (err) => {
-      req.removeListener('connect', onConnect)
-      return callback(err)
-    }
-
-    const req = this.secure
-      ? https.request(requestOptions)
-      : http.request(requestOptions)
-
-    req.once('connect', onConnect)
-    req.once('error', onError)
-    req.end()
-
-    if (options.timeouts.connection) {
-      connectionTimeout = setTimeout(() => {
-        return callback(new ConnectionTimeoutError(this.url.host))
-      }, options.timeouts.connection)
-    }
-  }
-
-  addRequest (request, options) {
-    if (this.agent.options.timeouts.response) {
-      let responseTimeout
-
-      const onFinish = () => {
-        responseTimeout = setTimeout(() => {
-          request.destroy(new ResponseTimeoutError(this, request))
-        }, this.agent.options.timeouts.response)
-      }
-
-      const onResponse = () => {
-        clearTimeout(responseTimeout)
-      }
-
-      request.once('finish', onFinish)
-      request.once('response', onResponse)
-    }
-
-    if (this.agent.options.timeouts.transfer) {
-      let transferTimeout
-
-      const onResponse = (res) => {
-        transferTimeout = setTimeout(() => {
-          res.destroy(new TransferTimeoutError(this, request))
-        }, this.agent.options.timeouts.transfer)
-
-        res.once('close', () => {
-          clearTimeout(transferTimeout)
-        })
-      }
-
-      request.once('response', onResponse)
-    }
-  }
-}
-
-module.exports = HttpProxy
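
One observation on the credentials block in this deleted HttpProxy: it sets a proxy-authentication header carrying bare base64, while the standard request header for authenticating to a proxy (RFC 7235, with Basic per RFC 7617) is Proxy-Authorization with a "Basic " prefix. The conventional shape, as a hypothetical helper rather than anything in this patch:

    'use strict'
    // Builds the standard Proxy-Authorization header for a CONNECT tunnel.
    function proxyAuthHeader (proxyUrl) {
      const { username, password } = new URL(proxyUrl)
      const creds = `${decodeURIComponent(username)}:${decodeURIComponent(password)}`
      return { 'proxy-authorization': `Basic ${Buffer.from(creds).toString('base64')}` }
    }

    // { 'proxy-authorization': 'Basic dXNlcjpwYXNz' }
    console.log(proxyAuthHeader('http://user:pass@proxy.example:8080'))
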
diff --git a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/index.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/index.js
deleted file mode 100644
index 87f628c5bbf94..0000000000000
--- a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/index.js
+++ /dev/null
@@ -1,25 +0,0 @@
-'use strict'
-
-const { InvalidProxyProtocolError } = require('../errors.js')
-const HttpProxy = require('./http.js')
-const NullProxy = require('./null.js')
-const SocksProxy = require('./socks.js')
-
-const createProxy = ({ agent, lookup, proxy, secure }) => {
-  if (!proxy) {
-    return new NullProxy({ agent, lookup, secure })
-  }
-
-  const parsed = new URL(proxy)
-  if (parsed.protocol === 'http:' || parsed.protocol === 'https:') {
-    return new HttpProxy({ agent, lookup, url: parsed, secure })
-  }
-
-  if (parsed.protocol.startsWith('socks')) {
-    return new SocksProxy({ agent, lookup, url: parsed, secure })
-  }
-
-  throw new InvalidProxyProtocolError(parsed)
-}
-
-module.exports = createProxy
diff --git a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/null.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/null.js
deleted file mode 100644
index d2b2f6f777e92..0000000000000
--- a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/null.js
+++ /dev/null
@@ -1,97 +0,0 @@
-'use strict'
-
-const net = require('net')
-const tls = require('tls')
-
-const {
-  ConnectionTimeoutError,
-  IdleTimeoutError,
-  ResponseTimeoutError,
-  TransferTimeoutError,
-} = require('../errors.js')
-
-class NullProxy {
-  constructor ({ agent, lookup, secure }) {
-    this.agent = agent
-    this.lookup = lookup
-    this.secure = secure
-  }
-
-  createConnection (options, callback) {
-    const socket = this.secure
-      ? tls.connect({ ...options, family: this.agent.options.family, lookup: this.lookup })
-      : net.connect({ ...options, family: this.agent.options.family, lookup: this.lookup })
-
-    socket.setKeepAlive(this.agent.keepAlive, this.agent.keepAliveMsecs)
-    socket.setNoDelay(this.agent.keepAlive)
-
-    let connectionTimeout
-
-    if (options.timeouts.connection) {
-      connectionTimeout = setTimeout(() => {
-        callback(new ConnectionTimeoutError(options.host))
-      }, options.timeouts.connection)
-    }
-
-    if (options.timeouts.idle) {
-      socket.setTimeout(options.timeouts.idle)
-      socket.once('timeout', () => {
-        socket.destroy(new IdleTimeoutError(options.host))
-      })
-    }
-
-    const onConnect = () => {
-      clearTimeout(connectionTimeout)
-      socket.removeListener('error', onError)
-      callback(null, socket)
-    }
-
-    const onError = (err) => {
-      socket.removeListener('connect', onConnect)
-      callback(err)
-    }
-
-    socket.once('error', onError)
-    socket.once(this.secure ? 'secureConnect' : 'connect', onConnect)
-  }
-
-  addRequest (request, options) {
-    if (this.agent.options.timeouts.response) {
-      let responseTimeout
-
-      const onFinish = () => {
-        responseTimeout = setTimeout(() => {
-          request.destroy(new ResponseTimeoutError(this, request))
-        }, this.agent.options.timeouts.response)
-      }
-
-      const onResponse = () => {
-        clearTimeout(responseTimeout)
-      }
-
-      request.once('finish', onFinish)
-      request.once('response', onResponse)
-    }
-
-    if (this.agent.options.timeouts.transfer) {
-      let transferTimeout
-
-      const onResponse = (res) => {
-        transferTimeout = setTimeout(() => {
-          // swallow the error event on the request, this allows the one on the response
-          // to make it to the end user
-          request.once('error', () => {})
-          res.destroy(new TransferTimeoutError(this, request))
-        }, this.agent.options.timeouts.transfer)
-
-        res.once('close', () => {
-          clearTimeout(transferTimeout)
-        })
-      }
-
-      request.once('response', onResponse)
-    }
-  }
-}
-
-module.exports = NullProxy
diff --git a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/socks.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/socks.js
deleted file mode 100644
index 8cad7148e9227..0000000000000
--- a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/proxy/socks.js
+++ /dev/null
@@ -1,153 +0,0 @@
-'use strict'
-
-const { SocksClient } = require('socks')
-const tls = require('tls')
-
-const {
-  ConnectionTimeoutError,
-  IdleTimeoutError,
-  InvalidProxyProtocolError,
-  ResponseTimeoutError,
-  TransferTimeoutError,
-} = require('../errors.js')
-
-class SocksProxy {
-  constructor ({ agent, lookup, secure, url }) {
-    this.agent = agent
-    this.lookup = lookup
-    this.secure = secure
-    this.url = url
-    if (!this.url.port) {
-      this.url.port = 1080
-    }
-
-    if (this.url.protocol === 'socks4:') {
-      this.shouldLookup = true
-      this.type = 4
-    } else if (this.url.protocol === 'socks4a:') {
-      this.shouldLookup = false
-      this.type = 4
-    } else if (this.url.protocol === 'socks5:') {
-      this.shouldLookup = true
-      this.type = 5
-    } else if (this.url.protocol === 'socks5h:' || this.url.protocol === 'socks:') {
-      this.shouldLookup = false
-      this.type = 5
-    } else {
-      throw new InvalidProxyProtocolError(this.url)
-    }
-  }
-
-  createConnection (options, callback) {
-    const socksOptions = {
-      proxy: {
-        host: this.url.hostname,
-        port: parseInt(this.url.port, 10),
-        type: this.type,
-        userId: this.url.username,
-        password: this.url.password,
-      },
-      destination: {
-        host: options.host,
-        port: parseInt(options.port, 10),
-      },
-      command: 'connect',
-      socket_options: {
-        family: this.agent.options.family,
-        lookup: this.lookup,
-      },
-    }
-
-    const connect = () => {
-      let connectionTimeout
-      const socksClient = new SocksClient(socksOptions)
-
-      const onError = (err) => {
-        socksClient.removeListener('established', onEstablished)
-        return callback(err)
-      }
-
-      const onEstablished = (connection) => {
-        clearTimeout(connectionTimeout)
-        socksClient.removeListener('error', onError)
-
-        if (this.secure) {
-          connection.socket = tls.connect({ ...options, socket: connection.socket })
-        }
-
-        connection.socket.setKeepAlive(this.agent.keepAlive, this.agent.keepAliveMsecs)
-        connection.socket.setNoDelay(this.agent.keepAlive)
-
-        if (options.timeouts.idle) {
-          connection.socket.setTimeout(options.timeouts.idle)
-          connection.socket.once('timeout', () => {
-            connection.socket.destroy(new IdleTimeoutError(this.url.host))
-          })
-        }
-
-        return callback(null, connection.socket)
-      }
-
-      socksClient.once('error', onError)
-      socksClient.once('established', onEstablished)
-
-      if (options.timeouts.connection) {
-        connectionTimeout = setTimeout(() => {
-          return callback(new ConnectionTimeoutError(this.url.host))
-        }, options.timeouts.connection)
-      }
-
-      socksClient.connect()
-    }
-
-    if (!this.shouldLookup) {
-      return connect()
-    }
-
-    this.lookup(options.host, (err, result) => {
-      if (err) {
-        return callback(err)
-      }
-
-      socksOptions.destination.host = result
-      connect()
-    })
-  }
-
-  addRequest (request, options) {
-    if (this.agent.options.timeouts.response) {
-      let responseTimeout
-
-      const onFinish = () => {
-        responseTimeout = setTimeout(() => {
-          request.destroy(new ResponseTimeoutError(this, request))
-        }, this.agent.options.timeouts.response)
-      }
-
-      const onResponse = () => {
-        clearTimeout(responseTimeout)
-      }
-
-      request.once('finish', onFinish)
-      request.once('response', onResponse)
-    }
-
-    if (this.agent.options.timeouts.transfer) {
-      let transferTimeout
-
-      const onResponse = (res) => {
-        transferTimeout = setTimeout(() => {
-          res.destroy(new TransferTimeoutError(this, request))
-        }, this.agent.options.timeouts.transfer)
-
-        res.once('close', () => {
-          clearTimeout(transferTimeout)
-        })
-      }
-
-      request.once('response', onResponse)
-    }
-  }
-}
-
-module.exports = SocksProxy
diff --git a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/util.js b/node_modules/npm-profile/node_modules/@npmcli/agent/lib/util.js
deleted file mode 100644
index 512207084d23e..0000000000000
--- a/node_modules/npm-profile/node_modules/@npmcli/agent/lib/util.js
+++ /dev/null
@@ -1,33 +0,0 @@
-'use strict'
-
-const dns = require('dns')
-
-const normalizeOptions = (_options) => {
-  const options = { ..._options }
-
-  if (typeof options.keepAlive === 'undefined') {
-    options.keepAlive = true
-  }
-
-  if (!options.timeouts) {
-    options.timeouts = {}
-  }
-
-  if (options.timeout) {
-    options.timeouts.idle = options.timeout
-    delete options.timeout
-  }
-
-  options.family = !isNaN(+options.family) ? +options.family : 0
-  options.dns = {
-    ttl: 5 * 60 * 1000,
-    lookup: dns.lookup,
-    ...options.dns,
-  }
-
-  return options
-}
-
-module.exports = {
-  normalizeOptions,
-}
diff --git a/node_modules/npm-profile/node_modules/@npmcli/agent/package.json b/node_modules/npm-profile/node_modules/@npmcli/agent/package.json
deleted file mode 100644
index a3fb4262b9c86..0000000000000
--- a/node_modules/npm-profile/node_modules/@npmcli/agent/package.json
+++ /dev/null
@@ -1,56 +0,0 @@
-{
-  "name": "@npmcli/agent",
-  "version": "1.1.0",
-  "description": "the http/https agent used by the npm cli",
-  "main": "lib/index.js",
-  "scripts": {
-    "gencerts": "bash scripts/create-cert.sh",
-    "test": "tap",
-    "lint": "eslint \"**/*.js\"",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "lintfix": "npm run lint -- --fix",
-    "snap": "tap",
-    "posttest": "npm run lint"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/agent/issues"
-  },
-  "homepage": "https://github.com/npm/agent#readme",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.15.1",
-    "publish": "true"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.15.1",
-    "minipass-fetch": "^3.0.3",
-    "nock": "^13.2.7",
-    "simple-socks": "^2.2.2",
-    "tap": "^16.3.0"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/agent.git"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "dependencies": {
-    "lru-cache": "^7.18.3",
-    "socks": "^2.7.1"
-  }
-}
diff --git a/node_modules/npm-profile/node_modules/cacache/LICENSE.md b/node_modules/npm-profile/node_modules/cacache/LICENSE.md
deleted file mode 100644
index 8d28acf866d93..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-profile/node_modules/cacache/lib/content/path.js b/node_modules/npm-profile/node_modules/cacache/lib/content/path.js
deleted file mode 100644
index ad5a76a4f73f2..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/lib/content/path.js
+++ /dev/null
@@ -1,29 +0,0 @@
-'use strict'
-
-const contentVer = require('../../package.json')['cache-version'].content
-const hashToSegments = require('../util/hash-to-segments')
-const path = require('path')
-const ssri = require('ssri')
-
-// Current format of content file path:
-//
-// sha512-BaSE64Hex= ->
-// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
-//
-module.exports = contentPath
-
-function contentPath (cache, integrity) {
-  const sri = ssri.parse(integrity, { single: true })
-  // contentPath is the *strongest* algo given
-  return path.join(
-    contentDir(cache),
-    sri.algorithm,
-    ...hashToSegments(sri.hexDigest())
-  )
-}
-
-module.exports.contentDir = contentDir
-
-function contentDir (cache) {
-  return path.join(cache, `content-v${contentVer}`)
-}
diff --git a/node_modules/npm-profile/node_modules/cacache/lib/content/read.js b/node_modules/npm-profile/node_modules/cacache/lib/content/read.js
deleted file mode 100644
index f41b539df65dc..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/lib/content/read.js
+++ /dev/null
@@ -1,166 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const fsm = require('fs-minipass')
-const ssri = require('ssri')
-const contentPath = require('./path')
-const Pipeline = require('minipass-pipeline')
-
-module.exports = read
-
-const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
-async function read (cache, integrity, opts = {}) {
-  const { size } = opts
-  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
-    // get size
-    const stat = await fs.stat(cpath)
-    return { stat, cpath, sri }
-  })
-  if (typeof size === 'number' && stat.size !== size) {
-    throw sizeError(size, stat.size)
-  }
-
-  if (stat.size > MAX_SINGLE_READ_SIZE) {
-    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
-  }
-
-  const data = await fs.readFile(cpath, { encoding: null })
-  if (!ssri.checkData(data, sri)) {
-    throw integrityError(sri, cpath)
-  }
-
-  return data
-}
-
-const readPipeline = (cpath, size, sri, stream) => {
-  stream.push(
-    new fsm.ReadStream(cpath, {
-      size,
-      readSize: MAX_SINGLE_READ_SIZE,
-    }),
-    ssri.integrityStream({
-      integrity: sri,
-      size,
-    })
-  )
-  return stream
-}
-
-module.exports.stream = readStream
-module.exports.readStream = readStream
-
-function readStream (cache, integrity, opts = {}) {
-  const { size } = opts
-  const stream = new Pipeline()
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
-      // just stat to ensure it exists
-      const stat = await fs.stat(cpath)
-      return { stat, cpath, sri }
-    })
-    if (typeof size === 'number' && size !== stat.size) {
-      return stream.emit('error', sizeError(size, stat.size))
-    }
-
-    return readPipeline(cpath, stat.size, sri, stream)
-  }).catch(err => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.copy = copy
-
-function copy (cache, integrity, dest) {
-  return withContentSri(cache, integrity, (cpath, sri) => {
-    return fs.copyFile(cpath, dest)
-  })
-}
-
-module.exports.hasContent = hasContent
-
-async function hasContent (cache, integrity) {
-  if (!integrity) {
-    return false
-  }
-
-  try {
-    return await withContentSri(cache, integrity, async (cpath, sri) => {
-      const stat = await fs.stat(cpath)
-      return { size: stat.size, sri, stat }
-    })
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return false
-    }
-
-    if (err.code === 'EPERM') {
-      /* istanbul ignore else */
-      if (process.platform !== 'win32') {
-        throw err
-      } else {
-        return false
-      }
-    }
-  }
-}
-
-async function withContentSri (cache, integrity, fn) {
-  const sri = ssri.parse(integrity)
-  // If `integrity` has multiple entries, pick the first digest
-  // with available local data.
-  const algo = sri.pickAlgorithm()
-  const digests = sri[algo]
-
-  if (digests.length <= 1) {
-    const cpath = contentPath(cache, digests[0])
-    return fn(cpath, digests[0])
-  } else {
-    // Can't use race here because a generic error can happen before
-    // a ENOENT error, and can happen before a valid result
-    const results = await Promise.all(digests.map(async (meta) => {
-      try {
-        return await withContentSri(cache, meta, fn)
-      } catch (err) {
-        if (err.code === 'ENOENT') {
-          return Object.assign(
-            new Error('No matching content found for ' + sri.toString()),
-            { code: 'ENOENT' }
-          )
-        }
-        return err
-      }
-    }))
-    // Return the first non error if it is found
-    const result = results.find((r) => !(r instanceof Error))
-    if (result) {
-      return result
-    }
-
-    // Throw the No matching content found error
-    const enoentError = results.find((r) => r.code === 'ENOENT')
-    if (enoentError) {
-      throw enoentError
-    }
-
-    // Throw generic error
-    throw results.find((r) => r instanceof Error)
-  }
-}
-
-function sizeError (expected, found) {
-  /* eslint-disable-next-line max-len */
-  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
-  err.expected = expected
-  err.found = found
-  err.code = 'EBADSIZE'
-  return err
-}
-
-function integrityError (sri, path) {
-  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
-  err.code = 'EINTEGRITY'
-  err.sri = sri
-  err.path = path
-  return err
-}
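
The withContentSri() fallback above is worth isolating as a pattern: when an integrity string carries several digests it runs every candidate and prefers the first success, and as its own comment notes, Promise.race() would not work because an early rejection settles the race before a slower success can land. A hypothetical standalone helper with the same shape:

    'use strict'
    // Run every task; return the first successful value, else throw,
    // preferring an ENOENT over a generic failure as the code above does.
    async function firstSuccess (tasks) {
      const results = await Promise.all(
        tasks.map((t) => t().then((v) => ({ ok: true, v }), (err) => ({ ok: false, err })))
      )
      const hit = results.find((r) => r.ok)
      if (hit) {
        return hit.v
      }
      const enoent = results.find((r) => r.err && r.err.code === 'ENOENT')
      throw (enoent || results[0]).err
    }

    // usage: firstSuccess([() => fs.promises.stat(a), () => fs.promises.stat(b)])
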
diff --git a/node_modules/npm-profile/node_modules/cacache/lib/content/rm.js b/node_modules/npm-profile/node_modules/cacache/lib/content/rm.js
deleted file mode 100644
index ce58d679e4cb2..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/lib/content/rm.js
+++ /dev/null
@@ -1,18 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const contentPath = require('./path')
-const { hasContent } = require('./read')
-
-module.exports = rm
-
-async function rm (cache, integrity) {
-  const content = await hasContent(cache, integrity)
-  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
-  if (content && content.sri) {
-    await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
-    return true
-  } else {
-    return false
-  }
-}
diff --git a/node_modules/npm-profile/node_modules/cacache/lib/content/write.js b/node_modules/npm-profile/node_modules/cacache/lib/content/write.js
deleted file mode 100644
index 7146146581287..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/lib/content/write.js
+++ /dev/null
@@ -1,205 +0,0 @@
-'use strict'
-
-const events = require('events')
-
-const contentPath = require('./path')
-const fs = require('fs/promises')
-const { moveFile } = require('@npmcli/fs')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-const Flush = require('minipass-flush')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-const fsm = require('fs-minipass')
-
-module.exports = write
-
-// Cache of move operations in process so we don't duplicate
-const moveOperations = new Map()
-
-async function write (cache, data, opts = {}) {
-  const { algorithms, size, integrity } = opts
-
-  if (typeof size === 'number' && data.length !== size) {
-    throw sizeError(size, data.length)
-  }
-
-  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
-  if (integrity && !ssri.checkData(data, integrity, opts)) {
-    throw checksumError(integrity, sri)
-  }
-
-  for (const algo in sri) {
-    const tmp = await makeTmp(cache, opts)
-    const hash = sri[algo].toString()
-    try {
-      await fs.writeFile(tmp.target, data, { flag: 'wx' })
-      await moveToDestination(tmp, cache, hash, opts)
-    } finally {
-      if (!tmp.moved) {
-        await fs.rm(tmp.target, { recursive: true, force: true })
-      }
-    }
-  }
-  return { integrity: sri, size: data.length }
-}
-
-module.exports.stream = writeStream
-
-// writes proxied to the 'inputStream' that is passed to the Promise
-// 'end' is deferred until content is handled.
-class CacacheWriteStream extends Flush {
-  constructor (cache, opts) {
-    super()
-    this.opts = opts
-    this.cache = cache
-    this.inputStream = new Minipass()
-    this.inputStream.on('error', er => this.emit('error', er))
-    this.inputStream.on('drain', () => this.emit('drain'))
-    this.handleContentP = null
-  }
-
-  write (chunk, encoding, cb) {
-    if (!this.handleContentP) {
-      this.handleContentP = handleContent(
-        this.inputStream,
-        this.cache,
-        this.opts
-      )
-    }
-    return this.inputStream.write(chunk, encoding, cb)
-  }
-
-  flush (cb) {
-    this.inputStream.end(() => {
-      if (!this.handleContentP) {
-        const e = new Error('Cache input stream was empty')
-        e.code = 'ENODATA'
-        // empty streams are probably emitting end right away.
-        // defer this one tick by rejecting a promise on it.
-        return Promise.reject(e).catch(cb)
-      }
-      // eslint-disable-next-line promise/catch-or-return
-      this.handleContentP.then(
-        (res) => {
-          res.integrity && this.emit('integrity', res.integrity)
-          // eslint-disable-next-line promise/always-return
-          res.size !== null && this.emit('size', res.size)
-          cb()
-        },
-        (er) => cb(er)
-      )
-    })
-  }
-}
-
-function writeStream (cache, opts = {}) {
-  return new CacacheWriteStream(cache, opts)
-}
-
-async function handleContent (inputStream, cache, opts) {
-  const tmp = await makeTmp(cache, opts)
-  try {
-    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
-    await moveToDestination(
-      tmp,
-      cache,
-      res.integrity,
-      opts
-    )
-    return res
-  } finally {
-    if (!tmp.moved) {
-      await fs.rm(tmp.target, { recursive: true, force: true })
-    }
-  }
-}
-
-async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
-  const outStream = new fsm.WriteStream(tmpTarget, {
-    flags: 'wx',
-  })
-
-  if (opts.integrityEmitter) {
-    // we need to create these all simultaneously since they can fire in any order
-    const [integrity, size] = await Promise.all([
-      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
-      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
-      new Pipeline(inputStream, outStream).promise(),
-    ])
-    return { integrity, size }
-  }
-
-  let integrity
-  let size
-  const hashStream = ssri.integrityStream({
-    integrity: opts.integrity,
-    algorithms: opts.algorithms,
-    size: opts.size,
-  })
-  hashStream.on('integrity', i => {
-    integrity = i
-  })
-  hashStream.on('size', s => {
-    size = s
-  })
-
-  const pipeline = new Pipeline(inputStream, hashStream, outStream)
-  await pipeline.promise()
-  return { integrity, size }
-}
-
-async function makeTmp (cache, opts) {
-  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
-  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
-  return {
-    target: tmpTarget,
-    moved: false,
-  }
-}
-
-async function moveToDestination (tmp, cache, sri, opts) {
-  const destination = contentPath(cache, sri)
-  const destDir = path.dirname(destination)
-  if (moveOperations.has(destination)) {
-    return moveOperations.get(destination)
-  }
-  moveOperations.set(
-    destination,
-    fs.mkdir(destDir, { recursive: true })
-      .then(async () => {
-        await moveFile(tmp.target, destination, { overwrite: false })
-        tmp.moved = true
-        return tmp.moved
-      })
-      .catch(err => {
-        if (!err.message.startsWith('The destination file exists')) {
-          throw Object.assign(err, { code: 'EEXIST' })
-        }
-      }).finally(() => {
-        moveOperations.delete(destination)
-      })
-
-  )
-  return moveOperations.get(destination)
-}
-
-function sizeError (expected, found) {
-  /* eslint-disable-next-line max-len */
-  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
-  err.expected = expected
-  err.found = found
-  err.code = 'EBADSIZE'
-  return err
-}
-
-function checksumError (expected, found) {
-  const err = new Error(`Integrity check failed:
-  Wanted: ${expected}
-   Found: ${found}`)
-  err.code = 'EINTEGRITY'
-  err.expected = expected
-  err.found = found
-  return err
-}
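
For reference, a minimal sketch (not part of the patch) of the write-then-rename pattern the deleted content writer implements: stage bytes under cache/tmp, hash them, then move the file into a content-addressed location. The flat `content/<hex>` layout and the `writeContent` helper are simplifications for illustration; the real module uses cacache's segmented `content-v2` layout and ssri rather than plain crypto.

    const crypto = require('crypto')
    const path = require('path')
    const { mkdir, writeFile, rename } = require('fs/promises')

    // data is assumed to be a Buffer
    async function writeContent (cache, data) {
      // hash first so the final path is content-addressed
      const digest = crypto.createHash('sha512').update(data).digest('hex')
      const tmp = path.join(cache, 'tmp', `write-${process.pid}-${Date.now()}`)
      await mkdir(path.dirname(tmp), { recursive: true })
      // 'wx' refuses to clobber an existing file, as fsm.WriteStream does above
      await writeFile(tmp, data, { flag: 'wx' })
      const dest = path.join(cache, 'content', digest) // simplified layout
      await mkdir(path.dirname(dest), { recursive: true })
      await rename(tmp, dest) // atomic when tmp and dest share a filesystem
      const sri = `sha512-${Buffer.from(digest, 'hex').toString('base64')}`
      return { integrity: sri, size: data.length }
    }
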
diff --git a/node_modules/npm-profile/node_modules/cacache/lib/entry-index.js b/node_modules/npm-profile/node_modules/cacache/lib/entry-index.js
deleted file mode 100644
index 722a37af5ce15..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/lib/entry-index.js
+++ /dev/null
@@ -1,330 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const {
-  appendFile,
-  mkdir,
-  readFile,
-  readdir,
-  rm,
-  writeFile,
-} = require('fs/promises')
-const { Minipass } = require('minipass')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-
-const contentPath = require('./content/path')
-const hashToSegments = require('./util/hash-to-segments')
-const indexV = require('../package.json')['cache-version'].index
-const { moveFile } = require('@npmcli/fs')
-
-module.exports.NotFoundError = class NotFoundError extends Error {
-  constructor (cache, key) {
-    super(`No cache entry for ${key} found in ${cache}`)
-    this.code = 'ENOENT'
-    this.cache = cache
-    this.key = key
-  }
-}
-
-module.exports.compact = compact
-
-async function compact (cache, key, matchFn, opts = {}) {
-  const bucket = bucketPath(cache, key)
-  const entries = await bucketEntries(bucket)
-  const newEntries = []
-  // we loop backwards because the bottom-most result is the newest
-  // since we add new entries with appendFile
-  for (let i = entries.length - 1; i >= 0; --i) {
-    const entry = entries[i]
-    // a null integrity could mean either a delete was appended
-    // or the user has simply stored an index that does not map
-    // to any content. we determine if the user wants to keep the
-    // null integrity based on the validateEntry function passed in options.
-    // if the integrity is null and no validateEntry is provided, we break
-    // as we consider the null integrity to be a deletion of everything
-    // that came before it.
-    if (entry.integrity === null && !opts.validateEntry) {
-      break
-    }
-
-    // if this entry is valid, and it is either the first entry or
-    // the newEntries array doesn't already include an entry that
-    // matches this one based on the provided matchFn, then we add
-    // it to the beginning of our list
-    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
-      (newEntries.length === 0 ||
-        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
-      newEntries.unshift(entry)
-    }
-  }
-
-  const newIndex = '\n' + newEntries.map((entry) => {
-    const stringified = JSON.stringify(entry)
-    const hash = hashEntry(stringified)
-    return `${hash}\t${stringified}`
-  }).join('\n')
-
-  const setup = async () => {
-    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
-    await mkdir(path.dirname(target), { recursive: true })
-    return {
-      target,
-      moved: false,
-    }
-  }
-
-  const teardown = async (tmp) => {
-    if (!tmp.moved) {
-      return rm(tmp.target, { recursive: true, force: true })
-    }
-  }
-
-  const write = async (tmp) => {
-    await writeFile(tmp.target, newIndex, { flag: 'wx' })
-    await mkdir(path.dirname(bucket), { recursive: true })
-    // we use @npmcli/move-file directly here because we
-    // want to overwrite the existing file
-    await moveFile(tmp.target, bucket)
-    tmp.moved = true
-  }
-
-  // write the file atomically
-  const tmp = await setup()
-  try {
-    await write(tmp)
-  } finally {
-    await teardown(tmp)
-  }
-
-  // we reverse the list we generated such that the newest
-  // entries come first in order to make looping through them easier
-  // the true passed to formatEntry tells it to keep null
-  // integrity values, if they made it this far it's because
-  // validateEntry returned true, and as such we should return it
-  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
-}
-
-module.exports.insert = insert
-
-async function insert (cache, key, integrity, opts = {}) {
-  const { metadata, size, time } = opts
-  const bucket = bucketPath(cache, key)
-  const entry = {
-    key,
-    integrity: integrity && ssri.stringify(integrity),
-    time: time || Date.now(),
-    size,
-    metadata,
-  }
-  try {
-    await mkdir(path.dirname(bucket), { recursive: true })
-    const stringified = JSON.stringify(entry)
-    // NOTE - Cleverness ahoy!
-    //
-    // This works because it's tremendously unlikely for an entry to corrupt
-    // another while still preserving the string length of the JSON in
-    // question. So, we just slap the length in there and verify it on read.
-    //
-    // Thanks to @isaacs for the whiteboarding session that ended up with
-    // this.
-    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return undefined
-    }
-
-    throw err
-  }
-  return formatEntry(cache, entry)
-}
-
-module.exports.find = find
-
-async function find (cache, key) {
-  const bucket = bucketPath(cache, key)
-  try {
-    const entries = await bucketEntries(bucket)
-    return entries.reduce((latest, next) => {
-      if (next && next.key === key) {
-        return formatEntry(cache, next)
-      } else {
-        return latest
-      }
-    }, null)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return null
-    } else {
-      throw err
-    }
-  }
-}
-
-module.exports.delete = del
-
-function del (cache, key, opts = {}) {
-  if (!opts.removeFully) {
-    return insert(cache, key, null, opts)
-  }
-
-  const bucket = bucketPath(cache, key)
-  return rm(bucket, { recursive: true, force: true })
-}
-
-module.exports.lsStream = lsStream
-
-function lsStream (cache) {
-  const indexDir = bucketDir(cache)
-  const stream = new Minipass({ objectMode: true })
-
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const buckets = await readdirOrEmpty(indexDir)
-    await Promise.all(buckets.map(async (bucket) => {
-      const bucketPath = path.join(indexDir, bucket)
-      const subbuckets = await readdirOrEmpty(bucketPath)
-      await Promise.all(subbuckets.map(async (subbucket) => {
-        const subbucketPath = path.join(bucketPath, subbucket)
-
-        // "/cachename//./*"
-        const subbucketEntries = await readdirOrEmpty(subbucketPath)
-        await Promise.all(subbucketEntries.map(async (entry) => {
-          const entryPath = path.join(subbucketPath, entry)
-          try {
-            const entries = await bucketEntries(entryPath)
-            // using a Map here prevents duplicate keys from showing up
-            // twice, I guess?
-            const reduced = entries.reduce((acc, entry) => {
-              acc.set(entry.key, entry)
-              return acc
-            }, new Map())
-            // reduced is a map of key => entry
-            for (const entry of reduced.values()) {
-              const formatted = formatEntry(cache, entry)
-              if (formatted) {
-                stream.write(formatted)
-              }
-            }
-          } catch (err) {
-            if (err.code === 'ENOENT') {
-              return undefined
-            }
-            throw err
-          }
-        }))
-      }))
-    }))
-    stream.end()
-    return stream
-  }).catch(err => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.ls = ls
-
-async function ls (cache) {
-  const entries = await lsStream(cache).collect()
-  return entries.reduce((acc, xs) => {
-    acc[xs.key] = xs
-    return acc
-  }, {})
-}
-
-module.exports.bucketEntries = bucketEntries
-
-async function bucketEntries (bucket, filter) {
-  const data = await readFile(bucket, 'utf8')
-  return _bucketEntries(data, filter)
-}
-
-function _bucketEntries (data, filter) {
-  const entries = []
-  data.split('\n').forEach((entry) => {
-    if (!entry) {
-      return
-    }
-
-    const pieces = entry.split('\t')
-    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
-      // Hash is no good! Corruption or malice? Doesn't matter!
-      // EJECT EJECT
-      return
-    }
-    let obj
-    try {
-      obj = JSON.parse(pieces[1])
-    } catch (_) {
-      // eslint-ignore-next-line no-empty-block
-    }
-    // coverage disabled here, no need to test with an entry that parses to something falsey
-    // istanbul ignore else
-    if (obj) {
-      entries.push(obj)
-    }
-  })
-  return entries
-}
-
-module.exports.bucketDir = bucketDir
-
-function bucketDir (cache) {
-  return path.join(cache, `index-v${indexV}`)
-}
-
-module.exports.bucketPath = bucketPath
-
-function bucketPath (cache, key) {
-  const hashed = hashKey(key)
-  return path.join.apply(
-    path,
-    [bucketDir(cache)].concat(hashToSegments(hashed))
-  )
-}
-
-module.exports.hashKey = hashKey
-
-function hashKey (key) {
-  return hash(key, 'sha256')
-}
-
-module.exports.hashEntry = hashEntry
-
-function hashEntry (str) {
-  return hash(str, 'sha1')
-}
-
-function hash (str, digest) {
-  return crypto
-    .createHash(digest)
-    .update(str)
-    .digest('hex')
-}
-
-function formatEntry (cache, entry, keepAll) {
-  // Treat null digests as deletions. They'll shadow any previous entries.
-  if (!entry.integrity && !keepAll) {
-    return null
-  }
-
-  return {
-    key: entry.key,
-    integrity: entry.integrity,
-    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
-    size: entry.size,
-    time: entry.time,
-    metadata: entry.metadata,
-  }
-}
-
-function readdirOrEmpty (dir) {
-  return readdir(dir).catch((err) => {
-    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
-      return []
-    }
-
-    throw err
-  })
-}
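
For reference, a minimal sketch (not part of the patch) of the append-only bucket format maintained above: each line is the sha1 of the entry's JSON, a tab, then the JSON, so a torn or corrupted append fails the hash check on read and is skipped instead of poisoning the whole bucket.

    const crypto = require('crypto')

    const hashEntry = (str) => crypto.createHash('sha1').update(str).digest('hex')

    function formatLine (entry) {
      const stringified = JSON.stringify(entry)
      return `\n${hashEntry(stringified)}\t${stringified}`
    }

    function parseLine (line) {
      const [hash, json] = line.split('\t')
      // a mismatched hash means a torn or corrupted append; skip the line
      if (!json || hashEntry(json) !== hash) {
        return null
      }
      return JSON.parse(json)
    }

    const line = formatLine({ key: 'pkg:foo', integrity: 'sha512-xxx', time: Date.now() }).trim()
    console.log(parseLine(line)) // round-trips the entry
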
diff --git a/node_modules/npm-profile/node_modules/cacache/lib/get.js b/node_modules/npm-profile/node_modules/cacache/lib/get.js
deleted file mode 100644
index 80ec206c7ecaa..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/lib/get.js
+++ /dev/null
@@ -1,170 +0,0 @@
-'use strict'
-
-const Collect = require('minipass-collect')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const read = require('./content/read')
-
-async function getData (cache, key, opts = {}) {
-  const { integrity, memoize, size } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return {
-      metadata: memoized.entry.metadata,
-      data: memoized.data,
-      integrity: memoized.entry.integrity,
-      size: memoized.entry.size,
-    }
-  }
-
-  const entry = await index.find(cache, key, opts)
-  if (!entry) {
-    throw new index.NotFoundError(cache, key)
-  }
-  const data = await read(cache, entry.integrity, { integrity, size })
-  if (memoize) {
-    memo.put(cache, entry, data, opts)
-  }
-
-  return {
-    data,
-    metadata: entry.metadata,
-    size: entry.size,
-    integrity: entry.integrity,
-  }
-}
-module.exports = getData
-
-async function getDataByDigest (cache, key, opts = {}) {
-  const { integrity, memoize, size } = opts
-  const memoized = memo.get.byDigest(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return memoized
-  }
-
-  const res = await read(cache, key, { integrity, size })
-  if (memoize) {
-    memo.put.byDigest(cache, key, res, opts)
-  }
-  return res
-}
-module.exports.byDigest = getDataByDigest
-
-const getMemoizedStream = (memoized) => {
-  const stream = new Minipass()
-  stream.on('newListener', function (ev, cb) {
-    ev === 'metadata' && cb(memoized.entry.metadata)
-    ev === 'integrity' && cb(memoized.entry.integrity)
-    ev === 'size' && cb(memoized.entry.size)
-  })
-  stream.end(memoized.data)
-  return stream
-}
-
-function getStream (cache, key, opts = {}) {
-  const { memoize, size } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return getMemoizedStream(memoized)
-  }
-
-  const stream = new Pipeline()
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const entry = await index.find(cache, key)
-    if (!entry) {
-      throw new index.NotFoundError(cache, key)
-    }
-
-    stream.emit('metadata', entry.metadata)
-    stream.emit('integrity', entry.integrity)
-    stream.emit('size', entry.size)
-    stream.on('newListener', function (ev, cb) {
-      ev === 'metadata' && cb(entry.metadata)
-      ev === 'integrity' && cb(entry.integrity)
-      ev === 'size' && cb(entry.size)
-    })
-
-    const src = read.readStream(
-      cache,
-      entry.integrity,
-      { ...opts, size: typeof size !== 'number' ? entry.size : size }
-    )
-
-    if (memoize) {
-      const memoStream = new Collect.PassThrough()
-      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
-      stream.unshift(memoStream)
-    }
-    stream.unshift(src)
-    return stream
-  }).catch((err) => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.stream = getStream
-
-function getStreamDigest (cache, integrity, opts = {}) {
-  const { memoize } = opts
-  const memoized = memo.get.byDigest(cache, integrity, opts)
-  if (memoized && memoize !== false) {
-    const stream = new Minipass()
-    stream.end(memoized)
-    return stream
-  } else {
-    const stream = read.readStream(cache, integrity, opts)
-    if (!memoize) {
-      return stream
-    }
-
-    const memoStream = new Collect.PassThrough()
-    memoStream.on('collect', data => memo.put.byDigest(
-      cache,
-      integrity,
-      data,
-      opts
-    ))
-    return new Pipeline(stream, memoStream)
-  }
-}
-
-module.exports.stream.byDigest = getStreamDigest
-
-function info (cache, key, opts = {}) {
-  const { memoize } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return Promise.resolve(memoized.entry)
-  } else {
-    return index.find(cache, key)
-  }
-}
-module.exports.info = info
-
-async function copy (cache, key, dest, opts = {}) {
-  const entry = await index.find(cache, key, opts)
-  if (!entry) {
-    throw new index.NotFoundError(cache, key)
-  }
-  await read.copy(cache, entry.integrity, dest, opts)
-  return {
-    metadata: entry.metadata,
-    size: entry.size,
-    integrity: entry.integrity,
-  }
-}
-
-module.exports.copy = copy
-
-async function copyByDigest (cache, key, dest, opts = {}) {
-  await read.copy(cache, key, dest, opts)
-  return key
-}
-
-module.exports.copy.byDigest = copyByDigest
-
-module.exports.hasContent = read.hasContent
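
For reference, the deleted getStream relies on a replay trick: 'metadata', 'integrity', and 'size' fire before consumers have attached, so a 'newListener' hook hands the values to anyone who subscribes late. A minimal sketch (not part of the patch) of that trick on a bare EventEmitter:

    const { EventEmitter } = require('events')

    function replayingEmitter (metadata) {
      const ee = new EventEmitter()
      ee.emit('metadata', metadata) // lost: nobody is listening yet...
      ee.on('newListener', (ev, listener) => {
        // ...so 'newListener' (which fires *before* the listener is added)
        // hands the value straight to late subscribers
        if (ev === 'metadata') {
          listener(metadata)
        }
      })
      return ee
    }

    replayingEmitter({ hello: 'world' })
      .on('metadata', (m) => console.log(m)) // logs { hello: 'world' }
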
diff --git a/node_modules/npm-profile/node_modules/cacache/lib/index.js b/node_modules/npm-profile/node_modules/cacache/lib/index.js
deleted file mode 100644
index c9b0da5f3a271..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/lib/index.js
+++ /dev/null
@@ -1,42 +0,0 @@
-'use strict'
-
-const get = require('./get.js')
-const put = require('./put.js')
-const rm = require('./rm.js')
-const verify = require('./verify.js')
-const { clearMemoized } = require('./memoization.js')
-const tmp = require('./util/tmp.js')
-const index = require('./entry-index.js')
-
-module.exports.index = {}
-module.exports.index.compact = index.compact
-module.exports.index.insert = index.insert
-
-module.exports.ls = index.ls
-module.exports.ls.stream = index.lsStream
-
-module.exports.get = get
-module.exports.get.byDigest = get.byDigest
-module.exports.get.stream = get.stream
-module.exports.get.stream.byDigest = get.stream.byDigest
-module.exports.get.copy = get.copy
-module.exports.get.copy.byDigest = get.copy.byDigest
-module.exports.get.info = get.info
-module.exports.get.hasContent = get.hasContent
-
-module.exports.put = put
-module.exports.put.stream = put.stream
-
-module.exports.rm = rm.entry
-module.exports.rm.all = rm.all
-module.exports.rm.entry = module.exports.rm
-module.exports.rm.content = rm.content
-
-module.exports.clearMemoized = clearMemoized
-
-module.exports.tmp = {}
-module.exports.tmp.mkdir = tmp.mkdir
-module.exports.tmp.withTmp = tmp.withTmp
-
-module.exports.verify = verify
-module.exports.verify.lastRun = verify.lastRun
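
For reference, a usage sketch (not part of the patch) of the public surface wired up above; `cachePath` is a hypothetical scratch directory.

    const cacache = require('cacache')

    const cachePath = '/tmp/my-scratch-cache' // hypothetical location

    async function demo () {
      const integrity = await cacache.put(cachePath, 'my-key', 'hello, cache')
      const { data } = await cacache.get(cachePath, 'my-key')
      console.log(data.toString(), integrity.toString())
      // content is also addressable directly by digest, bypassing the index
      const raw = await cacache.get.byDigest(cachePath, integrity)
      console.log(raw.toString())
      await cacache.rm.entry(cachePath, 'my-key')
    }

    demo().catch(console.error)
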
diff --git a/node_modules/npm-profile/node_modules/cacache/lib/memoization.js b/node_modules/npm-profile/node_modules/cacache/lib/memoization.js
deleted file mode 100644
index 0ff604a479c9c..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/lib/memoization.js
+++ /dev/null
@@ -1,72 +0,0 @@
-'use strict'
-
-const LRU = require('lru-cache')
-
-const MEMOIZED = new LRU({
-  max: 500,
-  maxSize: 50 * 1024 * 1024, // 50MB
-  ttl: 3 * 60 * 1000, // 3 minutes
-  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
-})
-
-module.exports.clearMemoized = clearMemoized
-
-function clearMemoized () {
-  const old = {}
-  MEMOIZED.forEach((v, k) => {
-    old[k] = v
-  })
-  MEMOIZED.clear()
-  return old
-}
-
-module.exports.put = put
-
-function put (cache, entry, data, opts) {
-  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
-  putDigest(cache, entry.integrity, data, opts)
-}
-
-module.exports.put.byDigest = putDigest
-
-function putDigest (cache, integrity, data, opts) {
-  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
-}
-
-module.exports.get = get
-
-function get (cache, key, opts) {
-  return pickMem(opts).get(`key:${cache}:${key}`)
-}
-
-module.exports.get.byDigest = getDigest
-
-function getDigest (cache, integrity, opts) {
-  return pickMem(opts).get(`digest:${cache}:${integrity}`)
-}
-
-class ObjProxy {
-  constructor (obj) {
-    this.obj = obj
-  }
-
-  get (key) {
-    return this.obj[key]
-  }
-
-  set (key, val) {
-    this.obj[key] = val
-  }
-}
-
-function pickMem (opts) {
-  if (!opts || !opts.memoize) {
-    return MEMOIZED
-  } else if (opts.memoize.get && opts.memoize.set) {
-    return opts.memoize
-  } else if (typeof opts.memoize === 'object') {
-    return new ObjProxy(opts.memoize)
-  } else {
-    return MEMOIZED
-  }
-}
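
For reference, pickMem above accepts anything with get/set methods as a memoization store, so a caller can pass a plain Map and control the memoized lifetime itself. A usage sketch (not part of the patch), with a hypothetical cache path:

    const cacache = require('cacache')

    const memo = new Map() // has get/set, so pickMem uses it as-is
    const cachePath = '/tmp/my-scratch-cache' // hypothetical location

    async function demo () {
      await cacache.put(cachePath, 'k', 'v', { memoize: memo })
      // served from `memo` (keys are namespaced `key:<cache>:<key>`)
      const { data } = await cacache.get(cachePath, 'k', { memoize: memo })
      console.log(data.toString(), memo.size) // 'v' 2 (key + digest entries)
    }

    demo().catch(console.error)
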
diff --git a/node_modules/npm-profile/node_modules/cacache/lib/put.js b/node_modules/npm-profile/node_modules/cacache/lib/put.js
deleted file mode 100644
index 9fc932d5f6dec..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/lib/put.js
+++ /dev/null
@@ -1,80 +0,0 @@
-'use strict'
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const write = require('./content/write')
-const Flush = require('minipass-flush')
-const { PassThrough } = require('minipass-collect')
-const Pipeline = require('minipass-pipeline')
-
-const putOpts = (opts) => ({
-  algorithms: ['sha512'],
-  ...opts,
-})
-
-module.exports = putData
-
-async function putData (cache, key, data, opts = {}) {
-  const { memoize } = opts
-  opts = putOpts(opts)
-  const res = await write(cache, data, opts)
-  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
-  if (memoize) {
-    memo.put(cache, entry, data, opts)
-  }
-
-  return res.integrity
-}
-
-module.exports.stream = putStream
-
-function putStream (cache, key, opts = {}) {
-  const { memoize } = opts
-  opts = putOpts(opts)
-  let integrity
-  let size
-  let error
-
-  let memoData
-  const pipeline = new Pipeline()
-  // first item in the pipeline is the memoizer, because we need
-  // that to end first and get the collected data.
-  if (memoize) {
-    const memoizer = new PassThrough().on('collect', data => {
-      memoData = data
-    })
-    pipeline.push(memoizer)
-  }
-
-  // contentStream is a write-only, not a passthrough
-  // no data comes out of it.
-  const contentStream = write.stream(cache, opts)
-    .on('integrity', (int) => {
-      integrity = int
-    })
-    .on('size', (s) => {
-      size = s
-    })
-    .on('error', (err) => {
-      error = err
-    })
-
-  pipeline.push(contentStream)
-
-  // last but not least, we write the index and emit hash and size,
-  // and memoize if we're doing that
-  pipeline.push(new Flush({
-    async flush () {
-      if (!error) {
-        const entry = await index.insert(cache, key, integrity, { ...opts, size })
-        if (memoize && memoData) {
-          memo.put(cache, entry, memoData, opts)
-        }
-        pipeline.emit('integrity', integrity)
-        pipeline.emit('size', size)
-      }
-    },
-  }))
-
-  return pipeline
-}
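
For reference, a usage sketch (not part of the patch) of the pipeline above: pipe any readable into put.stream and listen for 'integrity', which the final Flush stage emits once the index entry has been written.

    const fs = require('fs')
    const cacache = require('cacache')

    const cachePath = '/tmp/my-scratch-cache' // hypothetical location

    fs.createReadStream('my-tarball.tgz')     // hypothetical input file
      .pipe(
        cacache.put.stream(cachePath, 'my-tarball')
          .on('integrity', (sri) => console.log('stored as', sri.toString()))
      )
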
diff --git a/node_modules/npm-profile/node_modules/cacache/lib/rm.js b/node_modules/npm-profile/node_modules/cacache/lib/rm.js
deleted file mode 100644
index a94760c7cf243..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/lib/rm.js
+++ /dev/null
@@ -1,31 +0,0 @@
-'use strict'
-
-const { rm } = require('fs/promises')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const memo = require('./memoization')
-const path = require('path')
-const rmContent = require('./content/rm')
-
-module.exports = entry
-module.exports.entry = entry
-
-function entry (cache, key, opts) {
-  memo.clearMemoized()
-  return index.delete(cache, key, opts)
-}
-
-module.exports.content = content
-
-function content (cache, integrity) {
-  memo.clearMemoized()
-  return rmContent(cache, integrity)
-}
-
-module.exports.all = all
-
-async function all (cache) {
-  memo.clearMemoized()
-  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
-  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
-}
diff --git a/node_modules/npm-profile/node_modules/cacache/lib/util/glob.js b/node_modules/npm-profile/node_modules/cacache/lib/util/glob.js
deleted file mode 100644
index 8500c1c16a429..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/lib/util/glob.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-const { glob } = require('glob')
-const path = require('path')
-
-const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
-module.exports = (path, options) => glob(globify(path), options)
diff --git a/node_modules/npm-profile/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/npm-profile/node_modules/cacache/lib/util/hash-to-segments.js
deleted file mode 100644
index 445599b503808..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/lib/util/hash-to-segments.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-module.exports = hashToSegments
-
-function hashToSegments (hash) {
-  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
-}
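
For reference (not part of the patch), the two-character fan-out this produces keeps any single index or content directory from accumulating an unbounded number of entries:

    const hashToSegments = (hash) => [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]

    console.log(hashToSegments('abcdef012345'))
    // [ 'ab', 'cd', 'ef012345' ]  ->  .../index-v5/ab/cd/ef012345
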
diff --git a/node_modules/npm-profile/node_modules/cacache/lib/util/tmp.js b/node_modules/npm-profile/node_modules/cacache/lib/util/tmp.js
deleted file mode 100644
index 0bf5302136ebe..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/lib/util/tmp.js
+++ /dev/null
@@ -1,26 +0,0 @@
-'use strict'
-
-const { withTempDir } = require('@npmcli/fs')
-const fs = require('fs/promises')
-const path = require('path')
-
-module.exports.mkdir = mktmpdir
-
-async function mktmpdir (cache, opts = {}) {
-  const { tmpPrefix } = opts
-  const tmpDir = path.join(cache, 'tmp')
-  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
-  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
-  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
-  return fs.mkdtemp(target, { owner: 'inherit' })
-}
-
-module.exports.withTmp = withTmp
-
-function withTmp (cache, opts, cb) {
-  if (!cb) {
-    cb = opts
-    opts = {}
-  }
-  return withTempDir(path.join(cache, 'tmp'), cb, opts)
-}
diff --git a/node_modules/npm-profile/node_modules/cacache/lib/verify.js b/node_modules/npm-profile/node_modules/cacache/lib/verify.js
deleted file mode 100644
index 62e85c946490f..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/lib/verify.js
+++ /dev/null
@@ -1,257 +0,0 @@
-'use strict'
-
-const {
-  mkdir,
-  readFile,
-  rm,
-  stat,
-  truncate,
-  writeFile,
-} = require('fs/promises')
-const pMap = require('p-map')
-const contentPath = require('./content/path')
-const fsm = require('fs-minipass')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const path = require('path')
-const ssri = require('ssri')
-
-const hasOwnProperty = (obj, key) =>
-  Object.prototype.hasOwnProperty.call(obj, key)
-
-const verifyOpts = (opts) => ({
-  concurrency: 20,
-  log: { silly () {} },
-  ...opts,
-})
-
-module.exports = verify
-
-async function verify (cache, opts) {
-  opts = verifyOpts(opts)
-  opts.log.silly('verify', 'verifying cache at', cache)
-
-  const steps = [
-    markStartTime,
-    fixPerms,
-    garbageCollect,
-    rebuildIndex,
-    cleanTmp,
-    writeVerifile,
-    markEndTime,
-  ]
-
-  const stats = {}
-  for (const step of steps) {
-    const label = step.name
-    const start = new Date()
-    const s = await step(cache, opts)
-    if (s) {
-      Object.keys(s).forEach((k) => {
-        stats[k] = s[k]
-      })
-    }
-    const end = new Date()
-    if (!stats.runTime) {
-      stats.runTime = {}
-    }
-    stats.runTime[label] = end - start
-  }
-  stats.runTime.total = stats.endTime - stats.startTime
-  opts.log.silly(
-    'verify',
-    'verification finished for',
-    cache,
-    'in',
-    `${stats.runTime.total}ms`
-  )
-  return stats
-}
-
-async function markStartTime (cache, opts) {
-  return { startTime: new Date() }
-}
-
-async function markEndTime (cache, opts) {
-  return { endTime: new Date() }
-}
-
-async function fixPerms (cache, opts) {
-  opts.log.silly('verify', 'fixing cache permissions')
-  await mkdir(cache, { recursive: true })
-  return null
-}
-
-// Implements a naive mark-and-sweep tracing garbage collector.
-//
-// The algorithm is basically as follows:
-// 1. Read (and filter) all index entries ("pointers")
-// 2. Mark each integrity value as "live"
-// 3. Read entire filesystem tree in `content-vX/` dir
-// 4. If content is live, verify its checksum and delete it if it fails
-// 5. If content is not marked as live, rm it.
-//
-async function garbageCollect (cache, opts) {
-  opts.log.silly('verify', 'garbage collecting content')
-  const indexStream = index.lsStream(cache)
-  const liveContent = new Set()
-  indexStream.on('data', (entry) => {
-    if (opts.filter && !opts.filter(entry)) {
-      return
-    }
-
-    // integrity is stringified, re-parse it so we can get each hash
-    const integrity = ssri.parse(entry.integrity)
-    for (const algo in integrity) {
-      liveContent.add(integrity[algo].toString())
-    }
-  })
-  await new Promise((resolve, reject) => {
-    indexStream.on('end', resolve).on('error', reject)
-  })
-  const contentDir = contentPath.contentDir(cache)
-  const files = await glob(path.join(contentDir, '**'), {
-    follow: false,
-    nodir: true,
-    nosort: true,
-  })
-  const stats = {
-    verifiedContent: 0,
-    reclaimedCount: 0,
-    reclaimedSize: 0,
-    badContentCount: 0,
-    keptSize: 0,
-  }
-  await pMap(
-    files,
-    async (f) => {
-      const split = f.split(/[/\\]/)
-      const digest = split.slice(split.length - 3).join('')
-      const algo = split[split.length - 4]
-      const integrity = ssri.fromHex(digest, algo)
-      if (liveContent.has(integrity.toString())) {
-        const info = await verifyContent(f, integrity)
-        if (!info.valid) {
-          stats.reclaimedCount++
-          stats.badContentCount++
-          stats.reclaimedSize += info.size
-        } else {
-          stats.verifiedContent++
-          stats.keptSize += info.size
-        }
-      } else {
-        // No entries refer to this content. We can delete.
-        stats.reclaimedCount++
-        const s = await stat(f)
-        await rm(f, { recursive: true, force: true })
-        stats.reclaimedSize += s.size
-      }
-      return stats
-    },
-    { concurrency: opts.concurrency }
-  )
-  return stats
-}
-
-async function verifyContent (filepath, sri) {
-  const contentInfo = {}
-  try {
-    const { size } = await stat(filepath)
-    contentInfo.size = size
-    contentInfo.valid = true
-    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return { size: 0, valid: false }
-    }
-    if (err.code !== 'EINTEGRITY') {
-      throw err
-    }
-
-    await rm(filepath, { recursive: true, force: true })
-    contentInfo.valid = false
-  }
-  return contentInfo
-}
-
-async function rebuildIndex (cache, opts) {
-  opts.log.silly('verify', 'rebuilding index')
-  const entries = await index.ls(cache)
-  const stats = {
-    missingContent: 0,
-    rejectedEntries: 0,
-    totalEntries: 0,
-  }
-  const buckets = {}
-  for (const k in entries) {
-    /* istanbul ignore else */
-    if (hasOwnProperty(entries, k)) {
-      const hashed = index.hashKey(k)
-      const entry = entries[k]
-      const excluded = opts.filter && !opts.filter(entry)
-      excluded && stats.rejectedEntries++
-      if (buckets[hashed] && !excluded) {
-        buckets[hashed].push(entry)
-      } else if (buckets[hashed] && excluded) {
-        // skip
-      } else if (excluded) {
-        buckets[hashed] = []
-        buckets[hashed]._path = index.bucketPath(cache, k)
-      } else {
-        buckets[hashed] = [entry]
-        buckets[hashed]._path = index.bucketPath(cache, k)
-      }
-    }
-  }
-  await pMap(
-    Object.keys(buckets),
-    (key) => {
-      return rebuildBucket(cache, buckets[key], stats, opts)
-    },
-    { concurrency: opts.concurrency }
-  )
-  return stats
-}
-
-async function rebuildBucket (cache, bucket, stats, opts) {
-  await truncate(bucket._path)
-  // This needs to be serialized because cacache explicitly
-  // lets very racy bucket conflicts clobber each other.
-  for (const entry of bucket) {
-    const content = contentPath(cache, entry.integrity)
-    try {
-      await stat(content)
-      await index.insert(cache, entry.key, entry.integrity, {
-        metadata: entry.metadata,
-        size: entry.size,
-        time: entry.time,
-      })
-      stats.totalEntries++
-    } catch (err) {
-      if (err.code === 'ENOENT') {
-        stats.rejectedEntries++
-        stats.missingContent++
-      } else {
-        throw err
-      }
-    }
-  }
-}
-
-function cleanTmp (cache, opts) {
-  opts.log.silly('verify', 'cleaning tmp directory')
-  return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
-}
-
-async function writeVerifile (cache, opts) {
-  const verifile = path.join(cache, '_lastverified')
-  opts.log.silly('verify', 'writing verifile to ' + verifile)
-  return writeFile(verifile, `${Date.now()}`)
-}
-
-module.exports.lastRun = lastRun
-
-async function lastRun (cache) {
-  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
-  return new Date(+data)
-}
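
For reference, a minimal sketch (not part of the patch) of the mark-and-sweep shape of garbageCollect above, using plain in-memory stand-ins for the index entries and the content store:

    // mark: every integrity referenced by a live index entry survives;
    // sweep: content nothing points at is reclaimed
    function sweep (indexEntries, contentStore) {
      const live = new Set(indexEntries.map((e) => e.integrity))
      const stats = { verifiedContent: 0, reclaimedCount: 0 }
      for (const digest of [...contentStore.keys()]) {
        if (live.has(digest)) {
          stats.verifiedContent++
        } else {
          contentStore.delete(digest)
          stats.reclaimedCount++
        }
      }
      return stats
    }

    const content = new Map([['sha512-aaa', 'A'], ['sha512-bbb', 'B']])
    console.log(sweep([{ key: 'k', integrity: 'sha512-aaa' }], content))
    // { verifiedContent: 1, reclaimedCount: 1 } ('sha512-bbb' was swept)
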
diff --git a/node_modules/npm-profile/node_modules/cacache/package.json b/node_modules/npm-profile/node_modules/cacache/package.json
deleted file mode 100644
index ab58cb8b7c50f..0000000000000
--- a/node_modules/npm-profile/node_modules/cacache/package.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
-  "name": "cacache",
-  "version": "17.1.4",
-  "cache-version": {
-    "content": "2",
-    "index": "5"
-  },
-  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "coverage": "tap",
-    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
-    "lint": "eslint \"**/*.js\"",
-    "npmclilint": "npmcli-lint",
-    "lintfix": "npm run lint -- --fix",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "posttest": "npm run lint",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/cacache.git"
-  },
-  "keywords": [
-    "cache",
-    "caching",
-    "content-addressable",
-    "sri",
-    "sri hash",
-    "subresource integrity",
-    "cache",
-    "storage",
-    "store",
-    "file store",
-    "filesystem",
-    "disk cache",
-    "disk storage"
-  ],
-  "license": "ISC",
-  "dependencies": {
-    "@npmcli/fs": "^3.1.0",
-    "fs-minipass": "^3.0.0",
-    "glob": "^10.2.2",
-    "lru-cache": "^7.7.1",
-    "minipass": "^7.0.3",
-    "minipass-collect": "^1.0.2",
-    "minipass-flush": "^1.0.5",
-    "minipass-pipeline": "^1.2.4",
-    "p-map": "^4.0.0",
-    "ssri": "^10.0.0",
-    "tar": "^6.1.11",
-    "unique-filename": "^3.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.0.0"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "windowsCI": false,
-    "version": "4.18.0",
-    "publish": "true"
-  },
-  "author": "GitHub Inc.",
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/LICENSE b/node_modules/npm-profile/node_modules/hosted-git-info/LICENSE
deleted file mode 100644
index 45055763dc838..0000000000000
--- a/node_modules/npm-profile/node_modules/hosted-git-info/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright (c) 2015, Rebecca Turner
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/lib/from-url.js b/node_modules/npm-profile/node_modules/hosted-git-info/lib/from-url.js
deleted file mode 100644
index efc1247d59d12..0000000000000
--- a/node_modules/npm-profile/node_modules/hosted-git-info/lib/from-url.js
+++ /dev/null
@@ -1,122 +0,0 @@
-'use strict'
-
-const parseUrl = require('./parse-url')
-
-// look for github shorthand inputs, such as npm/cli
-const isGitHubShorthand = (arg) => {
-  // it cannot contain whitespace before the first #
-  // it cannot start with a / because that's probably an absolute file path
-  // but it must include a slash since repos are username/repository
-  // it cannot start with a . because that's probably a relative file path
-  // it cannot start with an @ because that's a scoped package if it passes the other tests
-  // it cannot contain a : before a # because that tells us that there's a protocol
-  // a second / may not exist before a #
-  const firstHash = arg.indexOf('#')
-  const firstSlash = arg.indexOf('/')
-  const secondSlash = arg.indexOf('/', firstSlash + 1)
-  const firstColon = arg.indexOf(':')
-  const firstSpace = /\s/.exec(arg)
-  const firstAt = arg.indexOf('@')
-
-  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
-  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
-  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
-  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
-  const hasSlash = firstSlash > 0
-  // if a # is found, what we really want to know is that the character
-  // immediately before # is not a /
-  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
-  const doesNotStartWithDot = !arg.startsWith('.')
-
-  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
-    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
-    secondSlashOnlyAfterHash
-}
-
-module.exports = (giturl, opts, { gitHosts, protocols }) => {
-  if (!giturl) {
-    return
-  }
-
-  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
-  const parsed = parseUrl(correctedUrl, protocols)
-  if (!parsed) {
-    return
-  }
-
-  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
-  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
-    ? parsed.hostname.slice(4)
-    : parsed.hostname]
-  const gitHostName = gitHostShortcut || gitHostDomain
-  if (!gitHostName) {
-    return
-  }
-
-  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
-  let auth = null
-  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
-    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
-  }
-
-  let committish = null
-  let user = null
-  let project = null
-  let defaultRepresentation = null
-
-  try {
-    if (gitHostShortcut) {
-      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
-      const firstAt = pathname.indexOf('@')
-      // we ignore auth for shortcuts, so just trim it out
-      if (firstAt > -1) {
-        pathname = pathname.slice(firstAt + 1)
-      }
-
-      const lastSlash = pathname.lastIndexOf('/')
-      if (lastSlash > -1) {
-        user = decodeURIComponent(pathname.slice(0, lastSlash))
-        // we want nulls only, never empty strings
-        if (!user) {
-          user = null
-        }
-        project = decodeURIComponent(pathname.slice(lastSlash + 1))
-      } else {
-        project = decodeURIComponent(pathname)
-      }
-
-      if (project.endsWith('.git')) {
-        project = project.slice(0, -4)
-      }
-
-      if (parsed.hash) {
-        committish = decodeURIComponent(parsed.hash.slice(1))
-      }
-
-      defaultRepresentation = 'shortcut'
-    } else {
-      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
-        return
-      }
-
-      const segments = gitHostInfo.extract(parsed)
-      if (!segments) {
-        return
-      }
-
-      user = segments.user && decodeURIComponent(segments.user)
-      project = decodeURIComponent(segments.project)
-      committish = decodeURIComponent(segments.committish)
-      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
-    }
-  } catch (err) {
-    /* istanbul ignore else */
-    if (err instanceof URIError) {
-      return
-    } else {
-      throw err
-    }
-  }
-
-  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
-}
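
For reference, a reduced approximation (not part of the patch) of the shorthand test above: exactly one slash, and no leading '.', leading '/', leading '@', ':', or whitespace before the committish. The real predicate is index-based and handles a few more edge cases.

    // hypothetical simplification of isGitHubShorthand
    const looksLikeShorthand = (arg) => {
      const head = arg.split('#')[0] // the rules only constrain text before '#'
      return /^[^./:@\s][^/:@\s]*\/[^/:@\s]+$/.test(head)
    }

    console.log(looksLikeShorthand('npm/cli'))          // true
    console.log(looksLikeShorthand('npm/cli#v9.0.0'))   // true
    console.log(looksLikeShorthand('./local/path'))     // false (relative path)
    console.log(looksLikeShorthand('@scope/pkg'))       // false (scoped package)
    console.log(looksLikeShorthand('git@host.com:a/b')) // false (scp-style url)
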
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/lib/hosts.js b/node_modules/npm-profile/node_modules/hosted-git-info/lib/hosts.js
deleted file mode 100644
index 013712b7842c8..0000000000000
--- a/node_modules/npm-profile/node_modules/hosted-git-info/lib/hosts.js
+++ /dev/null
@@ -1,228 +0,0 @@
-/* eslint-disable max-len */
-
-'use strict'
-
-const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
-const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
-const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-')
-
-const defaults = {
-  sshtemplate: ({ domain, user, project, committish }) =>
-    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, user, project, committish }) =>
-    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
-  browsetemplate: ({ domain, user, project, committish, treepath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  docstemplate: ({ domain, user, project, treepath, committish }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
-  shortcuttemplate: ({ type, user, project, committish }) =>
-    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ user, project, committish }) =>
-    `${user}/${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, user, project }) =>
-    `https://${domain}/${user}/${project}/issues`,
-  hashformat: formatHashFragment,
-}
-
-const hosts = {}
-hosts.github = {
-  // First two are insecure and generally shouldn't be used any more, but
-  // they are still supported.
-  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'github.com',
-  treepath: 'tree',
-  blobpath: 'blob',
-  editpath: 'edit',
-  filetemplate: ({ auth, user, project, committish, path }) =>
-    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
-  gittemplate: ({ auth, domain, user, project, committish }) =>
-    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, type, committish] = url.pathname.split('/', 5)
-    if (type && type !== 'tree') {
-      return
-    }
-
-    if (!type) {
-      committish = url.hash.slice(1)
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish }
-  },
-}
-
-hosts.bitbucket = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'bitbucket.org',
-  treepath: 'src',
-  blobpath: 'src',
-  editpath: '?mode=edit',
-  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (['get'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gitlab = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gitlab.com',
-  treepath: 'tree',
-  blobpath: 'tree',
-  editpath: '-/edit',
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    const path = url.pathname.slice(1)
-    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
-      return
-    }
-
-    const segments = path.split('/')
-    let project = segments.pop()
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    const user = segments.join('/')
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gist = {
-  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gist.github.com',
-  editpath: 'edit',
-  sshtemplate: ({ domain, project, committish }) =>
-    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, project, committish }) =>
-    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
-  browsetemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  docstemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  httpstemplate: ({ domain, project, committish }) =>
-    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ user, project, committish, path }) =>
-    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
-  shortcuttemplate: ({ type, project, committish }) =>
-    `${type}:${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ project, committish }) =>
-    `${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, project }) =>
-    `https://${domain}/${project}`,
-  gittemplate: ({ domain, project, committish }) =>
-    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ project, committish }) =>
-    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (aux === 'raw') {
-      return
-    }
-
-    if (!project) {
-      if (!user) {
-        return
-      }
-
-      project = user
-      user = null
-    }
-
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-  hashformat: function (fragment) {
-    return fragment && 'file-' + formatHashFragment(fragment)
-  },
-}
-
-hosts.sourcehut = {
-  protocols: ['git+ssh:', 'https:'],
-  domain: 'git.sr.ht',
-  treepath: 'tree',
-  blobpath: 'tree',
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
-  httpstemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
-  bugstemplate: ({ user, project }) =>
-    `https://todo.sr.ht/${user}/${project}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-
-    // tarball url
-    if (['archive'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  hosts[name] = Object.assign({}, defaults, host)
-}
-
-module.exports = hosts
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/lib/index.js b/node_modules/npm-profile/node_modules/hosted-git-info/lib/index.js
deleted file mode 100644
index a7339c217e9a3..0000000000000
--- a/node_modules/npm-profile/node_modules/hosted-git-info/lib/index.js
+++ /dev/null
@@ -1,179 +0,0 @@
-'use strict'
-
-const LRU = require('lru-cache')
-const hosts = require('./hosts.js')
-const fromUrl = require('./from-url.js')
-const parseUrl = require('./parse-url.js')
-
-const cache = new LRU({ max: 1000 })
-
-class GitHost {
-  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
-    Object.assign(this, GitHost.#gitHosts[type], {
-      type,
-      user,
-      auth,
-      project,
-      committish,
-      default: defaultRepresentation,
-      opts,
-    })
-  }
-
-  static #gitHosts = { byShortcut: {}, byDomain: {} }
-  static #protocols = {
-    'git+ssh:': { name: 'sshurl' },
-    'ssh:': { name: 'sshurl' },
-    'git+https:': { name: 'https', auth: true },
-    'git:': { auth: true },
-    'http:': { auth: true },
-    'https:': { auth: true },
-    'git+http:': { auth: true },
-  }
-
-  static addHost (name, host) {
-    GitHost.#gitHosts[name] = host
-    GitHost.#gitHosts.byDomain[host.domain] = name
-    GitHost.#gitHosts.byShortcut[`${name}:`] = name
-    GitHost.#protocols[`${name}:`] = { name }
-  }
-
-  static fromUrl (giturl, opts) {
-    if (typeof giturl !== 'string') {
-      return
-    }
-
-    const key = giturl + JSON.stringify(opts || {})
-
-    if (!cache.has(key)) {
-      const hostArgs = fromUrl(giturl, opts, {
-        gitHosts: GitHost.#gitHosts,
-        protocols: GitHost.#protocols,
-      })
-      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
-    }
-
-    return cache.get(key)
-  }
-
-  static parseUrl (url) {
-    return parseUrl(url)
-  }
-
-  #fill (template, opts) {
-    if (typeof template !== 'function') {
-      return null
-    }
-
-    const options = { ...this, ...this.opts, ...opts }
-
-    // the path should always be set so we don't end up with 'undefined' in urls
-    if (!options.path) {
-      options.path = ''
-    }
-
-    // template functions will insert the leading slash themselves
-    if (options.path.startsWith('/')) {
-      options.path = options.path.slice(1)
-    }
-
-    if (options.noCommittish) {
-      options.committish = null
-    }
-
-    const result = template(options)
-    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
-  }
-
-  hash () {
-    return this.committish ? `#${this.committish}` : ''
-  }
-
-  ssh (opts) {
-    return this.#fill(this.sshtemplate, opts)
-  }
-
-  sshurl (opts) {
-    return this.#fill(this.sshurltemplate, opts)
-  }
-
-  browse (path, ...args) {
-    // not a string, treat path as opts
-    if (typeof path !== 'string') {
-      return this.#fill(this.browsetemplate, path)
-    }
-
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browsetreetemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  // If the path is known to be a file, then browseFile should be used. For some hosts
-  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
-  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
-  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
-  // does not redirect to a different commit.
-  browseFile (path, ...args) {
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browseblobtemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  docs (opts) {
-    return this.#fill(this.docstemplate, opts)
-  }
-
-  bugs (opts) {
-    return this.#fill(this.bugstemplate, opts)
-  }
-
-  https (opts) {
-    return this.#fill(this.httpstemplate, opts)
-  }
-
-  git (opts) {
-    return this.#fill(this.gittemplate, opts)
-  }
-
-  shortcut (opts) {
-    return this.#fill(this.shortcuttemplate, opts)
-  }
-
-  path (opts) {
-    return this.#fill(this.pathtemplate, opts)
-  }
-
-  tarball (opts) {
-    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
-  }
-
-  file (path, opts) {
-    return this.#fill(this.filetemplate, { ...opts, path })
-  }
-
-  edit (path, opts) {
-    return this.#fill(this.edittemplate, { ...opts, path })
-  }
-
-  getDefaultRepresentation () {
-    return this.default
-  }
-
-  toString (opts) {
-    if (this.default && typeof this[this.default] === 'function') {
-      return this[this.default](opts)
-    }
-
-    return this.sshurl(opts)
-  }
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  GitHost.addHost(name, host)
-}
-
-module.exports = GitHost
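
For reference, a usage sketch (not part of the patch) of the GitHost class above, exercising a few of the template-backed methods:

    const hostedGitInfo = require('hosted-git-info')

    const info = hostedGitInfo.fromUrl('git@github.com:npm/cli.git')
    console.log(info.type)       // 'github'
    console.log(info.https())    // 'git+https://github.com/npm/cli.git'
    console.log(info.shortcut()) // 'github:npm/cli'

    // shorthand input routes through isGitHubShorthand and the 'github:' shortcut
    const short = hostedGitInfo.fromUrl('npm/cli#v9.0.0')
    console.log(short.tarball())
    // 'https://codeload.github.com/npm/cli/tar.gz/v9.0.0'
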
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/npm-profile/node_modules/hosted-git-info/lib/parse-url.js
deleted file mode 100644
index 7d5489c008ab4..0000000000000
--- a/node_modules/npm-profile/node_modules/hosted-git-info/lib/parse-url.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const url = require('url')
-
-const lastIndexOfBefore = (str, char, beforeChar) => {
-  const startPosition = str.indexOf(beforeChar)
-  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
-}
-
-const safeUrl = (u) => {
-  try {
-    return new url.URL(u)
-  } catch {
-    // this fn should never throw
-  }
-}
-
-// accepts input like git:github.com:user/repo and inserts the // after the first :
-const correctProtocol = (arg, protocols) => {
-  const firstColon = arg.indexOf(':')
-  const proto = arg.slice(0, firstColon + 1)
-  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
-    return arg
-  }
-
-  const firstAt = arg.indexOf('@')
-  if (firstAt > -1) {
-    if (firstAt > firstColon) {
-      return `git+ssh://${arg}`
-    } else {
-      return arg
-    }
-  }
-
-  const doubleSlash = arg.indexOf('//')
-  if (doubleSlash === firstColon + 1) {
-    return arg
-  }
-
-  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
-}
-
-// attempt to correct an scp style url so that it will parse with `new URL()`
-const correctUrl = (giturl) => {
-  // ignore @ that come after the first hash since the denotes the start
-  // of a committish which can contain @ characters
-  const firstAt = lastIndexOfBefore(giturl, '@', '#')
-  // ignore colons that come after the hash since that could include colons such as:
-  // git@github.com:user/package-2#semver:^1.0.0
-  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
-
-  if (lastColonBeforeHash > firstAt) {
-    // the last : comes after the first @ (or there is no @)
-    // like it would in:
-    // proto://hostname.com:user/repo
-    // username@hostname.com:user/repo
-    // :password@hostname.com:user/repo
-    // username:password@hostname.com:user/repo
-    // proto://username@hostname.com:user/repo
-    // proto://:password@hostname.com:user/repo
-    // proto://username:password@hostname.com:user/repo
-    // then we replace the last : with a / to create a valid path
-    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
-  }
-
-  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
-    // we have no : at all
-    // as it would be in:
-    // username@hostname.com/user/repo
-    // then we prepend a protocol
-    giturl = `git+ssh://${giturl}`
-  }
-
-  return giturl
-}
-
-module.exports = (giturl, protocols) => {
-  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
-  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
-}
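
For reference, a sketch (not part of the patch) of why correctUrl above exists: an scp-style address is not a valid WHATWG URL until the host:path colon becomes a slash and a protocol is prefixed.

    const scp = 'git@github.com:npm/cli.git'

    try {
      new URL(scp) // no scheme, so this throws
    } catch {
      console.log('bare scp-style string does not parse')
    }

    // the corrected form parses fine (replace() swaps only the first ':')
    const fixed = new URL('git+ssh://' + scp.replace(':', '/'))
    console.log(fixed.hostname, fixed.pathname) // 'github.com' '/npm/cli.git'
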
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/package.json b/node_modules/npm-profile/node_modules/hosted-git-info/package.json
deleted file mode 100644
index 612259948afe7..0000000000000
--- a/node_modules/npm-profile/node_modules/hosted-git-info/package.json
+++ /dev/null
@@ -1,59 +0,0 @@
-{
-  "name": "hosted-git-info",
-  "version": "6.1.1",
-  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
-  "main": "./lib/index.js",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/hosted-git-info.git"
-  },
-  "keywords": [
-    "git",
-    "github",
-    "bitbucket",
-    "gitlab"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/hosted-git-info/issues"
-  },
-  "homepage": "https://github.com/npm/hosted-git-info",
-  "scripts": {
-    "posttest": "npm run lint",
-    "snap": "tap",
-    "test": "tap",
-    "test:coverage": "tap --coverage-report=html",
-    "lint": "eslint \"**/*.js\"",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run lint -- --fix",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "dependencies": {
-    "lru-cache": "^7.5.1"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.7.1",
-    "tap": "^16.0.1"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "color": 1,
-    "coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.7.1"
-  }
-}
diff --git a/node_modules/npm-profile/node_modules/lru-cache/LICENSE b/node_modules/npm-profile/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/npm-profile/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-profile/node_modules/lru-cache/index.js b/node_modules/npm-profile/node_modules/lru-cache/index.js
deleted file mode 100644
index 48e99fe5e5a70..0000000000000
--- a/node_modules/npm-profile/node_modules/lru-cache/index.js
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
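
A standalone sketch of the width selection getUintArray() implements above (pickIndexArray is a hypothetical stand-in): the free-index Stack stores indexes in the smallest typed array that can address `max` slots.

const pickIndexArray = (max) =>
  max <= 2 ** 8 ? Uint8Array
    : max <= 2 ** 16 ? Uint16Array
      : max <= 2 ** 32 ? Uint32Array
        : Array // stand-in for ZeroArray beyond 2^32

console.log(new (pickIndexArray(200))(3))   // Uint8Array(3) [ 0, 0, 0 ]
console.log(new (pickIndexArray(70000))(3)) // Uint32Array(3) [ 0, 0, 0 ]
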
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.now + status.ttl - status.start
-      }
-    }
-
-    // debounce calls to perf.now() to one per ttlResolution window so
-    // we're not hitting that costly call repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
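
A hedged usage sketch of the TTL machinery above, against the public lru-cache@7 API (in v7, require() returns the class directly, as the module.exports at the end of this file shows):

const LRUCache = require('lru-cache')

const cache = new LRUCache({ max: 10, ttl: 1000, allowStale: true })
cache.set('a', 1)
console.log(cache.getRemainingTTL('a')) // ~1000 (ms)
setTimeout(() => {
  console.log(cache.get('a')) // 1 -- stale value served once (allowStale)
  console.log(cache.get('a')) // undefined -- the stale get deleted the entry
}, 1100)
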
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation returned an invalid value (expected a positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
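
A sketch of the size-bounded eviction addItemSize() performs above: calculatedSize is held at or below maxSize by evicting from the head (least recently used) end.

const LRUCache = require('lru-cache')

const cache = new LRUCache({
  maxSize: 100,
  sizeCalculation: (value) => value.length,
})
cache.set('big', 'x'.repeat(80))
cache.set('more', 'y'.repeat(40)) // 80 + 40 > 100 -> 'big' is evicted
console.log(cache.has('big'))     // false
console.log(cache.calculatedSize) // 40
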
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
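
Iteration-order sketch: indexes() above walks tail to head, so entries()/keys()/values() yield the most recently used entry first; the r-prefixed generators walk the other way, and Symbol.iterator makes the cache itself iterable.

const LRUCache = require('lru-cache')

const cache = new LRUCache({ max: 3 })
cache.set('a', 1)
cache.set('b', 2)
console.log([...cache.keys()])  // [ 'b', 'a' ] -- most recent first
console.log([...cache.rkeys()]) // [ 'a', 'b' ]
console.log([...cache])         // [ [ 'b', 2 ], [ 'a', 1 ] ]
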
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
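
Round-trip sketch for dump()/load() above: dump() emits [key, entry] pairs oldest-first with portable start timestamps, so load() can rebuild the cache (including recency order) in another process.

const LRUCache = require('lru-cache')

const a = new LRUCache({ max: 5, ttl: 60000 })
a.set('k', 'v')

const b = new LRUCache({ max: 5, ttl: 60000 })
b.load(a.dump()) // e.g. [ [ 'k', { value: 'v', ttl: 60000, start: ... } ] ]
console.log(b.get('k')) // 'v'
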
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
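
A sketch of the `status` option threaded through has() above (and get()/set()/fetch() below): pass an empty object and the cache records what happened to it.

const LRUCache = require('lru-cache')

const cache = new LRUCache({ max: 2 })
cache.set('a', 1)

const status = {}
cache.has('a', { status })
console.log(status.has) // 'hit'
cache.get('nope', { status })
console.log(status.get) // 'miss'
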
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
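
A hedged sketch of the fetch() flow above: a miss dispatches fetchMethod in the background, and with allowStale a stale hit returns the old value immediately while the refresh proceeds.

const LRUCache = require('lru-cache')

const cache = new LRUCache({
  max: 100,
  ttl: 5000,
  allowStale: true,
  // stand-in for a real lookup; `signal` aborts on eviction/replacement
  fetchMethod: async (key, staleValue, { signal }) => `value-for-${key}`,
})

cache.fetch('user:1').then((v) => console.log(v)) // 'value-for-user:1'
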
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet
-        // it's not stale, which means this isn't a staleWhileRefetching.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
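
Recency sketch for moveToTail() above: a get() moves the entry to the tail (most recently used), so a colder entry is evicted first when the cache is at capacity.

const LRUCache = require('lru-cache')

const cache = new LRUCache({ max: 2 })
cache.set('a', 1)
cache.set('b', 2)
cache.get('a')    // 'a' becomes most recently used
cache.set('c', 3) // at capacity -> evicts 'b', the least recently used
console.log([...cache.keys()]) // [ 'c', 'a' ]
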
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-module.exports = LRUCache
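
Note: the index.mjs removed below is the ESM build of this same source; consumers choose the entry point by module system (the mjs build presumably exposes the class as its default export).

const LRUCache = require('lru-cache') // CommonJS entry (this file)
// import LRUCache from 'lru-cache'   // ESM entry (index.mjs, below)
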
diff --git a/node_modules/npm-profile/node_modules/lru-cache/index.mjs b/node_modules/npm-profile/node_modules/lru-cache/index.mjs
deleted file mode 100644
index 4a0b4813ec515..0000000000000
--- a/node_modules/npm-profile/node_modules/lru-cache/index.mjs
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.now + status.ttl - status.start
-      }
-    }
-
-    // debounce calls to perf.now() to one per ttlResolution window so
-    // we're not hitting that costly call repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation returned an invalid value (expected a positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet
-        // it's not stale, which means this isn't a staleWhileRefetching.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-export default LRUCache
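
The file removed above is the bundled ESM build of lru-cache 7.x, whose fetch() combines get/set with a user-supplied fetchMethod and the stale-while-revalidate behavior handled by the __staleWhileFetching machinery. A minimal usage sketch of that API; the key and fetch body are illustrative, not taken from this patch:

import LRUCache from 'lru-cache'

const cache = new LRUCache({
  max: 500,            // hold at most 500 entries
  ttl: 60_000,         // entries go stale after one minute
  allowStale: true,    // a stale value may be served while refreshing
  // called on fetch() when the key is missing or stale; staleValue is
  // the previous value, if any, and signal aborts superseded fetches
  fetchMethod: async (key, staleValue, { signal }) => {
    const res = await fetch(`https://example.com/${key}`, { signal }) // Node 18+ global fetch
    return res.json()
  },
})

const value = await cache.fetch('some-key') // hit, stale value, or fresh fetch
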
diff --git a/node_modules/npm-profile/node_modules/lru-cache/package.json b/node_modules/npm-profile/node_modules/lru-cache/package.json
deleted file mode 100644
index 9684991727e7a..0000000000000
--- a/node_modules/npm-profile/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,96 +0,0 @@
-{
-  "name": "lru-cache",
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "7.18.3",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "node ./scripts/transpile-to-esm.js",
-    "size": "size-limit",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc ./index.d.ts"
-  },
-  "type": "commonjs",
-  "main": "./index.js",
-  "module": "./index.mjs",
-  "types": "./index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./index.d.ts",
-        "default": "./index.mjs"
-      },
-      "require": {
-        "types": "./index.d.ts",
-        "default": "./index.js"
-      }
-    },
-    "./package.json": "./package.json"
-  },
-  "repository": "git://github.com/isaacs/node-lru-cache.git",
-  "devDependencies": {
-    "@size-limit/preset-small-lib": "^7.0.8",
-    "@types/node": "^17.0.31",
-    "@types/tap": "^15.0.6",
-    "benchmark": "^2.1.4",
-    "c8": "^7.11.2",
-    "clock-mock": "^1.0.6",
-    "eslint-config-prettier": "^8.5.0",
-    "prettier": "^2.6.2",
-    "size-limit": "^7.0.8",
-    "tap": "^16.3.4",
-    "ts-node": "^10.7.0",
-    "tslib": "^2.4.0",
-    "typedoc": "^0.23.24",
-    "typescript": "^4.6.4"
-  },
-  "license": "ISC",
-  "files": [
-    "index.js",
-    "index.mjs",
-    "index.d.ts"
-  ],
-  "engines": {
-    "node": ">=12"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--include=index.js"
-    ],
-    "node-arg": [
-      "--expose-gc",
-      "--require",
-      "ts-node/register"
-    ],
-    "ts": false
-  },
-  "size-limit": [
-    {
-      "path": "./index.js"
-    }
-  ]
-}
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/LICENSE b/node_modules/npm-profile/node_modules/make-fetch-happen/LICENSE
deleted file mode 100644
index 1808eb2844231..0000000000000
--- a/node_modules/npm-profile/node_modules/make-fetch-happen/LICENSE
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright 2017-2022 (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/entry.js
deleted file mode 100644
index 45141095074ec..0000000000000
--- a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/entry.js
+++ /dev/null
@@ -1,469 +0,0 @@
-const { Request, Response } = require('minipass-fetch')
-const { Minipass } = require('minipass')
-const MinipassFlush = require('minipass-flush')
-const cacache = require('cacache')
-const url = require('url')
-
-const CachingMinipassPipeline = require('../pipeline.js')
-const CachePolicy = require('./policy.js')
-const cacheKey = require('./key.js')
-const remote = require('../remote.js')
-
-const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
-
-// allow list for request headers that will be written to the cache index
-// note: we will also store any request headers
-// that are named in a response's vary header
-const KEEP_REQUEST_HEADERS = [
-  'accept-charset',
-  'accept-encoding',
-  'accept-language',
-  'accept',
-  'cache-control',
-]
-
-// allow list for response headers that will be written to the cache index
-// note: we must not store the real response's age header, or when we load
-// a cache policy based on the metadata it will think the cached response
-// is always stale
-const KEEP_RESPONSE_HEADERS = [
-  'cache-control',
-  'content-encoding',
-  'content-language',
-  'content-type',
-  'date',
-  'etag',
-  'expires',
-  'last-modified',
-  'link',
-  'location',
-  'pragma',
-  'vary',
-]
-
-// return an object containing all metadata to be written to the index
-const getMetadata = (request, response, options) => {
-  const metadata = {
-    time: Date.now(),
-    url: request.url,
-    reqHeaders: {},
-    resHeaders: {},
-
-    // options on which we must match the request and vary the response
-    options: {
-      compress: options.compress != null ? options.compress : request.compress,
-    },
-  }
-
-  // only save the status if it's not a 200 or 304
-  if (response.status !== 200 && response.status !== 304) {
-    metadata.status = response.status
-  }
-
-  for (const name of KEEP_REQUEST_HEADERS) {
-    if (request.headers.has(name)) {
-      metadata.reqHeaders[name] = request.headers.get(name)
-    }
-  }
-
-  // if the request's host header differs from the host in the url
-  // we need to keep it, otherwise it's just noise and we ignore it
-  const host = request.headers.get('host')
-  const parsedUrl = new url.URL(request.url)
-  if (host && parsedUrl.host !== host) {
-    metadata.reqHeaders.host = host
-  }
-
-  // if the response has a vary header, make sure
-  // we store the relevant request headers too
-  if (response.headers.has('vary')) {
-    const vary = response.headers.get('vary')
-    // a vary of "*" means every header causes a different response.
-    // in that scenario, we do not include any additional headers
-    // as the freshness check will always fail anyway and we don't
-    // want to bloat the cache indexes
-    if (vary !== '*') {
-      // copy any other request headers that will vary the response
-      const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
-      for (const name of varyHeaders) {
-        if (request.headers.has(name)) {
-          metadata.reqHeaders[name] = request.headers.get(name)
-        }
-      }
-    }
-  }
-
-  for (const name of KEEP_RESPONSE_HEADERS) {
-    if (response.headers.has(name)) {
-      metadata.resHeaders[name] = response.headers.get(name)
-    }
-  }
-
-  for (const name of options.cacheAdditionalHeaders) {
-    if (response.headers.has(name)) {
-      metadata.resHeaders[name] = response.headers.get(name)
-    }
-  }
-
-  return metadata
-}
-
-// symbols used to hide objects that may be lazily evaluated in a getter
-const _request = Symbol('request')
-const _response = Symbol('response')
-const _policy = Symbol('policy')
-
-class CacheEntry {
-  constructor ({ entry, request, response, options }) {
-    if (entry) {
-      this.key = entry.key
-      this.entry = entry
-      // previous versions of this module didn't write an explicit timestamp in
-      // the metadata, so fall back to the entry's timestamp. we can't use the
-      // entry timestamp to determine staleness because cacache will update it
-      // when it verifies its data
-      this.entry.metadata.time = this.entry.metadata.time || this.entry.time
-    } else {
-      this.key = cacheKey(request)
-    }
-
-    this.options = options
-
-    // these properties are behind getters that lazily evaluate
-    this[_request] = request
-    this[_response] = response
-    this[_policy] = null
-  }
-
-  // returns a CacheEntry instance that satisfies the given request
-  // or undefined if no existing entry satisfies
-  static async find (request, options) {
-    try {
-      // compacts the index and returns an array of unique entries
-      var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
-        const entryA = new CacheEntry({ entry: A, options })
-        const entryB = new CacheEntry({ entry: B, options })
-        return entryA.policy.satisfies(entryB.request)
-      }, {
-        validateEntry: (entry) => {
-          // clean out entries with a buggy content-encoding value
-          if (entry.metadata &&
-              entry.metadata.resHeaders &&
-              entry.metadata.resHeaders['content-encoding'] === null) {
-            return false
-          }
-
-          // if an integrity is null, it needs to have a status specified
-          if (entry.integrity === null) {
-            return !!(entry.metadata && entry.metadata.status)
-          }
-
-          return true
-        },
-      })
-    } catch (err) {
-      // if the compact request fails, ignore the error and return
-      return
-    }
-
-    // a cache mode of 'reload' means to behave as though we have no cache
-    // on the way to the network. return undefined to allow cacheFetch to
-    // create a brand new request no matter what.
-    if (options.cache === 'reload') {
-      return
-    }
-
-    // find the specific entry that satisfies the request
-    let match
-    for (const entry of matches) {
-      const _entry = new CacheEntry({
-        entry,
-        options,
-      })
-
-      if (_entry.policy.satisfies(request)) {
-        match = _entry
-        break
-      }
-    }
-
-    return match
-  }
-
-  // if the user made a PUT/POST/PATCH then we invalidate our
-  // cache for the same url by deleting the index entirely
-  static async invalidate (request, options) {
-    const key = cacheKey(request)
-    try {
-      await cacache.rm.entry(options.cachePath, key, { removeFully: true })
-    } catch (err) {
-      // ignore errors
-    }
-  }
-
-  get request () {
-    if (!this[_request]) {
-      this[_request] = new Request(this.entry.metadata.url, {
-        method: 'GET',
-        headers: this.entry.metadata.reqHeaders,
-        ...this.entry.metadata.options,
-      })
-    }
-
-    return this[_request]
-  }
-
-  get response () {
-    if (!this[_response]) {
-      this[_response] = new Response(null, {
-        url: this.entry.metadata.url,
-        counter: this.options.counter,
-        status: this.entry.metadata.status || 200,
-        headers: {
-          ...this.entry.metadata.resHeaders,
-          'content-length': this.entry.size,
-        },
-      })
-    }
-
-    return this[_response]
-  }
-
-  get policy () {
-    if (!this[_policy]) {
-      this[_policy] = new CachePolicy({
-        entry: this.entry,
-        request: this.request,
-        response: this.response,
-        options: this.options,
-      })
-    }
-
-    return this[_policy]
-  }
-
-  // wraps the response in a pipeline that stores the data
-  // in the cache while the user consumes it
-  async store (status) {
-    // if we got a status other than 200, 301, or 308,
-    // or the CachePolicy forbid storage, append the
-    // cache status header and return it untouched
-    if (
-      this.request.method !== 'GET' ||
-      ![200, 301, 308].includes(this.response.status) ||
-      !this.policy.storable()
-    ) {
-      this.response.headers.set('x-local-cache-status', 'skip')
-      return this.response
-    }
-
-    const size = this.response.headers.get('content-length')
-    const cacheOpts = {
-      algorithms: this.options.algorithms,
-      metadata: getMetadata(this.request, this.response, this.options),
-      size,
-      integrity: this.options.integrity,
-      integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
-    }
-
-    let body = null
-    // we only set a body if the status is a 200, redirects are
-    // stored as metadata only
-    if (this.response.status === 200) {
-      let cacheWriteResolve, cacheWriteReject
-      const cacheWritePromise = new Promise((resolve, reject) => {
-        cacheWriteResolve = resolve
-        cacheWriteReject = reject
-      })
-
-      body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
-        flush () {
-          return cacheWritePromise
-        },
-      }))
-      // this is always true since if we aren't reusing the one from the remote fetch, we
-      // are using the one from cacache
-      body.hasIntegrityEmitter = true
-
-      const onResume = () => {
-        const tee = new Minipass()
-        const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
-        // re-emit the integrity and size events on our new response body so they can be reused
-        cacheStream.on('integrity', i => body.emit('integrity', i))
-        cacheStream.on('size', s => body.emit('size', s))
-        // stick a flag on here so downstream users will know if they can expect integrity events
-        tee.pipe(cacheStream)
-        // TODO if the cache write fails, log a warning but return the response anyway
-        // eslint-disable-next-line promise/catch-or-return
-        cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
-        body.unshift(tee)
-        body.unshift(this.response.body)
-      }
-
-      body.once('resume', onResume)
-      body.once('end', () => body.removeListener('resume', onResume))
-    } else {
-      await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
-    }
-
-    // note: we do not set the x-local-cache-hash header because we do not know
-    // the hash value until after the write to the cache completes, which doesn't
-    // happen until after the response has been sent and it's too late to write
-    // the header anyway
-    this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
-    this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
-    this.response.headers.set('x-local-cache-mode', 'stream')
-    this.response.headers.set('x-local-cache-status', status)
-    this.response.headers.set('x-local-cache-time', new Date().toISOString())
-    const newResponse = new Response(body, {
-      url: this.response.url,
-      status: this.response.status,
-      headers: this.response.headers,
-      counter: this.options.counter,
-    })
-    return newResponse
-  }
-
-  // use the cached data to create a response and return it
-  async respond (method, options, status) {
-    let response
-    if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
-      // if the request is a HEAD, or the response is a redirect,
-      // then the metadata in the entry already includes everything
-      // we need to build a response
-      response = this.response
-    } else {
-      // we're responding with a full cached response, so create a body
-      // that reads from cacache and attach it to a new Response
-      const body = new Minipass()
-      const headers = { ...this.policy.responseHeaders() }
-
-      const onResume = () => {
-        const cacheStream = cacache.get.stream.byDigest(
-          this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
-        )
-        cacheStream.on('error', async (err) => {
-          cacheStream.pause()
-          if (err.code === 'EINTEGRITY') {
-            await cacache.rm.content(
-              this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
-            )
-          }
-          if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
-            await CacheEntry.invalidate(this.request, this.options)
-          }
-          body.emit('error', err)
-          cacheStream.resume()
-        })
-        // emit the integrity and size events based on our metadata so we're consistent
-        body.emit('integrity', this.entry.integrity)
-        body.emit('size', Number(headers['content-length']))
-        cacheStream.pipe(body)
-      }
-
-      body.once('resume', onResume)
-      body.once('end', () => body.removeListener('resume', onResume))
-      response = new Response(body, {
-        url: this.entry.metadata.url,
-        counter: options.counter,
-        status: 200,
-        headers,
-      })
-    }
-
-    response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
-    response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
-    response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
-    response.headers.set('x-local-cache-mode', 'stream')
-    response.headers.set('x-local-cache-status', status)
-    response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
-    return response
-  }
-
-  // use the provided request along with this cache entry to
-  // revalidate the stored response. returns a response, either
-  // from the cache or from the update
-  async revalidate (request, options) {
-    const revalidateRequest = new Request(request, {
-      headers: this.policy.revalidationHeaders(request),
-    })
-
-    try {
-      // NOTE: be sure to remove the headers property from the
-      // user supplied options, since we have already defined
-      // them on the new request object. if they're still in the
-      // options then those will overwrite the ones from the policy
-      var response = await remote(revalidateRequest, {
-        ...options,
-        headers: undefined,
-      })
-    } catch (err) {
-      // if the network fetch fails, return the stale
-      // cached response unless it has a cache-control
-      // of 'must-revalidate'
-      if (!this.policy.mustRevalidate) {
-        return this.respond(request.method, options, 'stale')
-      }
-
-      throw err
-    }
-
-    if (this.policy.revalidated(revalidateRequest, response)) {
-      // we got a 304, write a new index to the cache and respond from cache
-      const metadata = getMetadata(request, response, options)
-      // 304 responses do not include headers that are specific to the response data
-      // since they do not include a body, so we copy values for headers that were
-      // in the old cache entry to the new one, if the new metadata does not already
-      // include that header
-      for (const name of KEEP_RESPONSE_HEADERS) {
-        if (
-          !hasOwnProperty(metadata.resHeaders, name) &&
-          hasOwnProperty(this.entry.metadata.resHeaders, name)
-        ) {
-          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
-        }
-      }
-
-      for (const name of options.cacheAdditionalHeaders) {
-        const inMeta = hasOwnProperty(metadata.resHeaders, name)
-        const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
-        const inPolicy = hasOwnProperty(this.policy.response.headers, name)
-
-        // if the header is in the existing entry, but it is not in the metadata
-        // then we need to write it to the metadata as this will refresh the on-disk cache
-        if (!inMeta && inEntry) {
-          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
-        }
-        // if the header is in the metadata, but not in the policy, then we need to set
-        // it in the policy so that it's included in the immediate response. future
-        // responses will load a new cache entry, so we don't need to change that
-        if (!inPolicy && inMeta) {
-          this.policy.response.headers[name] = metadata.resHeaders[name]
-        }
-      }
-
-      try {
-        await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
-          size: this.entry.size,
-          metadata,
-        })
-      } catch (err) {
-        // if updating the cache index fails, we ignore it and
-        // respond anyway
-      }
-      return this.respond(request.method, options, 'revalidated')
-    }
-
-    // if we got a modified response, create a new entry based on it
-    const newEntry = new CacheEntry({
-      request,
-      response,
-      options,
-    })
-
-    // respond with the new entry while writing it to the cache
-    return newEntry.store('updated')
-  }
-}
-
-module.exports = CacheEntry
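
Besides the allow-listed headers, getMetadata above also persists any request header named in the response's vary header, except when vary is "*". A standalone sketch of just that rule, using plain objects in place of fetch Headers:

const varyHeadersToKeep = (reqHeaders, vary) => {
  if (!vary || vary === '*') {
    // "*" means every header varies the response; the freshness check
    // would always fail anyway, so nothing extra is stored
    return {}
  }
  const kept = {}
  for (const name of vary.trim().toLowerCase().split(/\s*,\s*/)) {
    if (name in reqHeaders) {
      kept[name] = reqHeaders[name]
    }
  }
  return kept
}

console.log(varyHeadersToKeep(
  { accept: 'application/json', 'accept-encoding': 'gzip', host: 'x' },
  'Accept, Accept-Encoding'
))
// -> { accept: 'application/json', 'accept-encoding': 'gzip' }
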
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/errors.js
deleted file mode 100644
index 67a66573bebe6..0000000000000
--- a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/errors.js
+++ /dev/null
@@ -1,11 +0,0 @@
-class NotCachedError extends Error {
-  constructor (url) {
-    /* eslint-disable-next-line max-len */
-    super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
-    this.code = 'ENOTCACHED'
-  }
-}
-
-module.exports = {
-  NotCachedError,
-}
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/index.js
deleted file mode 100644
index 0de49d23fb933..0000000000000
--- a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/index.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const { NotCachedError } = require('./errors.js')
-const CacheEntry = require('./entry.js')
-const remote = require('../remote.js')
-
-// do whatever is necessary to get a Response and return it
-const cacheFetch = async (request, options) => {
-  // try to find a cached entry that satisfies this request
-  const entry = await CacheEntry.find(request, options)
-  if (!entry) {
-    // no cached result, if the cache mode is 'only-if-cached' that's a failure
-    if (options.cache === 'only-if-cached') {
-      throw new NotCachedError(request.url)
-    }
-
-    // otherwise, we make a request, store it and return it
-    const response = await remote(request, options)
-    const newEntry = new CacheEntry({ request, response, options })
-    return newEntry.store('miss')
-  }
-
-  // we have a cached response that satisfies this request, however if the cache
-  // mode is 'no-cache' then we send the revalidation request no matter what
-  if (options.cache === 'no-cache') {
-    return entry.revalidate(request, options)
-  }
-
-  // if the cached entry is not stale, or if the cache mode is 'force-cache' or
-  // 'only-if-cached' we can respond with the cached entry. set the status
-  // based on the result of needsRevalidation and respond
-  const _needsRevalidation = entry.policy.needsRevalidation(request)
-  if (options.cache === 'force-cache' ||
-      options.cache === 'only-if-cached' ||
-      !_needsRevalidation) {
-    return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
-  }
-
-  // if we got here, the cache entry is stale so revalidate it
-  return entry.revalidate(request, options)
-}
-
-cacheFetch.invalidate = async (request, options) => {
-  if (!options.cachePath) {
-    return
-  }
-
-  return CacheEntry.invalidate(request, options)
-}
-
-module.exports = cacheFetch
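
cacheFetch above dispatches on options.cache; its decision table, condensed into a sketch that returns the action taken for a given mode and entry state:

const cacheAction = (mode, hasEntry, needsRevalidation) => {
  if (!hasEntry) {
    return mode === 'only-if-cached' ? 'throw ENOTCACHED' : 'fetch and store miss'
  }
  if (mode === 'no-cache') {
    return 'revalidate'
  }
  if (mode === 'force-cache' || mode === 'only-if-cached' || !needsRevalidation) {
    return needsRevalidation ? 'respond stale' : 'respond hit'
  }
  return 'revalidate'
}

console.log(cacheAction('default', true, false))         // -> 'respond hit'
console.log(cacheAction('only-if-cached', false, false)) // -> 'throw ENOTCACHED'
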
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/key.js
deleted file mode 100644
index f7684d562b7fa..0000000000000
--- a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/key.js
+++ /dev/null
@@ -1,17 +0,0 @@
-const { URL, format } = require('url')
-
-// options passed to url.format() when generating a key
-const formatOptions = {
-  auth: false,
-  fragment: false,
-  search: true,
-  unicode: false,
-}
-
-// returns a string to be used as the cache key for the Request
-const cacheKey = (request) => {
-  const parsed = new URL(request.url)
-  return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
-}
-
-module.exports = cacheKey
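
The generated key normalizes the URL: credentials and the fragment are dropped, the query string is kept. For example (url.format with an options object is standard Node API; the sample URL is illustrative):

const { URL, format } = require('url')

const key = (requestUrl) =>
  `make-fetch-happen:request-cache:${format(new URL(requestUrl), {
    auth: false, fragment: false, search: true, unicode: false,
  })}`

console.log(key('https://user:pass@registry.npmjs.org/npm?v=1#readme'))
// -> make-fetch-happen:request-cache:https://registry.npmjs.org/npm?v=1
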
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/policy.js
deleted file mode 100644
index ada3c8600dae9..0000000000000
--- a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/policy.js
+++ /dev/null
@@ -1,161 +0,0 @@
-const CacheSemantics = require('http-cache-semantics')
-const Negotiator = require('negotiator')
-const ssri = require('ssri')
-
-// options passed to http-cache-semantics constructor
-const policyOptions = {
-  shared: false,
-  ignoreCargoCult: true,
-}
-
-// a fake empty response, used when only testing the
-// request for storability
-const emptyResponse = { status: 200, headers: {} }
-
-// returns a plain object representation of the Request
-const requestObject = (request) => {
-  const _obj = {
-    method: request.method,
-    url: request.url,
-    headers: {},
-    compress: request.compress,
-  }
-
-  request.headers.forEach((value, key) => {
-    _obj.headers[key] = value
-  })
-
-  return _obj
-}
-
-// returns a plain object representation of the Response
-const responseObject = (response) => {
-  const _obj = {
-    status: response.status,
-    headers: {},
-  }
-
-  response.headers.forEach((value, key) => {
-    _obj.headers[key] = value
-  })
-
-  return _obj
-}
-
-class CachePolicy {
-  constructor ({ entry, request, response, options }) {
-    this.entry = entry
-    this.request = requestObject(request)
-    this.response = responseObject(response)
-    this.options = options
-    this.policy = new CacheSemantics(this.request, this.response, policyOptions)
-
-    if (this.entry) {
-      // if we have an entry, copy the timestamp to the _responseTime
-      // this is necessary because the CacheSemantics constructor forces
-      // the value to Date.now() which means a policy created from a
-      // cache entry is likely to always identify itself as stale
-      this.policy._responseTime = this.entry.metadata.time
-    }
-  }
-
-  // static method to quickly determine if a request alone is storable
-  static storable (request, options) {
-    // no cachePath means no caching
-    if (!options.cachePath) {
-      return false
-    }
-
-    // user explicitly asked not to cache
-    if (options.cache === 'no-store') {
-      return false
-    }
-
-    // we only cache GET and HEAD requests
-    if (!['GET', 'HEAD'].includes(request.method)) {
-      return false
-    }
-
-    // otherwise, let http-cache-semantics make the decision
-    // based on the request's headers
-    const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
-    return policy.storable()
-  }
-
-  // returns true if the policy satisfies the request
-  satisfies (request) {
-    const _req = requestObject(request)
-    if (this.request.headers.host !== _req.headers.host) {
-      return false
-    }
-
-    if (this.request.compress !== _req.compress) {
-      return false
-    }
-
-    const negotiatorA = new Negotiator(this.request)
-    const negotiatorB = new Negotiator(_req)
-
-    if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
-      return false
-    }
-
-    if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
-      return false
-    }
-
-    if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
-      return false
-    }
-
-    if (this.options.integrity) {
-      return ssri.parse(this.options.integrity).match(this.entry.integrity)
-    }
-
-    return true
-  }
-
-  // returns true if the request and response allow caching
-  storable () {
-    return this.policy.storable()
-  }
-
-  // NOTE: this is a hack to avoid parsing the cache-control
-  // header ourselves, it returns true if the response's
-  // cache-control contains must-revalidate
-  get mustRevalidate () {
-    return !!this.policy._rescc['must-revalidate']
-  }
-
-  // returns true if the cached response requires revalidation
-  // for the given request
-  needsRevalidation (request) {
-    const _req = requestObject(request)
-    // force method to GET because we only cache GETs
-    // but can serve a HEAD from a cached GET
-    _req.method = 'GET'
-    return !this.policy.satisfiesWithoutRevalidation(_req)
-  }
-
-  responseHeaders () {
-    return this.policy.responseHeaders()
-  }
-
-  // returns a new object containing the appropriate headers
-  // to send a revalidation request
-  revalidationHeaders (request) {
-    const _req = requestObject(request)
-    return this.policy.revalidationHeaders(_req)
-  }
-
-  // returns true if the request/response was revalidated
-  // successfully. returns false if a new response was received
-  revalidated (request, response) {
-    const _req = requestObject(request)
-    const _res = responseObject(response)
-    const policy = this.policy.revalidatedPolicy(_req, _res)
-    return !policy.modified
-  }
-}
-
-module.exports = CachePolicy
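
Most of the class above delegates to http-cache-semantics; a direct sketch of the underlying calls, with illustrative request and response shapes:

const CacheSemantics = require('http-cache-semantics')

const req = { method: 'GET', url: '/npm', headers: { host: 'registry.npmjs.org' } }
const res = { status: 200, headers: { 'cache-control': 'public, max-age=300' } }

const policy = new CacheSemantics(req, res, { shared: false, ignoreCargoCult: true })
console.log(policy.storable())                        // true: cacheable 200
console.log(policy.satisfiesWithoutRevalidation(req)) // true while within max-age
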
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/fetch.js
deleted file mode 100644
index 233ba67e16550..0000000000000
--- a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/fetch.js
+++ /dev/null
@@ -1,118 +0,0 @@
-'use strict'
-
-const { FetchError, Request, isRedirect } = require('minipass-fetch')
-const url = require('url')
-
-const CachePolicy = require('./cache/policy.js')
-const cache = require('./cache/index.js')
-const remote = require('./remote.js')
-
-// given a Request, a Response and user options
-// return true if the response is a redirect that
-// can be followed. we throw errors that will result
-// in the fetch being rejected if the redirect is
-// possible but invalid for some reason
-const canFollowRedirect = (request, response, options) => {
-  if (!isRedirect(response.status)) {
-    return false
-  }
-
-  if (options.redirect === 'manual') {
-    return false
-  }
-
-  if (options.redirect === 'error') {
-    throw new FetchError(`redirect mode is set to error: ${request.url}`,
-      'no-redirect', { code: 'ENOREDIRECT' })
-  }
-
-  if (!response.headers.has('location')) {
-    throw new FetchError(`redirect location header missing for: ${request.url}`,
-      'no-location', { code: 'EINVALIDREDIRECT' })
-  }
-
-  if (request.counter >= request.follow) {
-    throw new FetchError(`maximum redirect reached at: ${request.url}`,
-      'max-redirect', { code: 'EMAXREDIRECT' })
-  }
-
-  return true
-}
-
-// given a Request, a Response, and the user's options return an object
-// with a new Request and a new options object that will be used for
-// following the redirect
-const getRedirect = (request, response, options) => {
-  const _opts = { ...options }
-  const location = response.headers.get('location')
-  const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
-  // Comment below is used under the following license:
-  /**
-   * @license
-   * Copyright (c) 2010-2012 Mikeal Rogers
-   * Licensed under the Apache License, Version 2.0 (the "License");
-   * you may not use this file except in compliance with the License.
-   * You may obtain a copy of the License at
-   * http://www.apache.org/licenses/LICENSE-2.0
-   * Unless required by applicable law or agreed to in writing,
-   * software distributed under the License is distributed on an "AS
-   * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-   * express or implied. See the License for the specific language
-   * governing permissions and limitations under the License.
-   */
-
-  // Remove authorization if changing hostnames (but not if just
-  // changing ports or protocols).  This matches the behavior of request:
-  // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
-  if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
-    request.headers.delete('authorization')
-    request.headers.delete('cookie')
-  }
-
-  // for POST request with 301/302 response, or any request with 303 response,
-  // use GET when following redirect
-  if (
-    response.status === 303 ||
-    (request.method === 'POST' && [301, 302].includes(response.status))
-  ) {
-    _opts.method = 'GET'
-    _opts.body = null
-    request.headers.delete('content-length')
-  }
-
-  _opts.headers = {}
-  request.headers.forEach((value, key) => {
-    _opts.headers[key] = value
-  })
-
-  _opts.counter = ++request.counter
-  const redirectReq = new Request(url.format(redirectUrl), _opts)
-  return {
-    request: redirectReq,
-    options: _opts,
-  }
-}
-
-const fetch = async (request, options) => {
-  const response = CachePolicy.storable(request, options)
-    ? await cache(request, options)
-    : await remote(request, options)
-
-  // if the request wasn't a GET or HEAD, and the response
-  // status is between 200 and 399 inclusive, invalidate the
-  // request url
-  if (!['GET', 'HEAD'].includes(request.method) &&
-      response.status >= 200 &&
-      response.status <= 399) {
-    await cache.invalidate(request, options)
-  }
-
-  if (!canFollowRedirect(request, response, options)) {
-    return response
-  }
-
-  const redirect = getRedirect(request, response, options)
-  return fetch(redirect.request, redirect.options)
-}
-
-module.exports = fetch
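
getRedirect above drops the authorization and cookie headers only when the redirect target's hostname differs from the original; port or protocol changes alone keep them. A standalone sketch of that check:

const { URL } = require('url')

const dropsCredentials = (fromUrl, location) => {
  // relative locations resolve against the original request URL
  const target = new URL(location, /^https?:/.test(location) ? undefined : fromUrl)
  return new URL(fromUrl).hostname !== target.hostname
}

console.log(dropsCredentials('https://a.example/x', 'https://b.example/y'))      // true
console.log(dropsCredentials('https://a.example/x', '/y'))                       // false: relative, same host
console.log(dropsCredentials('https://a.example/x', 'https://a.example:8443/y')) // false: port change only
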
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/index.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/index.js
deleted file mode 100644
index 2f12e8e1b6113..0000000000000
--- a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/index.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const { FetchError, Headers, Request, Response } = require('minipass-fetch')
-
-const configureOptions = require('./options.js')
-const fetch = require('./fetch.js')
-
-const makeFetchHappen = (url, opts) => {
-  const options = configureOptions(opts)
-
-  const request = new Request(url, options)
-  return fetch(request, options)
-}
-
-makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
-  if (typeof defaultUrl === 'object') {
-    defaultOptions = defaultUrl
-    defaultUrl = null
-  }
-
-  const defaultedFetch = (url, options = {}) => {
-    const finalUrl = url || defaultUrl
-    const finalOptions = {
-      ...defaultOptions,
-      ...options,
-      headers: {
-        ...defaultOptions.headers,
-        ...options.headers,
-      },
-    }
-    return wrappedFetch(finalUrl, finalOptions)
-  }
-
-  defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
-    makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
-  return defaultedFetch
-}
-
-module.exports = makeFetchHappen
-module.exports.FetchError = FetchError
-module.exports.Headers = Headers
-module.exports.Request = Request
-module.exports.Response = Response
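
A usage sketch for the defaults() factory deleted above; the cachePath and URL are illustrative values:

const makeFetchHappen = require('make-fetch-happen')

const cachedFetch = makeFetchHappen.defaults({
  cachePath: './my-cache',  // enables the cacache-backed HTTP cache
  retry: { retries: 2 },
})

cachedFetch('https://registry.npmjs.org/npm')
  .then(res => console.log(res.status, res.headers.get('x-local-cache-status')))
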
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/options.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/options.js
deleted file mode 100644
index f77511279f831..0000000000000
--- a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/options.js
+++ /dev/null
@@ -1,54 +0,0 @@
-const dns = require('dns')
-
-const conditionalHeaders = [
-  'if-modified-since',
-  'if-none-match',
-  'if-unmodified-since',
-  'if-match',
-  'if-range',
-]
-
-const configureOptions = (opts) => {
-  const { strictSSL, ...options } = { ...opts }
-  options.method = options.method ? options.method.toUpperCase() : 'GET'
-  options.rejectUnauthorized = strictSSL !== false
-
-  if (!options.retry) {
-    options.retry = { retries: 0 }
-  } else if (typeof options.retry === 'string') {
-    const retries = parseInt(options.retry, 10)
-    if (isFinite(retries)) {
-      options.retry = { retries }
-    } else {
-      options.retry = { retries: 0 }
-    }
-  } else if (typeof options.retry === 'number') {
-    options.retry = { retries: options.retry }
-  } else {
-    options.retry = { retries: 0, ...options.retry }
-  }
-
-  options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
-
-  options.cache = options.cache || 'default'
-  if (options.cache === 'default') {
-    const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
-      return conditionalHeaders.includes(name.toLowerCase())
-    })
-    if (hasConditionalHeader) {
-      options.cache = 'no-store'
-    }
-  }
-
-  options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
-
-  // cacheManager is deprecated, but if it's set and
-  // cachePath is not we should copy it to the new field
-  if (options.cacheManager && !options.cachePath) {
-    options.cachePath = options.cacheManager
-  }
-
-  return options
-}
-
-module.exports = configureOptions
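
configureOptions accepts retry as a number, a numeric string, or an object; a replica of just that normalization, with its outcomes shown as comments:

const normalizeRetry = (retry) => {
  if (!retry) {
    return { retries: 0 }
  }
  if (typeof retry === 'string') {
    const retries = parseInt(retry, 10)
    return isFinite(retries) ? { retries } : { retries: 0 }
  }
  if (typeof retry === 'number') {
    return { retries: retry }
  }
  return { retries: 0, ...retry }
}

console.log(normalizeRetry(undefined))           // -> { retries: 0 }
console.log(normalizeRetry('3'))                 // -> { retries: 3 }
console.log(normalizeRetry(2))                   // -> { retries: 2 }
console.log(normalizeRetry({ minTimeout: 100 })) // -> { retries: 0, minTimeout: 100 }
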
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/pipeline.js
deleted file mode 100644
index b1d221b2d0ce3..0000000000000
--- a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/pipeline.js
+++ /dev/null
@@ -1,41 +0,0 @@
-'use strict'
-
-const MinipassPipeline = require('minipass-pipeline')
-
-class CachingMinipassPipeline extends MinipassPipeline {
-  #events = []
-  #data = new Map()
-
-  constructor (opts, ...streams) {
-    // CRITICAL: do NOT pass the streams to the call to super(), this will start
-    // the flow of data and potentially cause the events we need to catch to emit
-    // before we've finished our own setup. instead we call super() with no args,
-    // finish our setup, and then push the streams into ourselves to start the
-    // data flow
-    super()
-    this.#events = opts.events
-
-    /* istanbul ignore next - coverage disabled because this is pointless to test here */
-    if (streams.length) {
-      this.push(...streams)
-    }
-  }
-
-  on (event, handler) {
-    if (this.#events.includes(event) && this.#data.has(event)) {
-      return handler(...this.#data.get(event))
-    }
-
-    return super.on(event, handler)
-  }
-
-  emit (event, ...data) {
-    if (this.#events.includes(event)) {
-      this.#data.set(event, data)
-    }
-
-    return super.emit(event, ...data)
-  }
-}
-
-module.exports = CachingMinipassPipeline
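
The pipeline above records the payload of named events so that a listener attached after emission still receives it. A minimal standalone replica of the idea on a plain EventEmitter rather than minipass-pipeline:

const { EventEmitter } = require('events')

class ReplayEmitter extends EventEmitter {
  #cached = new Map()

  constructor (events) {
    super()
    this.events = events
  }

  emit (event, ...data) {
    if (this.events.includes(event)) {
      this.#cached.set(event, data) // remember the payload for late listeners
    }
    return super.emit(event, ...data)
  }

  on (event, handler) {
    if (this.events.includes(event) && this.#cached.has(event)) {
      handler(...this.#cached.get(event)) // replay immediately
      return this
    }
    return super.on(event, handler)
  }
}

const e = new ReplayEmitter(['integrity'])
e.emit('integrity', 'sha512-...')
e.on('integrity', i => console.log('late listener sees', i)) // fires at once
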
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/remote.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/remote.js
deleted file mode 100644
index 2aef9f8f969b0..0000000000000
--- a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/remote.js
+++ /dev/null
@@ -1,127 +0,0 @@
-const { Minipass } = require('minipass')
-const fetch = require('minipass-fetch')
-const promiseRetry = require('promise-retry')
-const ssri = require('ssri')
-
-const CachingMinipassPipeline = require('./pipeline.js')
-const { getAgent } = require('@npmcli/agent')
-const pkg = require('../package.json')
-
-const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
-
-const RETRY_ERRORS = [
-  'ECONNRESET', // remote socket closed on us
-  'ECONNREFUSED', // remote host refused to open connection
-  'EADDRINUSE', // failed to bind to a local port (proxy?)
-  'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
-  // from @npmcli/agent
-  'ECONNECTIONTIMEOUT',
-  'EIDLETIMEOUT',
-  'ERESPONSETIMEOUT',
-  'ETRANSFERTIMEOUT',
-  // Known codes we do NOT retry on:
-  // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
-  // EINVALIDPROXY // invalid protocol from @npmcli/agent
-  // EINVALIDRESPONSE // invalid status code from @npmcli/agent
-]
-
-const RETRY_TYPES = [
-  'request-timeout',
-]
-
-// make a request directly to the remote source,
-// retrying certain classes of errors as well as
-// following redirects (through the cache if necessary)
-// and verifying response integrity
-const remoteFetch = (request, options) => {
-  const agent = getAgent(request.url, options)
-  if (!request.headers.has('connection')) {
-    request.headers.set('connection', agent ? 'keep-alive' : 'close')
-  }
-
-  if (!request.headers.has('user-agent')) {
-    request.headers.set('user-agent', USER_AGENT)
-  }
-
-  // keep our own options since we're overriding the agent
-  // and the redirect mode
-  const _opts = {
-    ...options,
-    agent,
-    redirect: 'manual',
-  }
-
-  return promiseRetry(async (retryHandler, attemptNum) => {
-    const req = new fetch.Request(request, _opts)
-    try {
-      let res = await fetch(req, _opts)
-      if (_opts.integrity && res.status === 200) {
-        // we got a 200 response and the user has specified an expected
-        // integrity value, so wrap the response in an ssri stream to verify it
-        const integrityStream = ssri.integrityStream({
-          algorithms: _opts.algorithms,
-          integrity: _opts.integrity,
-          size: _opts.size,
-        })
-        const pipeline = new CachingMinipassPipeline({
-          events: ['integrity', 'size'],
-        }, res.body, integrityStream)
-        // we also propagate the integrity and size events out to the pipeline so we can use
-        // this new response body as an integrityEmitter for cacache
-        integrityStream.on('integrity', i => pipeline.emit('integrity', i))
-        integrityStream.on('size', s => pipeline.emit('size', s))
-        res = new fetch.Response(pipeline, res)
-        // set an explicit flag so we know if our response body will emit integrity and size
-        res.body.hasIntegrityEmitter = true
-      }
-
-      res.headers.set('x-fetch-attempts', attemptNum)
-
-      // do not retry POST requests, or requests with a streaming body
-      // do retry requests with a 408, 420, 429 or 500+ status in the response
-      const isStream = Minipass.isStream(req.body)
-      const isRetriable = req.method !== 'POST' &&
-          !isStream &&
-          ([408, 420, 429].includes(res.status) || res.status >= 500)
-
-      if (isRetriable) {
-        if (typeof options.onRetry === 'function') {
-          options.onRetry(res)
-        }
-
-        return retryHandler(res)
-      }
-
-      return res
-    } catch (err) {
-      const code = (err.code === 'EPROMISERETRY')
-        ? err.retried.code
-        : err.code
-
-      // err.retried will be the thing that was thrown from above.
-      // if it's a response, we just got a bad status code and we
-      // can re-throw to allow the retry
-      const isRetryError = err.retried instanceof fetch.Response ||
-        (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
-
-      if (req.method === 'POST' || isRetryError) {
-        throw err
-      }
-
-      if (typeof options.onRetry === 'function') {
-        options.onRetry(err)
-      }
-
-      return retryHandler(err)
-    }
-  }, options.retry).catch((err) => {
-    // don't reject for http errors, just return them
-    if (err.status >= 400 && err.type !== 'system') {
-      return err
-    }
-
-    throw err
-  })
-}
-
-module.exports = remoteFetch
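
For context, the retry policy the deleted remote.js encodes is: never replay a POST or a streamed body; retry 408/420/429 and 5xx statuses plus a fixed set of connection error codes; and return, rather than reject, terminal HTTP error responses. A minimal sketch of that policy as a plain async loop standing in for promise-retry (fetchWithRetry is a name invented here):

    const RETRYABLE_STATUS = new Set([408, 420, 429])
    const RETRYABLE_CODES = new Set([
      'ECONNRESET', 'ECONNREFUSED', 'EADDRINUSE', 'ETIMEDOUT',
    ])

    // Sketch: retry transient failures, but never replay a POST or a stream.
    const fetchWithRetry = async (doFetch, opts = {}) => {
      const { method = 'GET', bodyIsStream = false, retries = 2 } = opts
      let lastError
      for (let attempt = 1; attempt <= retries + 1; attempt++) {
        try {
          const res = await doFetch(attempt)
          const retriable = method !== 'POST' && !bodyIsStream &&
            (RETRYABLE_STATUS.has(res.status) || res.status >= 500)
          if (!retriable || attempt > retries) {
            return res // terminal HTTP errors are returned, not thrown
          }
        } catch (err) {
          if (method === 'POST' || !RETRYABLE_CODES.has(err.code)) {
            throw err
          }
          lastError = err
        }
      }
      throw lastError
    }
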
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/package.json b/node_modules/npm-profile/node_modules/make-fetch-happen/package.json
deleted file mode 100644
index 419db8fbb1289..0000000000000
--- a/node_modules/npm-profile/node_modules/make-fetch-happen/package.json
+++ /dev/null
@@ -1,80 +0,0 @@
-{
-  "name": "make-fetch-happen",
-  "version": "12.0.0",
-  "description": "Opinionated, caching, retrying fetch client",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "test": "tap",
-    "posttest": "npm run lint",
-    "eslint": "eslint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/make-fetch-happen.git"
-  },
-  "keywords": [
-    "http",
-    "request",
-    "fetch",
-    "mean girls",
-    "caching",
-    "cache",
-    "subresource integrity"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "dependencies": {
-    "@npmcli/agent": "^1.1.0",
-    "cacache": "^17.0.0",
-    "http-cache-semantics": "^4.1.1",
-    "is-lambda": "^1.0.1",
-    "minipass": "^7.0.2",
-    "minipass-fetch": "^3.0.0",
-    "minipass-flush": "^1.0.5",
-    "minipass-pipeline": "^1.2.4",
-    "negotiator": "^0.6.3",
-    "promise-retry": "^2.0.1",
-    "ssri": "^10.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
-    "nock": "^13.2.4",
-    "safe-buffer": "^5.2.1",
-    "standard-version": "^9.3.2",
-    "tap": "^16.0.0"
-  },
-  "engines": {
-    "node": "^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "color": 1,
-    "files": "test/*.js",
-    "check-coverage": true,
-    "timeout": 60,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "ciVersions": [
-      "16.13.0",
-      "16.x",
-      "18.0.0",
-      "18.x"
-    ],
-    "version": "4.18.0",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/npm-profile/node_modules/npm-package-arg/LICENSE b/node_modules/npm-profile/node_modules/npm-package-arg/LICENSE
deleted file mode 100644
index 19cec97b18468..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-package-arg/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-profile/node_modules/npm-package-arg/lib/npa.js b/node_modules/npm-profile/node_modules/npm-package-arg/lib/npa.js
deleted file mode 100644
index 36bd18cd9f9a6..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-package-arg/lib/npa.js
+++ /dev/null
@@ -1,431 +0,0 @@
-'use strict'
-module.exports = npa
-module.exports.resolve = resolve
-module.exports.toPurl = toPurl
-module.exports.Result = Result
-
-const url = require('url')
-const HostedGit = require('hosted-git-info')
-const semver = require('semver')
-const path = global.FAKE_WINDOWS ? require('path').win32 : require('path')
-const validatePackageName = require('validate-npm-package-name')
-const { homedir } = require('os')
-const log = require('proc-log')
-
-const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
-const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
-const isURL = /^(?:git[+])?[a-z]+:/i
-const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
-const isFilename = /[.](?:tgz|tar.gz|tar)$/i
-
-function npa (arg, where) {
-  let name
-  let spec
-  if (typeof arg === 'object') {
-    if (arg instanceof Result && (!where || where === arg.where)) {
-      return arg
-    } else if (arg.name && arg.rawSpec) {
-      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
-    } else {
-      return npa(arg.raw, where || arg.where)
-    }
-  }
-  const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@')
-  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
-  if (isURL.test(arg)) {
-    spec = arg
-  } else if (isGit.test(arg)) {
-    spec = `git+ssh://${arg}`
-  } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) {
-    spec = arg
-  } else if (nameEndsAt > 0) {
-    name = namePart
-    spec = arg.slice(nameEndsAt + 1) || '*'
-  } else {
-    const valid = validatePackageName(arg)
-    if (valid.validForOldPackages) {
-      name = arg
-      spec = '*'
-    } else {
-      spec = arg
-    }
-  }
-  return resolve(name, spec, where, arg)
-}
-
-const isFilespec = isWindows ? /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
-
-function resolve (name, spec, where, arg) {
-  const res = new Result({
-    raw: arg,
-    name: name,
-    rawSpec: spec,
-    fromArgument: arg != null,
-  })
-
-  if (name) {
-    res.setName(name)
-  }
-
-  if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) {
-    return fromFile(res, where)
-  } else if (spec && /^npm:/i.test(spec)) {
-    return fromAlias(res, where)
-  }
-
-  const hosted = HostedGit.fromUrl(spec, {
-    noGitPlus: true,
-    noCommittish: true,
-  })
-  if (hosted) {
-    return fromHostedGit(res, hosted)
-  } else if (spec && isURL.test(spec)) {
-    return fromURL(res)
-  } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) {
-    return fromFile(res, where)
-  } else {
-    return fromRegistry(res)
-  }
-}
-
-const defaultRegistry = 'https://registry.npmjs.org'
-
-function toPurl (arg, reg = defaultRegistry) {
-  const res = npa(arg)
-
-  if (res.type !== 'version') {
-    throw invalidPurlType(res.type, res.raw)
-  }
-
-  // URI-encode leading @ of scoped packages
-  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
-  if (reg !== defaultRegistry) {
-    purl += '?repository_url=' + reg
-  }
-
-  return purl
-}
-
-function invalidPackageName (name, valid, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
-  err.code = 'EINVALIDPACKAGENAME'
-  return err
-}
-
-function invalidTagName (name, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
-  err.code = 'EINVALIDTAGNAME'
-  return err
-}
-
-function invalidPurlType (type, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
-  err.code = 'EINVALIDPURLTYPE'
-  return err
-}
-
-function Result (opts) {
-  this.type = opts.type
-  this.registry = opts.registry
-  this.where = opts.where
-  if (opts.raw == null) {
-    this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec
-  } else {
-    this.raw = opts.raw
-  }
-
-  this.name = undefined
-  this.escapedName = undefined
-  this.scope = undefined
-  this.rawSpec = opts.rawSpec || ''
-  this.saveSpec = opts.saveSpec
-  this.fetchSpec = opts.fetchSpec
-  if (opts.name) {
-    this.setName(opts.name)
-  }
-  this.gitRange = opts.gitRange
-  this.gitCommittish = opts.gitCommittish
-  this.gitSubdir = opts.gitSubdir
-  this.hosted = opts.hosted
-}
-
-Result.prototype.setName = function (name) {
-  const valid = validatePackageName(name)
-  if (!valid.validForOldPackages) {
-    throw invalidPackageName(name, valid, this.raw)
-  }
-
-  this.name = name
-  this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
-  // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
-  this.escapedName = name.replace('/', '%2f')
-  return this
-}
-
-Result.prototype.toString = function () {
-  const full = []
-  if (this.name != null && this.name !== '') {
-    full.push(this.name)
-  }
-  const spec = this.saveSpec || this.fetchSpec || this.rawSpec
-  if (spec != null && spec !== '') {
-    full.push(spec)
-  }
-  return full.length ? full.join('@') : this.raw
-}
-
-Result.prototype.toJSON = function () {
-  const result = Object.assign({}, this)
-  delete result.hosted
-  return result
-}
-
-function setGitCommittish (res, committish) {
-  if (!committish) {
-    res.gitCommittish = null
-    return res
-  }
-
-  // for each :: separated item:
-  for (const part of committish.split('::')) {
-    // if the item has no : then it is a commit-ish
-    if (!part.includes(':')) {
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a committish')
-      }
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a second committish')
-      }
-      res.gitCommittish = part
-      continue
-    }
-    // split on name:value
-    const [name, value] = part.split(':')
-    // if name is semver do semver lookup of ref or tag
-    if (name === 'semver') {
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a semver range')
-      }
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a second semver range')
-      }
-      res.gitRange = decodeURIComponent(value)
-      continue
-    }
-    if (name === 'path') {
-      if (res.gitSubdir) {
-        throw new Error('cannot override existing path with a second path')
-      }
-      res.gitSubdir = `/${value}`
-      continue
-    }
-    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
-  }
-
-  return res
-}
-
-function fromFile (res, where) {
-  if (!where) {
-    where = process.cwd()
-  }
-  res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory'
-  res.where = where
-
-  // always put the '/' on where when resolving urls, or else
-  // file:foo from /path/to/bar goes to /path/to/foo, when we want
-  // it to be /path/to/bar/foo
-
-  let specUrl
-  let resolvedUrl
-  const prefix = (!/^file:/.test(res.rawSpec) ? 'file:' : '')
-  const rawWithPrefix = prefix + res.rawSpec
-  let rawNoPrefix = rawWithPrefix.replace(/^file:/, '')
-  try {
-    resolvedUrl = new url.URL(rawWithPrefix, `file://${path.resolve(where)}/`)
-    specUrl = new url.URL(rawWithPrefix)
-  } catch (originalError) {
-    const er = new Error('Invalid file: URL, must comply with RFC 8909')
-    throw Object.assign(er, {
-      raw: res.rawSpec,
-      spec: res,
-      where,
-      originalError,
-    })
-  }
-
-  // environment switch for testing
-  if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') {
-    // XXX backwards compatibility lack of compliance with 8909
-    // Remove when we want a breaking change to come into RFC compliance.
-    if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
-      const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
-      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
-      specUrl = new url.URL(rawSpec)
-      rawNoPrefix = rawSpec.replace(/^file:/, '')
-    }
-    // turn file:/../foo into file:../foo
-    // for 1, 2 or 3 leading slashes since we attempted
-    // in the previous step to make it a file protocol url with a leading slash
-    if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
-      const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
-      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
-      specUrl = new url.URL(rawSpec)
-      rawNoPrefix = rawSpec.replace(/^file:/, '')
-    }
-    // XXX end 8909 violation backwards compatibility section
-  }
-
-  // file:foo - relative url to ./foo
-  // file:/foo - absolute path /foo
-  // file:///foo - absolute path to /foo, no authority host
-  // file://localhost/foo - absolute path to /foo, on localhost
-  // file://foo - absolute path to / on foo host (error!)
-  if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
-    const msg = `Invalid file: URL, must be absolute if // present`
-    throw Object.assign(new Error(msg), {
-      raw: res.rawSpec,
-      parsed: resolvedUrl,
-    })
-  }
-
-  // turn /C:/blah into just C:/blah on windows
-  let specPath = decodeURIComponent(specUrl.pathname)
-  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
-  if (isWindows) {
-    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
-    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
-  }
-
-  // replace ~ with homedir, but keep the ~ in the saveSpec
-  // otherwise, make it relative to where param
-  if (/^\/~(\/|$)/.test(specPath)) {
-    res.saveSpec = `file:${specPath.substr(1)}`
-    resolvedPath = path.resolve(homedir(), specPath.substr(3))
-  } else if (!path.isAbsolute(rawNoPrefix)) {
-    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
-  } else {
-    res.saveSpec = `file:${path.resolve(resolvedPath)}`
-  }
-
-  res.fetchSpec = path.resolve(where, resolvedPath)
-  return res
-}
-
-function fromHostedGit (res, hosted) {
-  res.type = 'git'
-  res.hosted = hosted
-  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
-  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
-  return setGitCommittish(res, hosted.committish)
-}
-
-function unsupportedURLType (protocol, spec) {
-  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
-  err.code = 'EUNSUPPORTEDPROTOCOL'
-  return err
-}
-
-function matchGitScp (spec) {
-  // git ssh specifiers are overloaded to also use scp-style git
-  // specifiers, so we have to parse those out and treat them special.
-  // They are NOT true URIs, so we can't hand them to `url.parse`.
-  //
-  // This regex looks for things that look like:
-  // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
-  //
-  // ...and various combinations. The username in the beginning is *required*.
-  const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
-  return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && {
-    fetchSpec: matched[1],
-    gitCommittish: matched[2] == null ? null : matched[2],
-  }
-}
-
-function fromURL (res) {
-  // eslint-disable-next-line node/no-deprecated-api
-  const urlparse = url.parse(res.rawSpec)
-  res.saveSpec = res.rawSpec
-  // check the protocol, and then see if it's git or not
-  switch (urlparse.protocol) {
-    case 'git:':
-    case 'git+http:':
-    case 'git+https:':
-    case 'git+rsync:':
-    case 'git+ftp:':
-    case 'git+file:':
-    case 'git+ssh:': {
-      res.type = 'git'
-      const match = urlparse.protocol === 'git+ssh:' ? matchGitScp(res.rawSpec)
-        : null
-      if (match) {
-        setGitCommittish(res, match.gitCommittish)
-        res.fetchSpec = match.fetchSpec
-      } else {
-        setGitCommittish(res, urlparse.hash != null ? urlparse.hash.slice(1) : '')
-        urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '')
-        if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) {
-          // keep the drive letter : on windows file paths
-          urlparse.host += ':'
-          urlparse.hostname += ':'
-        }
-        delete urlparse.hash
-        res.fetchSpec = url.format(urlparse)
-      }
-      break
-    }
-    case 'http:':
-    case 'https:':
-      res.type = 'remote'
-      res.fetchSpec = res.saveSpec
-      break
-
-    default:
-      throw unsupportedURLType(urlparse.protocol, res.rawSpec)
-  }
-
-  return res
-}
-
-function fromAlias (res, where) {
-  const subSpec = npa(res.rawSpec.substr(4), where)
-  if (subSpec.type === 'alias') {
-    throw new Error('nested aliases not supported')
-  }
-
-  if (!subSpec.registry) {
-    throw new Error('aliases only work for registry deps')
-  }
-
-  res.subSpec = subSpec
-  res.registry = true
-  res.type = 'alias'
-  res.saveSpec = null
-  res.fetchSpec = null
-  return res
-}
-
-function fromRegistry (res) {
-  res.registry = true
-  const spec = res.rawSpec.trim()
-  // no save spec for registry components, as we save based on the fetched
-  // version, not on the argument, so this can't be computed here.
-  res.saveSpec = null
-  res.fetchSpec = spec
-  const version = semver.valid(spec, true)
-  const range = semver.validRange(spec, true)
-  if (version) {
-    res.type = 'version'
-  } else if (range) {
-    res.type = 'range'
-  } else {
-    if (encodeURIComponent(spec) !== spec) {
-      throw invalidTagName(spec, res.raw)
-    }
-    res.type = 'tag'
-  }
-  return res
-}
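
Since the whole of npa.js is removed here, a short usage sketch of the published npm-package-arg package (assuming it is installed) shows the classification the file implements:

    const npa = require('npm-package-arg')

    // Each spec string is normalized into a Result with a 'type' field.
    console.log(npa('foo@^1.2.3').type)          // 'range'
    console.log(npa('foo@1.2.3').type)           // 'version'
    console.log(npa('foo@latest').type)          // 'tag'
    console.log(npa('github:npm/cli').type)      // 'git'
    console.log(npa('file:./some-dir').type)     // 'directory' (tarball paths are 'file')
    console.log(npa('bar@npm:foo@^1.0.0').type)  // 'alias'
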
diff --git a/node_modules/npm-profile/node_modules/npm-package-arg/package.json b/node_modules/npm-profile/node_modules/npm-package-arg/package.json
deleted file mode 100644
index bb9e71b258a93..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-package-arg/package.json
+++ /dev/null
@@ -1,59 +0,0 @@
-{
-  "name": "npm-package-arg",
-  "version": "10.1.0",
-  "description": "Parse the things that can be arguments to `npm install`",
-  "main": "./lib/npa.js",
-  "directories": {
-    "test": "test"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "dependencies": {
-    "hosted-git-info": "^6.0.0",
-    "proc-log": "^3.0.0",
-    "semver": "^7.3.5",
-    "validate-npm-package-name": "^5.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.10.0",
-    "tap": "^16.0.1"
-  },
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "npmclilint": "npmcli-lint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/npm-package-arg.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/npm-package-arg/issues"
-  },
-  "homepage": "https://github.com/npm/npm-package-arg",
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "branches": 97,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.10.0"
-  }
-}
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md
deleted file mode 100644
index 5fc208ff122e0..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-ISC License
-
-Copyright npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this
-software for any purpose with or without fee is hereby
-granted, provided that the above copyright notice and this
-permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
-WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
-EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js
deleted file mode 100644
index 870ce0d923cd0..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js
+++ /dev/null
@@ -1,145 +0,0 @@
-'use strict'
-const fs = require('fs')
-const npa = require('npm-package-arg')
-const { URL } = require('url')
-
-// Find the longest registry key that is used for some kind of auth
-// in the options.
-const regKeyFromURI = (uri, opts) => {
-  const parsed = new URL(uri)
-  // try to find a config key indicating we have auth for this registry
-  // can be one of :_authToken, :_auth, :_password and :username, or
-  // :certfile and :keyfile
-  // We walk up the "path" until we're left with just //[:],
-  // stopping when we reach '//'.
-  let regKey = `//${parsed.host}${parsed.pathname}`
-  while (regKey.length > '//'.length) {
-    // got some auth for this URI
-    if (hasAuth(regKey, opts)) {
-      return regKey
-    }
-
-    // can be either //host/some/path/:_auth or //host/some/path:_auth
-    // walk up by removing EITHER what's after the slash OR the slash itself
-    regKey = regKey.replace(/([^/]+|\/)$/, '')
-  }
-}
-
-const hasAuth = (regKey, opts) => (
-  opts[`${regKey}:_authToken`] ||
-  opts[`${regKey}:_auth`] ||
-  opts[`${regKey}:username`] && opts[`${regKey}:_password`] ||
-  opts[`${regKey}:certfile`] && opts[`${regKey}:keyfile`]
-)
-
-const sameHost = (a, b) => {
-  const parsedA = new URL(a)
-  const parsedB = new URL(b)
-  return parsedA.host === parsedB.host
-}
-
-const getRegistry = opts => {
-  const { spec } = opts
-  const { scope: specScope, subSpec } = spec ? npa(spec) : {}
-  const subSpecScope = subSpec && subSpec.scope
-  const scope = subSpec ? subSpecScope : specScope
-  const scopeReg = scope && opts[`${scope}:registry`]
-  return scopeReg || opts.registry
-}
-
-const maybeReadFile = file => {
-  try {
-    return fs.readFileSync(file, 'utf8')
-  } catch (er) {
-    if (er.code !== 'ENOENT') {
-      throw er
-    }
-    return null
-  }
-}
-
-const getAuth = (uri, opts = {}) => {
-  const { forceAuth } = opts
-  if (!uri) {
-    throw new Error('URI is required')
-  }
-  const regKey = regKeyFromURI(uri, forceAuth || opts)
-
-  // we are only allowed to use what's in forceAuth if specified
-  if (forceAuth && !regKey) {
-    return new Auth({
-      scopeAuthKey: null,
-      token: forceAuth._authToken || forceAuth.token,
-      username: forceAuth.username,
-      password: forceAuth._password || forceAuth.password,
-      auth: forceAuth._auth || forceAuth.auth,
-      certfile: forceAuth.certfile,
-      keyfile: forceAuth.keyfile,
-    })
-  }
-
-  // no auth for this URI, but might have it for the registry
-  if (!regKey) {
-    const registry = getRegistry(opts)
-    if (registry && uri !== registry && sameHost(uri, registry)) {
-      return getAuth(registry, opts)
-    } else if (registry !== opts.registry) {
-      // If making a tarball request to a different base URI than the
-      // registry where we logged in, but the same auth SHOULD be sent
-      // to that artifact host, then we track where it was coming in from,
-      // and warn the user if we get a 4xx error on it.
-      const scopeAuthKey = regKeyFromURI(registry, opts)
-      return new Auth({ scopeAuthKey })
-    }
-  }
-
-  const {
-    [`${regKey}:_authToken`]: token,
-    [`${regKey}:username`]: username,
-    [`${regKey}:_password`]: password,
-    [`${regKey}:_auth`]: auth,
-    [`${regKey}:certfile`]: certfile,
-    [`${regKey}:keyfile`]: keyfile,
-  } = opts
-
-  return new Auth({
-    scopeAuthKey: null,
-    token,
-    auth,
-    username,
-    password,
-    certfile,
-    keyfile,
-  })
-}
-
-class Auth {
-  constructor ({ token, auth, username, password, scopeAuthKey, certfile, keyfile }) {
-    this.scopeAuthKey = scopeAuthKey
-    this.token = null
-    this.auth = null
-    this.isBasicAuth = false
-    this.cert = null
-    this.key = null
-    if (token) {
-      this.token = token
-    } else if (auth) {
-      this.auth = auth
-    } else if (username && password) {
-      const p = Buffer.from(password, 'base64').toString('utf8')
-      this.auth = Buffer.from(`${username}:${p}`, 'utf8').toString('base64')
-      this.isBasicAuth = true
-    }
-    // mTLS may be used in conjunction with another auth method above
-    if (certfile && keyfile) {
-      const cert = maybeReadFile(certfile, 'utf-8')
-      const key = maybeReadFile(keyfile, 'utf-8')
-      if (cert && key) {
-        this.cert = cert
-        this.key = key
-      }
-    }
-  }
-}
-
-module.exports = getAuth
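
The subtle part of the deleted auth.js is regKeyFromURI: .npmrc-style credential keys are matched from the most specific //host/path outward. A standalone sketch of that walk (findAuthKey is a name invented here, and only the token/auth keys are checked):

    // Sketch: find the most specific '//host/path' config key with credentials.
    const findAuthKey = (uri, opts) => {
      const { host, pathname } = new URL(uri)
      let key = `//${host}${pathname}`
      while (key.length > '//'.length) {
        if (opts[`${key}:_authToken`] || opts[`${key}:_auth`]) {
          return key
        }
        // strip either the trailing path segment or the trailing slash
        key = key.replace(/([^/]+|\/)$/, '')
      }
      return null
    }

    const opts = { '//registry.example.com/:_authToken': 'npm_token_here' }
    console.log(findAuthKey('https://registry.example.com/some/deep/path', opts))
    // -> '//registry.example.com/'
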
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js
deleted file mode 100644
index 066ac3c32420f..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js
+++ /dev/null
@@ -1,100 +0,0 @@
-'use strict'
-
-const errors = require('./errors.js')
-const { Response } = require('minipass-fetch')
-const defaultOpts = require('./default-opts.js')
-const log = require('proc-log')
-const cleanUrl = require('./clean-url.js')
-
-/* eslint-disable-next-line max-len */
-const moreInfoUrl = 'https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry'
-const checkResponse =
-  async ({ method, uri, res, startTime, auth, opts }) => {
-    opts = { ...defaultOpts, ...opts }
-    if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) {
-      log.notice('', res.headers.get('npm-notice'))
-    }
-
-    if (res.status >= 400) {
-      logRequest(method, res, startTime)
-      if (auth && auth.scopeAuthKey && !auth.token && !auth.auth) {
-      // we didn't have auth for THIS request, but we do have auth for
-      // requests to the registry indicated by the spec's scope value.
-      // Warn the user.
-        log.warn('registry', `No auth for URI, but auth present for scoped registry.
-
-URI: ${uri}
-Scoped Registry Key: ${auth.scopeAuthKey}
-
-More info here: ${moreInfoUrl}`)
-      }
-      return checkErrors(method, res, startTime, opts)
-    } else {
-      res.body.on('end', () => logRequest(method, res, startTime, opts))
-      if (opts.ignoreBody) {
-        res.body.resume()
-        return new Response(null, res)
-      }
-      return res
-    }
-  }
-module.exports = checkResponse
-
-function logRequest (method, res, startTime) {
-  const elapsedTime = Date.now() - startTime
-  const attempt = res.headers.get('x-fetch-attempts')
-  const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : ''
-  const cacheStatus = res.headers.get('x-local-cache-status')
-  const cacheStr = cacheStatus ? ` (cache ${cacheStatus})` : ''
-  const urlStr = cleanUrl(res.url)
-
-  log.http(
-    'fetch',
-    `${method.toUpperCase()} ${res.status} ${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}`
-  )
-}
-
-function checkErrors (method, res, startTime, opts) {
-  return res.buffer()
-    .catch(() => null)
-    .then(body => {
-      let parsed = body
-      try {
-        parsed = JSON.parse(body.toString('utf8'))
-      } catch {
-        // ignore errors
-      }
-      if (res.status === 401 && res.headers.get('www-authenticate')) {
-        const auth = res.headers.get('www-authenticate')
-          .split(/,\s*/)
-          .map(s => s.toLowerCase())
-        if (auth.indexOf('ipaddress') !== -1) {
-          throw new errors.HttpErrorAuthIPAddress(
-            method, res, parsed, opts.spec
-          )
-        } else if (auth.indexOf('otp') !== -1) {
-          throw new errors.HttpErrorAuthOTP(
-            method, res, parsed, opts.spec
-          )
-        } else {
-          throw new errors.HttpErrorAuthUnknown(
-            method, res, parsed, opts.spec
-          )
-        }
-      } else if (
-        res.status === 401 &&
-        body != null &&
-        /one-time pass/.test(body.toString('utf8'))
-      ) {
-        // Heuristic for malformed OTP responses that don't include the
-        // www-authenticate header.
-        throw new errors.HttpErrorAuthOTP(
-          method, res, parsed, opts.spec
-        )
-      } else {
-        throw new errors.HttpErrorGeneral(
-          method, res, parsed, opts.spec
-        )
-      }
-    })
-}
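
checkErrors above turns a 401 into a specific error class by inspecting the www-authenticate challenges. A minimal sketch of just that classification step (classify401 is a hypothetical helper returning the error codes the deleted file assigns):

    // Sketch: map www-authenticate challenge values to npm's 401 error codes.
    const classify401 = header => {
      const challenges = (header || '').split(/,\s*/).map(s => s.toLowerCase())
      if (challenges.includes('ipaddress')) return 'EAUTHIP' // IP not allowed
      if (challenges.includes('otp')) return 'EOTP'          // one-time password needed
      return 'E401'                                          // unknown auth requirement
    }

    console.log(classify401('OTP'))        // 'EOTP'
    console.log(classify401('ipaddress'))  // 'EAUTHIP'
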
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/clean-url.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/clean-url.js
deleted file mode 100644
index 0c2656b5653a0..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/clean-url.js
+++ /dev/null
@@ -1,27 +0,0 @@
-const { URL } = require('url')
-
-const replace = '***'
-const tokenRegex = /\bnpm_[a-zA-Z0-9]{36}\b/g
-const guidRegex = /\b[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\b/g
-
-const cleanUrl = (str) => {
-  if (typeof str !== 'string' || !str) {
-    return str
-  }
-
-  try {
-    const url = new URL(str)
-    if (url.password) {
-      url.password = replace
-      str = url.toString()
-    }
-  } catch {
-    // ignore errors
-  }
-
-  return str
-    .replace(tokenRegex, `npm_${replace}`)
-    .replace(guidRegex, `npm_${replace}`)
-}
-
-module.exports = cleanUrl
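
cleanUrl is re-exported from the package's entry point (see index.js below), so the published module can be used directly. A hedged usage sketch of the redaction it performs; the URLs are placeholders:

    const { cleanUrl } = require('npm-registry-fetch')

    // URL passwords and npm token-shaped strings are masked before logging.
    console.log(cleanUrl('https://user:hunter2@registry.example.com/pkg'))
    // -> https://user:***@registry.example.com/pkg

    console.log(cleanUrl('https://registry.example.com/?t=npm_' + 'a'.repeat(36)))
    // -> https://registry.example.com/?t=npm_***
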
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js
deleted file mode 100644
index f0847f0b507e2..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js
+++ /dev/null
@@ -1,19 +0,0 @@
-const pkg = require('../package.json')
-module.exports = {
-  maxSockets: 12,
-  method: 'GET',
-  registry: 'https://registry.npmjs.org/',
-  timeout: 5 * 60 * 1000, // 5 minutes
-  strictSSL: true,
-  noProxy: process.env.NOPROXY,
-  userAgent: `${pkg.name
-    }@${
-      pkg.version
-    }/node@${
-      process.version
-    }+${
-      process.arch
-    } (${
-      process.platform
-    })`,
-}
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js
deleted file mode 100644
index cf5ddba6f300c..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js
+++ /dev/null
@@ -1,80 +0,0 @@
-'use strict'
-
-const url = require('url')
-
-function packageName (href) {
-  try {
-    let basePath = new url.URL(href).pathname.slice(1)
-    if (!basePath.match(/^-/)) {
-      basePath = basePath.split('/')
-      var index = basePath.indexOf('_rewrite')
-      if (index === -1) {
-        index = basePath.length - 1
-      } else {
-        index++
-      }
-      return decodeURIComponent(basePath[index])
-    }
-  } catch (_) {
-    // this is ok
-  }
-}
-
-class HttpErrorBase extends Error {
-  constructor (method, res, body, spec) {
-    super()
-    this.name = this.constructor.name
-    this.headers = res.headers.raw()
-    this.statusCode = res.status
-    this.code = `E${res.status}`
-    this.method = method
-    this.uri = res.url
-    this.body = body
-    this.pkgid = spec ? spec.toString() : packageName(res.url)
-  }
-}
-module.exports.HttpErrorBase = HttpErrorBase
-
-class HttpErrorGeneral extends HttpErrorBase {
-  constructor (method, res, body, spec) {
-    super(method, res, body, spec)
-    this.message = `${res.status} ${res.statusText} - ${
-      this.method.toUpperCase()
-    } ${
-      this.spec || this.uri
-    }${
-      (body && body.error) ? ' - ' + body.error : ''
-    }`
-    Error.captureStackTrace(this, HttpErrorGeneral)
-  }
-}
-module.exports.HttpErrorGeneral = HttpErrorGeneral
-
-class HttpErrorAuthOTP extends HttpErrorBase {
-  constructor (method, res, body, spec) {
-    super(method, res, body, spec)
-    this.message = 'OTP required for authentication'
-    this.code = 'EOTP'
-    Error.captureStackTrace(this, HttpErrorAuthOTP)
-  }
-}
-module.exports.HttpErrorAuthOTP = HttpErrorAuthOTP
-
-class HttpErrorAuthIPAddress extends HttpErrorBase {
-  constructor (method, res, body, spec) {
-    super(method, res, body, spec)
-    this.message = 'Login is not allowed from your IP address'
-    this.code = 'EAUTHIP'
-    Error.captureStackTrace(this, HttpErrorAuthIPAddress)
-  }
-}
-module.exports.HttpErrorAuthIPAddress = HttpErrorAuthIPAddress
-
-class HttpErrorAuthUnknown extends HttpErrorBase {
-  constructor (method, res, body, spec) {
-    super(method, res, body, spec)
-    this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate')
-    Error.captureStackTrace(this, HttpErrorAuthUnknown)
-  }
-}
-module.exports.HttpErrorAuthUnknown = HttpErrorAuthUnknown
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js
deleted file mode 100644
index 23e349c5c5b96..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js
+++ /dev/null
@@ -1,247 +0,0 @@
-'use strict'
-
-const { HttpErrorAuthOTP } = require('./errors.js')
-const checkResponse = require('./check-response.js')
-const getAuth = require('./auth.js')
-const fetch = require('make-fetch-happen')
-const JSONStream = require('minipass-json-stream')
-const npa = require('npm-package-arg')
-const qs = require('querystring')
-const url = require('url')
-const zlib = require('minizlib')
-const { Minipass } = require('minipass')
-
-const defaultOpts = require('./default-opts.js')
-
-// WhatWG URL throws if it's not fully resolved
-const urlIsValid = u => {
-  try {
-    return !!new url.URL(u)
-  } catch (_) {
-    return false
-  }
-}
-
-module.exports = regFetch
-function regFetch (uri, /* istanbul ignore next */ opts_ = {}) {
-  const opts = {
-    ...defaultOpts,
-    ...opts_,
-  }
-
-  // if we did not get a fully qualified URI, then we look at the registry
-  // config or relevant scope to resolve it.
-  const uriValid = urlIsValid(uri)
-  let registry = opts.registry || defaultOpts.registry
-  if (!uriValid) {
-    registry = opts.registry = (
-      (opts.spec && pickRegistry(opts.spec, opts)) ||
-      opts.registry ||
-      registry
-    )
-    uri = `${
-      registry.trim().replace(/\/?$/g, '')
-    }/${
-      uri.trim().replace(/^\//, '')
-    }`
-    // asserts that this is now valid
-    new url.URL(uri)
-  }
-
-  const method = opts.method || 'GET'
-
-  // resolve auth and headers in a way that takes into account the scope, the prefix of `uri`, etc
-  const startTime = Date.now()
-  const auth = getAuth(uri, opts)
-  const headers = getHeaders(uri, auth, opts)
-  let body = opts.body
-  const bodyIsStream = Minipass.isStream(body)
-  const bodyIsPromise = body &&
-    typeof body === 'object' &&
-    typeof body.then === 'function'
-
-  if (
-    body && !bodyIsStream && !bodyIsPromise && typeof body !== 'string' && !Buffer.isBuffer(body)
-  ) {
-    headers['content-type'] = headers['content-type'] || 'application/json'
-    body = JSON.stringify(body)
-  } else if (body && !headers['content-type']) {
-    headers['content-type'] = 'application/octet-stream'
-  }
-
-  if (opts.gzip) {
-    headers['content-encoding'] = 'gzip'
-    if (bodyIsStream) {
-      const gz = new zlib.Gzip()
-      body.on('error', /* istanbul ignore next: unlikely and hard to test */
-        err => gz.emit('error', err))
-      body = body.pipe(gz)
-    } else if (!bodyIsPromise) {
-      body = new zlib.Gzip().end(body).concat()
-    }
-  }
-
-  const parsed = new url.URL(uri)
-
-  if (opts.query) {
-    const q = typeof opts.query === 'string' ? qs.parse(opts.query)
-      : opts.query
-
-    Object.keys(q).forEach(key => {
-      if (q[key] !== undefined) {
-        parsed.searchParams.set(key, q[key])
-      }
-    })
-    uri = url.format(parsed)
-  }
-
-  if (parsed.searchParams.get('write') === 'true' && method === 'GET') {
-    // do not cache, because this GET is fetching a rev that will be
-    // used for a subsequent PUT or DELETE, so we need to conditionally
-    // update cache.
-    opts.offline = false
-    opts.preferOffline = false
-    opts.preferOnline = true
-  }
-
-  const doFetch = async fetchBody => {
-    const p = fetch(uri, {
-      agent: opts.agent,
-      algorithms: opts.algorithms,
-      body: fetchBody,
-      cache: getCacheMode(opts),
-      cachePath: opts.cache,
-      ca: opts.ca,
-      cert: auth.cert || opts.cert,
-      headers,
-      integrity: opts.integrity,
-      key: auth.key || opts.key,
-      localAddress: opts.localAddress,
-      maxSockets: opts.maxSockets,
-      memoize: opts.memoize,
-      method: method,
-      noProxy: opts.noProxy,
-      proxy: opts.httpsProxy || opts.proxy,
-      retry: opts.retry ? opts.retry : {
-        retries: opts.fetchRetries,
-        factor: opts.fetchRetryFactor,
-        minTimeout: opts.fetchRetryMintimeout,
-        maxTimeout: opts.fetchRetryMaxtimeout,
-      },
-      strictSSL: opts.strictSSL,
-      timeout: opts.timeout || 30 * 1000,
-    }).then(res => checkResponse({
-      method,
-      uri,
-      res,
-      registry,
-      startTime,
-      auth,
-      opts,
-    }))
-
-    if (typeof opts.otpPrompt === 'function') {
-      return p.catch(async er => {
-        if (er instanceof HttpErrorAuthOTP) {
-          let otp
-          // if otp fails to complete, we fail with that failure
-          try {
-            otp = await opts.otpPrompt()
-          } catch (_) {
-            // ignore this error
-          }
-          // if no otp provided, or otpPrompt errored, throw the original HTTP error
-          if (!otp) {
-            throw er
-          }
-          return regFetch(uri, { ...opts, otp })
-        }
-        throw er
-      })
-    } else {
-      return p
-    }
-  }
-
-  return Promise.resolve(body).then(doFetch)
-}
-
-module.exports.json = fetchJSON
-function fetchJSON (uri, opts) {
-  return regFetch(uri, opts).then(res => res.json())
-}
-
-module.exports.json.stream = fetchJSONStream
-function fetchJSONStream (uri, jsonPath,
-  /* istanbul ignore next */ opts_ = {}) {
-  const opts = { ...defaultOpts, ...opts_ }
-  const parser = JSONStream.parse(jsonPath, opts.mapJSON)
-  regFetch(uri, opts).then(res =>
-    res.body.on('error',
-      /* istanbul ignore next: unlikely and difficult to test */
-      er => parser.emit('error', er)).pipe(parser)
-  ).catch(er => parser.emit('error', er))
-  return parser
-}
-
-module.exports.pickRegistry = pickRegistry
-function pickRegistry (spec, opts = {}) {
-  spec = npa(spec)
-  let registry = spec.scope &&
-    opts[spec.scope.replace(/^@?/, '@') + ':registry']
-
-  if (!registry && opts.scope) {
-    registry = opts[opts.scope.replace(/^@?/, '@') + ':registry']
-  }
-
-  if (!registry) {
-    registry = opts.registry || defaultOpts.registry
-  }
-
-  return registry
-}
-
-function getCacheMode (opts) {
-  return opts.offline ? 'only-if-cached'
-    : opts.preferOffline ? 'force-cache'
-    : opts.preferOnline ? 'no-cache'
-    : 'default'
-}
-
-function getHeaders (uri, auth, opts) {
-  const headers = Object.assign({
-    'user-agent': opts.userAgent,
-  }, opts.headers || {})
-
-  if (opts.authType) {
-    headers['npm-auth-type'] = opts.authType
-  }
-
-  if (opts.scope) {
-    headers['npm-scope'] = opts.scope
-  }
-
-  if (opts.npmSession) {
-    headers['npm-session'] = opts.npmSession
-  }
-
-  if (opts.npmCommand) {
-    headers['npm-command'] = opts.npmCommand
-  }
-
-  // If a tarball is hosted in a different place than the manifest, only send
-  // credentials on `alwaysAuth`
-  if (auth.token) {
-    headers.authorization = `Bearer ${auth.token}`
-  } else if (auth.auth) {
-    headers.authorization = `Basic ${auth.auth}`
-  }
-
-  if (opts.otp) {
-    headers['npm-otp'] = opts.otp
-  }
-
-  return headers
-}
-
-module.exports.cleanUrl = require('./clean-url.js')
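
index.js above is the package entry point; the published npm-registry-fetch keeps the same surface (regFetch as the default export, plus .json and .pickRegistry). A hedged usage sketch, where registry.example.com and @myscope are placeholders:

    const regFetch = require('npm-registry-fetch')

    const main = async () => {
      // relative paths are resolved against the configured registry
      const packument = await regFetch.json('/npm-registry-fetch', {
        registry: 'https://registry.npmjs.org/',
      })
      console.log(packument['dist-tags'].latest)

      // pickRegistry applies scope config the same way regFetch does internally
      const reg = regFetch.pickRegistry('@myscope/pkg', {
        '@myscope:registry': 'https://registry.example.com/',
      })
      console.log(reg) // -> https://registry.example.com/
    }

    main().catch(console.error)
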
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json b/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json
deleted file mode 100644
index 8832c8a2e95d3..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json
+++ /dev/null
@@ -1,73 +0,0 @@
-{
-  "name": "npm-registry-fetch",
-  "version": "15.0.0",
-  "description": "Fetch-based http client for use with npm registry APIs",
-  "main": "lib",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "eslint": "eslint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "test": "tap",
-    "posttest": "npm run lint",
-    "npmclilint": "npmcli-lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/npm-registry-fetch.git"
-  },
-  "keywords": [
-    "npm",
-    "registry",
-    "fetch"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "dependencies": {
-    "make-fetch-happen": "^12.0.0",
-    "minipass": "^7.0.2",
-    "minipass-fetch": "^3.0.0",
-    "minipass-json-stream": "^1.0.1",
-    "minizlib": "^2.1.2",
-    "npm-package-arg": "^10.0.0",
-    "proc-log": "^3.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
-    "cacache": "^17.0.0",
-    "nock": "^13.2.4",
-    "require-inject": "^1.4.4",
-    "ssri": "^10.0.0",
-    "tap": "^16.0.1"
-  },
-  "tap": {
-    "check-coverage": true,
-    "test-ignore": "test[\\\\/](util|cache)[\\\\/]",
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "engines": {
-    "node": "^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "ciVersions": [
-      "16.13.0",
-      "16.x",
-      "18.0.0",
-      "18.x"
-    ],
-    "version": "4.18.0",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/npm-profile/package.json b/node_modules/npm-profile/package.json
index c3b9bc2a8dbb2..af57e9e73509c 100644
--- a/node_modules/npm-profile/package.json
+++ b/node_modules/npm-profile/package.json
@@ -1,12 +1,12 @@
 {
   "name": "npm-profile",
-  "version": "8.0.0",
+  "version": "9.0.0",
   "description": "Library for updating an npmjs.com profile",
   "keywords": [],
   "author": "GitHub Inc.",
   "license": "ISC",
   "dependencies": {
-    "npm-registry-fetch": "^15.0.0",
+    "npm-registry-fetch": "^16.0.0",
     "proc-log": "^3.0.0"
   },
   "main": "./lib/index.js",
@@ -41,13 +41,13 @@
     ]
   },
   "engines": {
-    "node": "^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "version": "4.18.0",
     "ciVersions": [
-      "16.13.0",
+      "16.14.0",
       "16.x",
       "18.0.0",
       "18.x"
diff --git a/package-lock.json b/package-lock.json
index 5ebe3251dcd84..e85d6598a0778 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -133,7 +133,7 @@
         "npm-install-checks": "^6.2.0",
         "npm-package-arg": "^11.0.0",
         "npm-pick-manifest": "^9.0.0",
-        "npm-profile": "^8.0.0",
+        "npm-profile": "^9.0.0",
         "npm-registry-fetch": "^16.0.0",
         "npm-user-validate": "^2.0.0",
         "npmlog": "^7.0.1",
@@ -9816,128 +9816,16 @@
       }
     },
     "node_modules/npm-profile": {
-      "version": "8.0.0",
-      "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-8.0.0.tgz",
-      "integrity": "sha512-3I/URYO4xI4PBRE9OMsxrTPT357n4ygEb5KqjZC31DU2tbdkOPBHCjRY5Xj7SXlPYvsx83tY3ia86EZ3LKkMzw==",
-      "inBundle": true,
-      "dependencies": {
-        "npm-registry-fetch": "^15.0.0",
-        "proc-log": "^3.0.0"
-      },
-      "engines": {
-        "node": "^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/npm-profile/node_modules/@npmcli/agent": {
-      "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-1.1.0.tgz",
-      "integrity": "sha512-I9g/2XFOkflxm5IDrGSjCcR2d12Jmic0di9w/WpJBbzYuSXmfgoL+WwEV7zY/ajxzQr7o4vSkEJh6piyFLYtuQ==",
-      "inBundle": true,
-      "dependencies": {
-        "lru-cache": "^7.18.3",
-        "socks": "^2.7.1"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/npm-profile/node_modules/cacache": {
-      "version": "17.1.4",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
-      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
-      "inBundle": true,
-      "dependencies": {
-        "@npmcli/fs": "^3.1.0",
-        "fs-minipass": "^3.0.0",
-        "glob": "^10.2.2",
-        "lru-cache": "^7.7.1",
-        "minipass": "^7.0.3",
-        "minipass-collect": "^1.0.2",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "p-map": "^4.0.0",
-        "ssri": "^10.0.0",
-        "tar": "^6.1.11",
-        "unique-filename": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/npm-profile/node_modules/hosted-git-info": {
-      "version": "6.1.1",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
-      "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==",
-      "inBundle": true,
-      "dependencies": {
-        "lru-cache": "^7.5.1"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/npm-profile/node_modules/lru-cache": {
-      "version": "7.18.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
-      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
-      "inBundle": true,
-      "engines": {
-        "node": ">=12"
-      }
-    },
-    "node_modules/npm-profile/node_modules/make-fetch-happen": {
-      "version": "12.0.0",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-12.0.0.tgz",
-      "integrity": "sha512-xpuA2kA8Z66uGQjaSXd7rffqJOv60iYpP8X0TsZl3uwXlqxUVmHETImjM71JOPA694TlcX37GhlaCsl6z6fNVg==",
-      "inBundle": true,
-      "dependencies": {
-        "@npmcli/agent": "^1.1.0",
-        "cacache": "^17.0.0",
-        "http-cache-semantics": "^4.1.1",
-        "is-lambda": "^1.0.1",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^3.0.0",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "negotiator": "^0.6.3",
-        "promise-retry": "^2.0.1",
-        "ssri": "^10.0.0"
-      },
-      "engines": {
-        "node": "^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/npm-profile/node_modules/npm-package-arg": {
-      "version": "10.1.0",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz",
-      "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==",
-      "inBundle": true,
-      "dependencies": {
-        "hosted-git-info": "^6.0.0",
-        "proc-log": "^3.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^5.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/npm-profile/node_modules/npm-registry-fetch": {
-      "version": "15.0.0",
-      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-15.0.0.tgz",
-      "integrity": "sha512-CMFzk0HMDQ3fmFZ4v62C05g6eBwoU3PxpzFf4QiE360vfmtKZJkj+iCpgLx+I4oJT6Kx8g67Coyk729Q27M2JQ==",
+      "version": "9.0.0",
+      "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-9.0.0.tgz",
+      "integrity": "sha512-qv43ixsJ7vndzfxD3XsPNu1Njck6dhO7q1efksTo+0DiOQysKSOsIhK/qDD1/xO2o+2jDOA4Rv/zOJ9KQFs9nw==",
       "inBundle": true,
       "dependencies": {
-        "make-fetch-happen": "^12.0.0",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^3.0.0",
-        "minipass-json-stream": "^1.0.1",
-        "minizlib": "^2.1.2",
-        "npm-package-arg": "^10.0.0",
+        "npm-registry-fetch": "^16.0.0",
         "proc-log": "^3.0.0"
       },
       "engines": {
-        "node": "^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/npm-registry-fetch": {
diff --git a/package.json b/package.json
index 8078dde05b6bb..9c61ee0c3c458 100644
--- a/package.json
+++ b/package.json
@@ -98,7 +98,7 @@
     "npm-install-checks": "^6.2.0",
     "npm-package-arg": "^11.0.0",
     "npm-pick-manifest": "^9.0.0",
-    "npm-profile": "^8.0.0",
+    "npm-profile": "^9.0.0",
     "npm-registry-fetch": "^16.0.0",
     "npm-user-validate": "^2.0.0",
     "npmlog": "^7.0.1",

From 0815f9f1563b7994fbc821efcc2c4c55e667958e Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 17:20:03 -0700
Subject: [PATCH 48/68] fix: set objectMode for search filter stream

---
 lib/commands/search.js | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/lib/commands/search.js b/lib/commands/search.js
index 5fb0a12bce138..85ff7db2b7884 100644
--- a/lib/commands/search.js
+++ b/lib/commands/search.js
@@ -68,6 +68,10 @@ class Search extends BaseCommand {
     let anyOutput = false
 
     class FilterStream extends Minipass {
+      constructor () {
+        super({ objectMode: true })
+      }
+
       write (pkg) {
         if (filter(pkg, opts.include, opts.exclude)) {
           super.write(pkg)
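
Editorial note on the fix above: Minipass streams default to buffer/string mode, so writing plain search-result objects without objectMode raises an error. A minimal reproduction sketch (assuming minipass v7 is installed):

    const { Minipass } = require('minipass')

    const bad = new Minipass()
    // bad.write({ name: 'pkg' }) // errors in v7: objects need objectMode

    const good = new Minipass({ objectMode: true })
    good.on('data', pkg => console.log('got', pkg.name))
    good.write({ name: 'pkg' }) // ok: objects pass through untouched
    good.end()
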

From d58b2fbebf512e61430198076cd622ca0e84aaec Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Tue, 15 Aug 2023 17:27:30 -0700
Subject: [PATCH 49/68] chore: update doctor snapshots for new proxy messages

---
 tap-snapshots/test/lib/commands/doctor.js.test.cjs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tap-snapshots/test/lib/commands/doctor.js.test.cjs b/tap-snapshots/test/lib/commands/doctor.js.test.cjs
index 0cf7133bdc4bf..b14ef6ebfbd90 100644
--- a/tap-snapshots/test/lib/commands/doctor.js.test.cjs
+++ b/tap-snapshots/test/lib/commands/doctor.js.test.cjs
@@ -180,9 +180,9 @@ Object {
 
 exports[`test/lib/commands/doctor.js TAP bad proxy > output 1`] = `
 Check                               Value   Recommendation/Notes
-npm ping                            not ok  unsupported proxy protocol: 'ssh:'
-npm -v                              not ok  Error: unsupported proxy protocol: 'ssh:'
-node -v                             not ok  Error: unsupported proxy protocol: 'ssh:'
+npm ping                            not ok  Invalid protocol \`ssh:\` connecting to proxy \`npmjs.org\`
+npm -v                              not ok  Error: Invalid protocol \`ssh:\` connecting to proxy \`npmjs.org\`
+node -v                             not ok  Error: Invalid protocol \`ssh:\` connecting to proxy \`npmjs.org\`
 npm config get registry             ok      using default registry (https://registry.npmjs.org/)
 git executable in PATH              ok      /path/to/git
 global bin folder in PATH           ok      {CWD}/global/bin

From 37a28d0e9ac4b508daab25ab00dde6bff4114597 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Wed, 16 Aug 2023 15:20:15 -0700
Subject: [PATCH 50/68] chore: check git status after deps

---
 .github/workflows/audit.yml                   |  2 ++
 .github/workflows/ci-libnpmaccess.yml         |  4 ++++
 .github/workflows/ci-libnpmdiff.yml           |  4 ++++
 .github/workflows/ci-libnpmexec.yml           |  4 ++++
 .github/workflows/ci-libnpmfund.yml           |  4 ++++
 .github/workflows/ci-libnpmhook.yml           |  4 ++++
 .github/workflows/ci-libnpmorg.yml            |  4 ++++
 .github/workflows/ci-libnpmpack.yml           |  4 ++++
 .github/workflows/ci-libnpmpublish.yml        |  4 ++++
 .github/workflows/ci-libnpmsearch.yml         |  4 ++++
 .github/workflows/ci-libnpmteam.yml           |  4 ++++
 .github/workflows/ci-libnpmversion.yml        |  4 ++++
 .github/workflows/ci-npmcli-arborist.yml      |  4 ++++
 .github/workflows/ci-npmcli-config.yml        |  4 ++++
 .github/workflows/ci-npmcli-docs.yml          |  6 ++++++
 .github/workflows/ci-npmcli-mock-globals.yml  |  4 ++++
 .github/workflows/ci-npmcli-mock-registry.yml |  4 ++++
 .github/workflows/ci-npmcli-smoke-tests.yml   |  4 ++++
 .github/workflows/ci-release.yml              |  6 ++++++
 .github/workflows/ci.yml                      | 10 ++++++++++
 .github/workflows/create-node-pr.yml          |  2 ++
 .github/workflows/pull-request.yml            |  2 ++
 .github/workflows/release.yml                 |  4 ++++
 scripts/template-oss/_step-deps.yml           |  2 ++
 24 files changed, 98 insertions(+)

diff --git a/.github/workflows/audit.yml b/.github/workflows/audit.yml
index cb12bc3e5c269..e03ac191ae6d3 100644
--- a/.github/workflows/audit.yml
+++ b/.github/workflows/audit.yml
@@ -33,6 +33,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js --package-lock
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Run Production Audit
         run: node . audit --omit=dev
       - name: Run Full Audit
diff --git a/.github/workflows/ci-libnpmaccess.yml b/.github/workflows/ci-libnpmaccess.yml
index 0f530a4cddaca..3a6662ea109c0 100644
--- a/.github/workflows/ci-libnpmaccess.yml
+++ b/.github/workflows/ci-libnpmaccess.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w libnpmaccess
       - name: Post Lint
@@ -89,6 +91,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-libnpmdiff.yml b/.github/workflows/ci-libnpmdiff.yml
index 6171ebdfd26f4..60a25248dfaca 100644
--- a/.github/workflows/ci-libnpmdiff.yml
+++ b/.github/workflows/ci-libnpmdiff.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w libnpmdiff
       - name: Post Lint
@@ -89,6 +91,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-libnpmexec.yml b/.github/workflows/ci-libnpmexec.yml
index e216f74c4a54f..0eebbb4bb4175 100644
--- a/.github/workflows/ci-libnpmexec.yml
+++ b/.github/workflows/ci-libnpmexec.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w libnpmexec
       - name: Post Lint
@@ -89,6 +91,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-libnpmfund.yml b/.github/workflows/ci-libnpmfund.yml
index e1daa28bd270a..0e6514696443e 100644
--- a/.github/workflows/ci-libnpmfund.yml
+++ b/.github/workflows/ci-libnpmfund.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w libnpmfund
       - name: Post Lint
@@ -91,6 +93,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-libnpmhook.yml b/.github/workflows/ci-libnpmhook.yml
index d384f7127b103..6e0d09081c851 100644
--- a/.github/workflows/ci-libnpmhook.yml
+++ b/.github/workflows/ci-libnpmhook.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w libnpmhook
       - name: Post Lint
@@ -89,6 +91,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-libnpmorg.yml b/.github/workflows/ci-libnpmorg.yml
index 2860ad34c145e..0ba1bccd40b87 100644
--- a/.github/workflows/ci-libnpmorg.yml
+++ b/.github/workflows/ci-libnpmorg.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w libnpmorg
       - name: Post Lint
@@ -89,6 +91,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-libnpmpack.yml b/.github/workflows/ci-libnpmpack.yml
index bf5471e58c941..46326e61f8b49 100644
--- a/.github/workflows/ci-libnpmpack.yml
+++ b/.github/workflows/ci-libnpmpack.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w libnpmpack
       - name: Post Lint
@@ -89,6 +91,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-libnpmpublish.yml b/.github/workflows/ci-libnpmpublish.yml
index 33a21ef83735d..43922b5eb3d79 100644
--- a/.github/workflows/ci-libnpmpublish.yml
+++ b/.github/workflows/ci-libnpmpublish.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w libnpmpublish
       - name: Post Lint
@@ -89,6 +91,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-libnpmsearch.yml b/.github/workflows/ci-libnpmsearch.yml
index 58d58b523ad4a..bc29c37d15d03 100644
--- a/.github/workflows/ci-libnpmsearch.yml
+++ b/.github/workflows/ci-libnpmsearch.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w libnpmsearch
       - name: Post Lint
@@ -89,6 +91,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-libnpmteam.yml b/.github/workflows/ci-libnpmteam.yml
index c1dc3219c5cbd..7aef5bd823260 100644
--- a/.github/workflows/ci-libnpmteam.yml
+++ b/.github/workflows/ci-libnpmteam.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w libnpmteam
       - name: Post Lint
@@ -89,6 +91,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-libnpmversion.yml b/.github/workflows/ci-libnpmversion.yml
index f78a1df3acc5b..1bd8f2c6b357a 100644
--- a/.github/workflows/ci-libnpmversion.yml
+++ b/.github/workflows/ci-libnpmversion.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w libnpmversion
       - name: Post Lint
@@ -89,6 +91,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-npmcli-arborist.yml b/.github/workflows/ci-npmcli-arborist.yml
index 7ba0f170e576d..4618dece5df77 100644
--- a/.github/workflows/ci-npmcli-arborist.yml
+++ b/.github/workflows/ci-npmcli-arborist.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w @npmcli/arborist
       - name: Post Lint
@@ -89,6 +91,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-npmcli-config.yml b/.github/workflows/ci-npmcli-config.yml
index 0aa7e07b2f229..041674ece4bfa 100644
--- a/.github/workflows/ci-npmcli-config.yml
+++ b/.github/workflows/ci-npmcli-config.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w @npmcli/config
       - name: Post Lint
@@ -91,6 +93,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-npmcli-docs.yml b/.github/workflows/ci-npmcli-docs.yml
index ad6cabf1932a4..f93245c04ef70 100644
--- a/.github/workflows/ci-npmcli-docs.yml
+++ b/.github/workflows/ci-npmcli-docs.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w @npmcli/docs
       - name: Post Lint
@@ -86,6 +88,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
@@ -119,6 +123,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Build Docs
         run: |
           node . run build -w docs
diff --git a/.github/workflows/ci-npmcli-mock-globals.yml b/.github/workflows/ci-npmcli-mock-globals.yml
index b3b37036b47b1..11f8fcfad8357 100644
--- a/.github/workflows/ci-npmcli-mock-globals.yml
+++ b/.github/workflows/ci-npmcli-mock-globals.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w @npmcli/mock-globals
       - name: Post Lint
@@ -91,6 +93,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-npmcli-mock-registry.yml b/.github/workflows/ci-npmcli-mock-registry.yml
index 8e23df0cb5657..e368025402432 100644
--- a/.github/workflows/ci-npmcli-mock-registry.yml
+++ b/.github/workflows/ci-npmcli-mock-registry.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w @npmcli/mock-registry
       - name: Post Lint
@@ -89,6 +91,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-npmcli-smoke-tests.yml b/.github/workflows/ci-npmcli-smoke-tests.yml
index abb434b23fa79..b0c31b9e88ae3 100644
--- a/.github/workflows/ci-npmcli-smoke-tests.yml
+++ b/.github/workflows/ci-npmcli-smoke-tests.yml
@@ -42,6 +42,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -w @npmcli/smoke-tests
       - name: Post Lint
@@ -91,6 +93,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
diff --git a/.github/workflows/ci-release.yml b/.github/workflows/ci-release.yml
index 7a224177fe5d6..df410a7108c1a 100644
--- a/.github/workflows/ci-release.yml
+++ b/.github/workflows/ci-release.yml
@@ -88,6 +88,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts -ws -iwr --if-present
       - name: Post Lint
@@ -187,6 +189,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
@@ -289,6 +293,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Pack
         env:
           SMOKE_PUBLISH_NPM: 1
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 7ad326099c57b..58763654ba144 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -50,6 +50,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Lint
         run: node . run lint --ignore-scripts
       - name: Post Lint
@@ -97,6 +99,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Add Problem Matcher
         run: echo "::add-matcher::.github/matchers/tap.json"
       - name: Test
@@ -128,6 +132,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Check Licenses
         run: node . run licenses
 
@@ -155,6 +161,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Run Smoke Tests
         run: node . test -w smoke-tests --ignore-scripts
       - name: Check Git Status
@@ -183,6 +191,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Setup WSL
         uses: Vampire/setup-wsl@v2.0.1
       - name: Setup Cygwin
diff --git a/.github/workflows/create-node-pr.yml b/.github/workflows/create-node-pr.yml
index b772ff2706bc1..4f867518cec82 100644
--- a/.github/workflows/create-node-pr.yml
+++ b/.github/workflows/create-node-pr.yml
@@ -43,6 +43,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Checkout Node
         uses: actions/checkout@v3
         with:
diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml
index f28d52cc09046..bb2b544a2c568 100644
--- a/.github/workflows/pull-request.yml
+++ b/.github/workflows/pull-request.yml
@@ -37,6 +37,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Run Commitlint on Commits
         id: commit
         continue-on-error: true
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 7bdfb8d30fa59..ac547956674fe 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -51,6 +51,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Release Please
         id: release
         env:
@@ -164,6 +166,8 @@ jobs:
         run: node scripts/git-dirty.js
       - name: Reset Deps
         run: node scripts/resetdeps.js
+      - name: Check Git Status
+        run: node scripts/git-dirty.js
       - name: Run Post Pull Request Actions
         env:
           RELEASE_PR_NUMBER: ${{ needs.release.outputs.pr-number }}
diff --git a/scripts/template-oss/_step-deps.yml b/scripts/template-oss/_step-deps.yml
index c36f9a2f2fb8a..d83726613ec76 100644
--- a/scripts/template-oss/_step-deps.yml
+++ b/scripts/template-oss/_step-deps.yml
@@ -2,3 +2,5 @@
   run: node scripts/git-dirty.js
 - name: Reset Deps
   run: node scripts/resetdeps.js {{~#if jobDepFlags}} {{ jobDepFlags }}{{/if}}
+- name: Check Git Status
+  run: node scripts/git-dirty.js

From e6c9c285eb135948842efe955486a7d7018df930 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Thu, 17 Aug 2023 12:51:29 -0700
Subject: [PATCH 51/68] chore: tap@16.3.8
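
tap bundles its own copies of ink, treport, and their transitive @babel/*
dependencies (the "inBundle": true entries in package-lock.json), which is
why this one devDependency bump also moves a long list of
node_modules/tap/node_modules/* versions below. A hypothetical one-off to
list those bundled entries from a v2/v3 lockfile (not part of this change):

    // hypothetical: print every lockfile entry bundled inside tap
    const lock = require('./package-lock.json')

    for (const [path, meta] of Object.entries(lock.packages)) {
      if (path.startsWith('node_modules/tap/') && meta.inBundle) {
        console.log(path, meta.version)
      }
    }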

---
 docs/package.json                     |   2 +-
 mock-globals/package.json             |   2 +-
 mock-registry/package.json            |   2 +-
 package-lock.json                     | 108 +++++++++++++-------------
 package.json                          |   2 +-
 smoke-tests/package.json              |   2 +-
 workspaces/arborist/package.json      |   2 +-
 workspaces/config/package.json        |   2 +-
 workspaces/libnpmaccess/package.json  |   2 +-
 workspaces/libnpmdiff/package.json    |   2 +-
 workspaces/libnpmexec/package.json    |   2 +-
 workspaces/libnpmfund/package.json    |   2 +-
 workspaces/libnpmhook/package.json    |   2 +-
 workspaces/libnpmorg/package.json     |   2 +-
 workspaces/libnpmpack/package.json    |   2 +-
 workspaces/libnpmpublish/package.json |   2 +-
 workspaces/libnpmsearch/package.json  |   2 +-
 workspaces/libnpmteam/package.json    |   2 +-
 workspaces/libnpmversion/package.json |   2 +-
 19 files changed, 72 insertions(+), 72 deletions(-)

diff --git a/docs/package.json b/docs/package.json
index 484e765157522..1d363cb1c25bb 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -32,7 +32,7 @@
     "remark-parse": "^10.0.1",
     "remark-rehype": "^10.1.0",
     "semver": "^7.3.8",
-    "tap": "^16.3.4",
+    "tap": "^16.3.8",
     "unified": "^10.1.2",
     "yaml": "^2.2.1"
   },
diff --git a/mock-globals/package.json b/mock-globals/package.json
index 6ea9b0c65936d..aa5fede81c9c5 100644
--- a/mock-globals/package.json
+++ b/mock-globals/package.json
@@ -49,6 +49,6 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.1",
     "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.3.2"
+    "tap": "^16.3.8"
   }
 }
diff --git a/mock-registry/package.json b/mock-registry/package.json
index c88ddc45c3608..841bf00dc6a63 100644
--- a/mock-registry/package.json
+++ b/mock-registry/package.json
@@ -57,6 +57,6 @@
     "nock": "^13.3.0",
     "npm-package-arg": "^11.0.0",
     "pacote": "^17.0.1",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   }
 }
diff --git a/package-lock.json b/package-lock.json
index e85d6598a0778..d468446084ac4 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -175,7 +175,7 @@
         "remark-gfm": "^3.0.1",
         "remark-github": "^11.2.4",
         "spawk": "^1.7.1",
-        "tap": "^16.3.4"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
@@ -198,7 +198,7 @@
         "remark-parse": "^10.0.1",
         "remark-rehype": "^10.1.0",
         "semver": "^7.3.8",
-        "tap": "^16.3.4",
+        "tap": "^16.3.8",
         "unified": "^10.1.2",
         "yaml": "^2.2.1"
       },
@@ -213,7 +213,7 @@
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.1",
         "@npmcli/template-oss": "4.18.0",
-        "tap": "^16.3.2"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
@@ -231,7 +231,7 @@
         "nock": "^13.3.0",
         "npm-package-arg": "^11.0.0",
         "pacote": "^17.0.1",
-        "tap": "^16.3.4"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
@@ -12357,9 +12357,9 @@
       "dev": true
     },
     "node_modules/tap": {
-      "version": "16.3.7",
-      "resolved": "https://registry.npmjs.org/tap/-/tap-16.3.7.tgz",
-      "integrity": "sha512-AaovVsfXVKcIf9eD1NxgwIqSDz5LauvybTpS6bjAKVYqz3+iavHC1abwxTkXmswb2n7eq8qKLt8DvY3D6iWcYA==",
+      "version": "16.3.8",
+      "resolved": "https://registry.npmjs.org/tap/-/tap-16.3.8.tgz",
+      "integrity": "sha512-ARpCLtOFST37MholnZm7JMFikGq0x/T9uBdZH83iuddPNgwDTZQiD8+4x7VABUfVWS0ozKUkmHZ5OOzMI3fLPg==",
       "bundleDependencies": [
         "ink",
         "treport",
@@ -12581,7 +12581,7 @@
       }
     },
     "node_modules/tap/node_modules/@babel/compat-data": {
-      "version": "7.22.5",
+      "version": "7.22.9",
       "dev": true,
       "inBundle": true,
       "license": "MIT",
@@ -12590,26 +12590,26 @@
       }
     },
     "node_modules/tap/node_modules/@babel/core": {
-      "version": "7.22.5",
+      "version": "7.22.9",
       "dev": true,
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
         "@ampproject/remapping": "^2.2.0",
         "@babel/code-frame": "^7.22.5",
-        "@babel/generator": "^7.22.5",
-        "@babel/helper-compilation-targets": "^7.22.5",
-        "@babel/helper-module-transforms": "^7.22.5",
-        "@babel/helpers": "^7.22.5",
-        "@babel/parser": "^7.22.5",
+        "@babel/generator": "^7.22.9",
+        "@babel/helper-compilation-targets": "^7.22.9",
+        "@babel/helper-module-transforms": "^7.22.9",
+        "@babel/helpers": "^7.22.6",
+        "@babel/parser": "^7.22.7",
         "@babel/template": "^7.22.5",
-        "@babel/traverse": "^7.22.5",
+        "@babel/traverse": "^7.22.8",
         "@babel/types": "^7.22.5",
         "convert-source-map": "^1.7.0",
         "debug": "^4.1.0",
         "gensync": "^1.0.0-beta.2",
         "json5": "^2.2.2",
-        "semver": "^6.3.0"
+        "semver": "^6.3.1"
       },
       "engines": {
         "node": ">=6.9.0"
@@ -12620,7 +12620,7 @@
       }
     },
     "node_modules/tap/node_modules/@babel/generator": {
-      "version": "7.22.5",
+      "version": "7.22.9",
       "dev": true,
       "inBundle": true,
       "license": "MIT",
@@ -12647,16 +12647,16 @@
       }
     },
     "node_modules/tap/node_modules/@babel/helper-compilation-targets": {
-      "version": "7.22.5",
+      "version": "7.22.9",
       "dev": true,
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
-        "@babel/compat-data": "^7.22.5",
+        "@babel/compat-data": "^7.22.9",
         "@babel/helper-validator-option": "^7.22.5",
-        "browserslist": "^4.21.3",
+        "browserslist": "^4.21.9",
         "lru-cache": "^5.1.1",
-        "semver": "^6.3.0"
+        "semver": "^6.3.1"
       },
       "engines": {
         "node": ">=6.9.0"
@@ -12712,7 +12712,7 @@
       }
     },
     "node_modules/tap/node_modules/@babel/helper-module-transforms": {
-      "version": "7.22.5",
+      "version": "7.22.9",
       "dev": true,
       "inBundle": true,
       "license": "MIT",
@@ -12720,14 +12720,14 @@
         "@babel/helper-environment-visitor": "^7.22.5",
         "@babel/helper-module-imports": "^7.22.5",
         "@babel/helper-simple-access": "^7.22.5",
-        "@babel/helper-split-export-declaration": "^7.22.5",
-        "@babel/helper-validator-identifier": "^7.22.5",
-        "@babel/template": "^7.22.5",
-        "@babel/traverse": "^7.22.5",
-        "@babel/types": "^7.22.5"
+        "@babel/helper-split-export-declaration": "^7.22.6",
+        "@babel/helper-validator-identifier": "^7.22.5"
       },
       "engines": {
         "node": ">=6.9.0"
+      },
+      "peerDependencies": {
+        "@babel/core": "^7.0.0"
       }
     },
     "node_modules/tap/node_modules/@babel/helper-plugin-utils": {
@@ -12752,7 +12752,7 @@
       }
     },
     "node_modules/tap/node_modules/@babel/helper-split-export-declaration": {
-      "version": "7.22.5",
+      "version": "7.22.6",
       "dev": true,
       "inBundle": true,
       "license": "MIT",
@@ -12791,13 +12791,13 @@
       }
     },
     "node_modules/tap/node_modules/@babel/helpers": {
-      "version": "7.22.5",
+      "version": "7.22.6",
       "dev": true,
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
         "@babel/template": "^7.22.5",
-        "@babel/traverse": "^7.22.5",
+        "@babel/traverse": "^7.22.6",
         "@babel/types": "^7.22.5"
       },
       "engines": {
@@ -12819,7 +12819,7 @@
       }
     },
     "node_modules/tap/node_modules/@babel/parser": {
-      "version": "7.22.5",
+      "version": "7.22.7",
       "dev": true,
       "inBundle": true,
       "license": "MIT",
@@ -12940,18 +12940,18 @@
       }
     },
     "node_modules/tap/node_modules/@babel/traverse": {
-      "version": "7.22.5",
+      "version": "7.22.8",
       "dev": true,
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
         "@babel/code-frame": "^7.22.5",
-        "@babel/generator": "^7.22.5",
+        "@babel/generator": "^7.22.7",
         "@babel/helper-environment-visitor": "^7.22.5",
         "@babel/helper-function-name": "^7.22.5",
         "@babel/helper-hoist-variables": "^7.22.5",
-        "@babel/helper-split-export-declaration": "^7.22.5",
-        "@babel/parser": "^7.22.5",
+        "@babel/helper-split-export-declaration": "^7.22.6",
+        "@babel/parser": "^7.22.7",
         "@babel/types": "^7.22.5",
         "debug": "^4.1.0",
         "globals": "^11.1.0"
@@ -13234,7 +13234,7 @@
       }
     },
     "node_modules/tap/node_modules/caniuse-lite": {
-      "version": "1.0.30001506",
+      "version": "1.0.30001517",
       "dev": true,
       "funding": [
         {
@@ -13465,7 +13465,7 @@
       }
     },
     "node_modules/tap/node_modules/electron-to-chromium": {
-      "version": "1.4.438",
+      "version": "1.4.477",
       "dev": true,
       "inBundle": true,
       "license": "ISC"
@@ -13910,7 +13910,7 @@
       "license": "MIT"
     },
     "node_modules/tap/node_modules/node-releases": {
-      "version": "2.0.12",
+      "version": "2.0.13",
       "dev": true,
       "inBundle": true,
       "license": "MIT"
@@ -14052,7 +14052,7 @@
       }
     },
     "node_modules/tap/node_modules/react-devtools-core": {
-      "version": "4.27.8",
+      "version": "4.28.0",
       "dev": true,
       "inBundle": true,
       "license": "MIT",
@@ -14135,7 +14135,7 @@
       }
     },
     "node_modules/tap/node_modules/semver": {
-      "version": "6.3.0",
+      "version": "6.3.1",
       "dev": true,
       "inBundle": true,
       "license": "ISC",
@@ -15972,7 +15972,7 @@
         "@npmcli/promise-spawn": "^6.0.2",
         "@npmcli/template-oss": "4.18.0",
         "http-proxy": "^1.18.1",
-        "tap": "^16.3.4",
+        "tap": "^16.3.8",
         "which": "^3.0.0"
       },
       "engines": {
@@ -16027,7 +16027,7 @@
         "benchmark": "^2.1.4",
         "minify-registry-metadata": "^3.0.0",
         "nock": "^13.3.0",
-        "tap": "^16.3.4",
+        "tap": "^16.3.8",
         "tar-stream": "^3.0.0",
         "tcompare": "^5.0.6"
       },
@@ -16053,7 +16053,7 @@
         "@npmcli/eslint-config": "^4.0.0",
         "@npmcli/mock-globals": "^1.0.0",
         "@npmcli/template-oss": "4.18.0",
-        "tap": "^16.3.4"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
@@ -16071,7 +16071,7 @@
         "@npmcli/mock-registry": "^1.0.0",
         "@npmcli/template-oss": "4.18.0",
         "nock": "^13.3.0",
-        "tap": "^16.3.4"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
@@ -16094,7 +16094,7 @@
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
         "@npmcli/template-oss": "4.18.0",
-        "tap": "^16.3.4"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
@@ -16124,7 +16124,7 @@
         "chalk": "^5.2.0",
         "just-extend": "^6.2.0",
         "just-safe-set": "^4.2.1",
-        "tap": "^16.3.4"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
@@ -16139,7 +16139,7 @@
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
         "@npmcli/template-oss": "4.18.0",
-        "tap": "^16.3.4"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
@@ -16156,7 +16156,7 @@
         "@npmcli/eslint-config": "^4.0.0",
         "@npmcli/template-oss": "4.18.0",
         "nock": "^13.3.0",
-        "tap": "^16.3.4"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
@@ -16174,7 +16174,7 @@
         "@npmcli/template-oss": "4.18.0",
         "minipass": "^7.0.3",
         "nock": "^13.3.0",
-        "tap": "^16.3.4"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
@@ -16194,7 +16194,7 @@
         "@npmcli/template-oss": "4.18.0",
         "nock": "^13.3.0",
         "spawk": "^1.7.1",
-        "tap": "^16.3.4"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
@@ -16219,7 +16219,7 @@
         "@npmcli/mock-registry": "^1.0.0",
         "@npmcli/template-oss": "4.18.0",
         "nock": "^13.3.0",
-        "tap": "^16.3.4"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
@@ -16235,7 +16235,7 @@
         "@npmcli/eslint-config": "^4.0.0",
         "@npmcli/template-oss": "4.18.0",
         "nock": "^13.3.0",
-        "tap": "^16.3.4"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
@@ -16252,7 +16252,7 @@
         "@npmcli/eslint-config": "^4.0.0",
         "@npmcli/template-oss": "4.18.0",
         "nock": "^13.3.0",
-        "tap": "^16.3.4"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
@@ -16272,7 +16272,7 @@
         "@npmcli/eslint-config": "^4.0.0",
         "@npmcli/template-oss": "4.18.0",
         "require-inject": "^1.4.4",
-        "tap": "^16.3.4"
+        "tap": "^16.3.8"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
diff --git a/package.json b/package.json
index 9c61ee0c3c458..d5e3a383601a5 100644
--- a/package.json
+++ b/package.json
@@ -206,7 +206,7 @@
     "remark-gfm": "^3.0.1",
     "remark-github": "^11.2.4",
     "spawk": "^1.7.1",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   },
   "scripts": {
     "dependencies": "node scripts/bundle-and-gitignore-deps.js && node scripts/dependency-graph.js",
diff --git a/smoke-tests/package.json b/smoke-tests/package.json
index d674ebee81e9d..79f633a6d7976 100644
--- a/smoke-tests/package.json
+++ b/smoke-tests/package.json
@@ -23,7 +23,7 @@
     "@npmcli/promise-spawn": "^6.0.2",
     "@npmcli/template-oss": "4.18.0",
     "http-proxy": "^1.18.1",
-    "tap": "^16.3.4",
+    "tap": "^16.3.8",
     "which": "^3.0.0"
   },
   "author": "GitHub Inc.",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index b3f740adeb018..adeab7e1246d1 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -43,7 +43,7 @@
     "benchmark": "^2.1.4",
     "minify-registry-metadata": "^3.0.0",
     "nock": "^13.3.0",
-    "tap": "^16.3.4",
+    "tap": "^16.3.8",
     "tar-stream": "^3.0.0",
     "tcompare": "^5.0.6"
   },
diff --git a/workspaces/config/package.json b/workspaces/config/package.json
index cadac2e88aa47..d5f3d51f3052b 100644
--- a/workspaces/config/package.json
+++ b/workspaces/config/package.json
@@ -33,7 +33,7 @@
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/mock-globals": "^1.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   },
   "dependencies": {
     "@npmcli/map-workspaces": "^3.0.2",
diff --git a/workspaces/libnpmaccess/package.json b/workspaces/libnpmaccess/package.json
index 87a0921c198bb..6080d9465bdd2 100644
--- a/workspaces/libnpmaccess/package.json
+++ b/workspaces/libnpmaccess/package.json
@@ -19,7 +19,7 @@
     "@npmcli/mock-registry": "^1.0.0",
     "@npmcli/template-oss": "4.18.0",
     "nock": "^13.3.0",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   },
   "repository": {
     "type": "git",
diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json
index 8f19809199c8c..8a9f82a675ce0 100644
--- a/workspaces/libnpmdiff/package.json
+++ b/workspaces/libnpmdiff/package.json
@@ -43,7 +43,7 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   },
   "dependencies": {
     "@npmcli/arborist": "^6.3.0",
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index 9a35feb28f0dd..f4d199602cb2d 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -56,7 +56,7 @@
     "chalk": "^5.2.0",
     "just-extend": "^6.2.0",
     "just-safe-set": "^4.2.1",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   },
   "dependencies": {
     "@npmcli/arborist": "^6.3.0",
diff --git a/workspaces/libnpmfund/package.json b/workspaces/libnpmfund/package.json
index 0c863c2f92203..0ad2bfc924876 100644
--- a/workspaces/libnpmfund/package.json
+++ b/workspaces/libnpmfund/package.json
@@ -42,7 +42,7 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   },
   "dependencies": {
     "@npmcli/arborist": "^6.3.0"
diff --git a/workspaces/libnpmhook/package.json b/workspaces/libnpmhook/package.json
index 6ba8ae8cce833..b77a18bd3f38f 100644
--- a/workspaces/libnpmhook/package.json
+++ b/workspaces/libnpmhook/package.json
@@ -37,7 +37,7 @@
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
     "nock": "^13.3.0",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   },
   "engines": {
     "node": "^16.14.0 || >=18.0.0"
diff --git a/workspaces/libnpmorg/package.json b/workspaces/libnpmorg/package.json
index ae6d61c480859..5aa13607dfc88 100644
--- a/workspaces/libnpmorg/package.json
+++ b/workspaces/libnpmorg/package.json
@@ -31,7 +31,7 @@
     "@npmcli/template-oss": "4.18.0",
     "minipass": "^7.0.3",
     "nock": "^13.3.0",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   },
   "repository": {
     "type": "git",
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index f461bf76c783b..d8e8e00d9f926 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -26,7 +26,7 @@
     "@npmcli/template-oss": "4.18.0",
     "nock": "^13.3.0",
     "spawk": "^1.7.1",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   },
   "repository": {
     "type": "git",
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index fb5daa47c8d93..2b59a773142f9 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -28,7 +28,7 @@
     "@npmcli/mock-registry": "^1.0.0",
     "@npmcli/template-oss": "4.18.0",
     "nock": "^13.3.0",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   },
   "repository": {
     "type": "git",
diff --git a/workspaces/libnpmsearch/package.json b/workspaces/libnpmsearch/package.json
index fc2df01ee15f5..9a5824f5ca734 100644
--- a/workspaces/libnpmsearch/package.json
+++ b/workspaces/libnpmsearch/package.json
@@ -28,7 +28,7 @@
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
     "nock": "^13.3.0",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   },
   "repository": {
     "type": "git",
diff --git a/workspaces/libnpmteam/package.json b/workspaces/libnpmteam/package.json
index 38632f66bf4aa..e7781fad3a3a1 100644
--- a/workspaces/libnpmteam/package.json
+++ b/workspaces/libnpmteam/package.json
@@ -18,7 +18,7 @@
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
     "nock": "^13.3.0",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   },
   "repository": {
     "type": "git",
diff --git a/workspaces/libnpmversion/package.json b/workspaces/libnpmversion/package.json
index aaaba35ad9e4b..2798ec4260bcf 100644
--- a/workspaces/libnpmversion/package.json
+++ b/workspaces/libnpmversion/package.json
@@ -34,7 +34,7 @@
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
     "require-inject": "^1.4.4",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   },
   "dependencies": {
     "@npmcli/git": "^5.0.1",

From 5bd0bfde9b94f49f603258c390c8aac92c73484b Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Thu, 17 Aug 2023 12:52:17 -0700
Subject: [PATCH 52/68] chore: nock@13.3.3

---
 mock-registry/package.json            |  2 +-
 package-lock.json                     | 26 +++++++++++++-------------
 package.json                          |  2 +-
 workspaces/arborist/package.json      |  2 +-
 workspaces/libnpmaccess/package.json  |  2 +-
 workspaces/libnpmhook/package.json    |  2 +-
 workspaces/libnpmorg/package.json     |  2 +-
 workspaces/libnpmpack/package.json    |  2 +-
 workspaces/libnpmpublish/package.json |  2 +-
 workspaces/libnpmsearch/package.json  |  2 +-
 workspaces/libnpmteam/package.json    |  2 +-
 11 files changed, 23 insertions(+), 23 deletions(-)

diff --git a/mock-registry/package.json b/mock-registry/package.json
index 841bf00dc6a63..cf80f303096b7 100644
--- a/mock-registry/package.json
+++ b/mock-registry/package.json
@@ -54,7 +54,7 @@
     "@npmcli/eslint-config": "^4.0.1",
     "@npmcli/template-oss": "4.18.0",
     "json-stringify-safe": "^5.0.1",
-    "nock": "^13.3.0",
+    "nock": "^13.3.3",
     "npm-package-arg": "^11.0.0",
     "pacote": "^17.0.1",
     "tap": "^16.3.8"
diff --git a/package-lock.json b/package-lock.json
index d468446084ac4..88ee48cfed3e2 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -169,7 +169,7 @@
         "@tufjs/repo-mock": "^1.3.1",
         "diff": "^5.1.0",
         "licensee": "^10.0.0",
-        "nock": "^13.3.0",
+        "nock": "^13.3.3",
         "npm-packlist": "^7.0.4",
         "remark": "^14.0.2",
         "remark-gfm": "^3.0.1",
@@ -228,7 +228,7 @@
         "@npmcli/eslint-config": "^4.0.1",
         "@npmcli/template-oss": "4.18.0",
         "json-stringify-safe": "^5.0.1",
-        "nock": "^13.3.0",
+        "nock": "^13.3.3",
         "npm-package-arg": "^11.0.0",
         "pacote": "^17.0.1",
         "tap": "^16.3.8"
@@ -9332,9 +9332,9 @@
       "dev": true
     },
     "node_modules/nock": {
-      "version": "13.3.2",
-      "resolved": "https://registry.npmjs.org/nock/-/nock-13.3.2.tgz",
-      "integrity": "sha512-CwbljitiWJhF1gL83NbanhoKs1l23TDlRioNraPTZrzZIEooPemrHRj5m0FZCPkB1ecdYCSWWGcHysJgX/ngnQ==",
+      "version": "13.3.3",
+      "resolved": "https://registry.npmjs.org/nock/-/nock-13.3.3.tgz",
+      "integrity": "sha512-z+KUlILy9SK/RjpeXDiDUEAq4T94ADPHE3qaRkf66mpEhzc/ytOMm3Bwdrbq6k1tMWkbdujiKim3G2tfQARuJw==",
       "dev": true,
       "dependencies": {
         "debug": "^4.1.0",
@@ -16026,7 +16026,7 @@
         "@npmcli/template-oss": "4.18.0",
         "benchmark": "^2.1.4",
         "minify-registry-metadata": "^3.0.0",
-        "nock": "^13.3.0",
+        "nock": "^13.3.3",
         "tap": "^16.3.8",
         "tar-stream": "^3.0.0",
         "tcompare": "^5.0.6"
@@ -16070,7 +16070,7 @@
         "@npmcli/eslint-config": "^4.0.0",
         "@npmcli/mock-registry": "^1.0.0",
         "@npmcli/template-oss": "4.18.0",
-        "nock": "^13.3.0",
+        "nock": "^13.3.3",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -16155,7 +16155,7 @@
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
         "@npmcli/template-oss": "4.18.0",
-        "nock": "^13.3.0",
+        "nock": "^13.3.3",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -16173,7 +16173,7 @@
         "@npmcli/eslint-config": "^4.0.0",
         "@npmcli/template-oss": "4.18.0",
         "minipass": "^7.0.3",
-        "nock": "^13.3.0",
+        "nock": "^13.3.3",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -16192,7 +16192,7 @@
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
         "@npmcli/template-oss": "4.18.0",
-        "nock": "^13.3.0",
+        "nock": "^13.3.3",
         "spawk": "^1.7.1",
         "tap": "^16.3.8"
       },
@@ -16218,7 +16218,7 @@
         "@npmcli/mock-globals": "^1.0.0",
         "@npmcli/mock-registry": "^1.0.0",
         "@npmcli/template-oss": "4.18.0",
-        "nock": "^13.3.0",
+        "nock": "^13.3.3",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -16234,7 +16234,7 @@
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
         "@npmcli/template-oss": "4.18.0",
-        "nock": "^13.3.0",
+        "nock": "^13.3.3",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -16251,7 +16251,7 @@
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
         "@npmcli/template-oss": "4.18.0",
-        "nock": "^13.3.0",
+        "nock": "^13.3.3",
         "tap": "^16.3.8"
       },
       "engines": {
diff --git a/package.json b/package.json
index d5e3a383601a5..f03a8635c8c98 100644
--- a/package.json
+++ b/package.json
@@ -200,7 +200,7 @@
     "@tufjs/repo-mock": "^1.3.1",
     "diff": "^5.1.0",
     "licensee": "^10.0.0",
-    "nock": "^13.3.0",
+    "nock": "^13.3.3",
     "npm-packlist": "^7.0.4",
     "remark": "^14.0.2",
     "remark-gfm": "^3.0.1",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index adeab7e1246d1..844ecf4b09bac 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -42,7 +42,7 @@
     "@npmcli/template-oss": "4.18.0",
     "benchmark": "^2.1.4",
     "minify-registry-metadata": "^3.0.0",
-    "nock": "^13.3.0",
+    "nock": "^13.3.3",
     "tap": "^16.3.8",
     "tar-stream": "^3.0.0",
     "tcompare": "^5.0.6"
diff --git a/workspaces/libnpmaccess/package.json b/workspaces/libnpmaccess/package.json
index 6080d9465bdd2..acd9f8db3ebd1 100644
--- a/workspaces/libnpmaccess/package.json
+++ b/workspaces/libnpmaccess/package.json
@@ -18,7 +18,7 @@
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/mock-registry": "^1.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "nock": "^13.3.0",
+    "nock": "^13.3.3",
     "tap": "^16.3.8"
   },
   "repository": {
diff --git a/workspaces/libnpmhook/package.json b/workspaces/libnpmhook/package.json
index b77a18bd3f38f..ae25d7a8505b8 100644
--- a/workspaces/libnpmhook/package.json
+++ b/workspaces/libnpmhook/package.json
@@ -36,7 +36,7 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "nock": "^13.3.0",
+    "nock": "^13.3.3",
     "tap": "^16.3.8"
   },
   "engines": {
diff --git a/workspaces/libnpmorg/package.json b/workspaces/libnpmorg/package.json
index 5aa13607dfc88..817ecbdc58ceb 100644
--- a/workspaces/libnpmorg/package.json
+++ b/workspaces/libnpmorg/package.json
@@ -30,7 +30,7 @@
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
     "minipass": "^7.0.3",
-    "nock": "^13.3.0",
+    "nock": "^13.3.3",
     "tap": "^16.3.8"
   },
   "repository": {
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index d8e8e00d9f926..439e5deb119f4 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -24,7 +24,7 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "nock": "^13.3.0",
+    "nock": "^13.3.3",
     "spawk": "^1.7.1",
     "tap": "^16.3.8"
   },
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index 2b59a773142f9..7dd2809873e4f 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -27,7 +27,7 @@
     "@npmcli/mock-globals": "^1.0.0",
     "@npmcli/mock-registry": "^1.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "nock": "^13.3.0",
+    "nock": "^13.3.3",
     "tap": "^16.3.8"
   },
   "repository": {
diff --git a/workspaces/libnpmsearch/package.json b/workspaces/libnpmsearch/package.json
index 9a5824f5ca734..8b59bc7ac987c 100644
--- a/workspaces/libnpmsearch/package.json
+++ b/workspaces/libnpmsearch/package.json
@@ -27,7 +27,7 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "nock": "^13.3.0",
+    "nock": "^13.3.3",
     "tap": "^16.3.8"
   },
   "repository": {
diff --git a/workspaces/libnpmteam/package.json b/workspaces/libnpmteam/package.json
index e7781fad3a3a1..ea402b29eb5d8 100644
--- a/workspaces/libnpmteam/package.json
+++ b/workspaces/libnpmteam/package.json
@@ -17,7 +17,7 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "nock": "^13.3.0",
+    "nock": "^13.3.3",
     "tap": "^16.3.8"
   },
   "repository": {

From dd1a9f58ebb0f42edd63fd4b0bd9c85fe088bfd3 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Thu, 17 Aug 2023 12:53:20 -0700
Subject: [PATCH 53/68] chore: audit fix for semver in nested dev deps
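
The lockfile-only changes below bump every nested dev-dependency copy of
semver past the versions flagged by npm audit (5.7.1 to 5.7.2, 6.3.0 to
6.3.1, and, via the @commitlint/is-ignored update, 7.3.8 to 7.5.4) without
touching any direct dependency. A hypothetical check (assuming the
advisory's patched versions are 5.7.2, 6.3.1, and 7.5.2) that the fix
reached every nested copy:

    // hypothetical: flag any lockfile copy of semver still below the
    // assumed patched version for its major line
    const semver = require('semver')
    const lock = require('./package-lock.json')

    for (const [path, meta] of Object.entries(lock.packages)) {
      if (!path.endsWith('node_modules/semver')) continue
      const fixed = { 5: '5.7.2', 6: '6.3.1', 7: '7.5.2' }[semver.major(meta.version)]
      if (fixed && semver.lt(meta.version, fixed)) {
        console.log('needs bump:', path, meta.version)
      }
    }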

---
 package-lock.json | 89 +++++++++++++++++------------------------------
 1 file changed, 31 insertions(+), 58 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 88ee48cfed3e2..2144542b2f5f8 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -321,9 +321,9 @@
       }
     },
     "node_modules/@babel/core/node_modules/semver": {
-      "version": "6.3.0",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
-      "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+      "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "bin": {
         "semver": "bin/semver.js"
@@ -373,9 +373,9 @@
       }
     },
     "node_modules/@babel/helper-compilation-targets/node_modules/semver": {
-      "version": "6.3.0",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
-      "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+      "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "bin": {
         "semver": "bin/semver.js"
@@ -805,45 +805,18 @@
       }
     },
     "node_modules/@commitlint/is-ignored": {
-      "version": "17.4.4",
-      "resolved": "https://registry.npmjs.org/@commitlint/is-ignored/-/is-ignored-17.4.4.tgz",
-      "integrity": "sha512-Y3eo1SFJ2JQDik4rWkBC4tlRIxlXEFrRWxcyrzb1PUT2k3kZ/XGNuCDfk/u0bU2/yS0tOA/mTjFsV+C4qyACHw==",
+      "version": "17.7.0",
+      "resolved": "https://registry.npmjs.org/@commitlint/is-ignored/-/is-ignored-17.7.0.tgz",
+      "integrity": "sha512-043rA7m45tyEfW7Zv2vZHF++176MLHH9h70fnPoYlB1slKBeKl8BwNIlnPg4xBdRBVNPaCqvXxWswx2GR4c9Hw==",
       "dev": true,
       "dependencies": {
         "@commitlint/types": "^17.4.4",
-        "semver": "7.3.8"
+        "semver": "7.5.4"
       },
       "engines": {
         "node": ">=v14"
       }
     },
-    "node_modules/@commitlint/is-ignored/node_modules/lru-cache": {
-      "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
-      "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
-      "dev": true,
-      "dependencies": {
-        "yallist": "^4.0.0"
-      },
-      "engines": {
-        "node": ">=10"
-      }
-    },
-    "node_modules/@commitlint/is-ignored/node_modules/semver": {
-      "version": "7.3.8",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
-      "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
-      "dev": true,
-      "dependencies": {
-        "lru-cache": "^6.0.0"
-      },
-      "bin": {
-        "semver": "bin/semver.js"
-      },
-      "engines": {
-        "node": ">=10"
-      }
-    },
     "node_modules/@commitlint/lint": {
       "version": "17.6.1",
       "resolved": "https://registry.npmjs.org/@commitlint/lint/-/lint-17.6.1.tgz",
@@ -4203,9 +4176,9 @@
       }
     },
     "node_modules/conventional-changelog-writer/node_modules/semver": {
-      "version": "6.3.0",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
-      "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+      "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "bin": {
         "semver": "bin/semver.js"
@@ -5222,9 +5195,9 @@
       }
     },
     "node_modules/eslint-plugin-import/node_modules/semver": {
-      "version": "6.3.0",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
-      "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+      "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "peer": true,
       "bin": {
@@ -5277,9 +5250,9 @@
       }
     },
     "node_modules/eslint-plugin-node/node_modules/semver": {
-      "version": "6.3.0",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
-      "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+      "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "peer": true,
       "bin": {
@@ -7214,9 +7187,9 @@
       }
     },
     "node_modules/istanbul-lib-instrument/node_modules/semver": {
-      "version": "6.3.0",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
-      "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+      "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "bin": {
         "semver": "bin/semver.js"
@@ -7967,9 +7940,9 @@
       }
     },
     "node_modules/make-dir/node_modules/semver": {
-      "version": "6.3.0",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
-      "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+      "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "bin": {
         "semver": "bin/semver.js"
@@ -11012,9 +10985,9 @@
       }
     },
     "node_modules/read-pkg/node_modules/semver": {
-      "version": "5.7.1",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
-      "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
+      "version": "5.7.2",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz",
+      "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==",
       "dev": true,
       "bin": {
         "semver": "bin/semver"
@@ -15778,9 +15751,9 @@
       }
     },
     "node_modules/write-json-file/node_modules/semver": {
-      "version": "5.7.1",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
-      "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
+      "version": "5.7.2",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz",
+      "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==",
       "dev": true,
       "bin": {
         "semver": "bin/semver"

From 54a2535eed0746efd3e8ede6feaec2e3acb5eb3d Mon Sep 17 00:00:00 2001
From: Brian DeHamer 
Date: Fri, 18 Aug 2023 10:07:51 -0700
Subject: [PATCH 54/68] deps: bump sigstore from 1.7.0 to 2.0.0
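
sigstore 2.x moves functionality that previously shipped inside the single
sigstore package into scoped modules; the diffstat below accordingly
vendors @sigstore/bundle and @sigstore/sign (plus nested copies of
@sigstore/protobuf-specs) alongside the existing @sigstore/tuf. A
hypothetical one-off to list the vendored @sigstore packages after the
bump:

    // hypothetical: print name@version for each vendored @sigstore package
    const { readdirSync, readFileSync } = require('fs')
    const { join } = require('path')

    const root = join(process.cwd(), 'node_modules', '@sigstore')
    for (const dir of readdirSync(root)) {
      const pkg = JSON.parse(readFileSync(join(root, dir, 'package.json'), 'utf8'))
      console.log(pkg.name + '@' + pkg.version)
    }
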

Signed-off-by: Brian DeHamer 
---
 DEPENDENCIES.md                               |    8 +-
 lib/commands/audit.js                         |    6 +-
 node_modules/.gitignore                       |   39 +-
 node_modules/@sigstore/bundle/LICENSE         |  202 +++
 node_modules/@sigstore/bundle/dist/build.js   |   89 ++
 node_modules/@sigstore/bundle/dist/bundle.js  |   22 +
 node_modules/@sigstore/bundle/dist/error.js   |   25 +
 node_modules/@sigstore/bundle/dist/index.js   |   40 +
 .../@sigstore/bundle/dist/serialized.js       |   38 +
 node_modules/@sigstore/bundle/dist/utility.js |    2 +
 .../@sigstore/bundle/dist/validate.js         |  160 ++
 .../@sigstore/protobuf-specs/LICENSE          |  202 +++
 .../dist/__generated__/envelope.js            |   89 ++
 .../dist/__generated__/events.js              |  185 +++
 .../google/api/field_behavior.js              |  119 ++
 .../dist/__generated__/google/protobuf/any.js |   65 +
 .../google/protobuf/descriptor.js             | 1308 +++++++++++++++++
 .../google/protobuf/timestamp.js              |   24 +
 .../dist/__generated__/sigstore_bundle.js     |  106 ++
 .../dist/__generated__/sigstore_common.js     |  457 ++++++
 .../dist/__generated__/sigstore_rekor.js      |  167 +++
 .../dist/__generated__/sigstore_trustroot.js  |  103 ++
 .../__generated__/sigstore_verification.js    |  273 ++++
 .../@sigstore/protobuf-specs/dist/index.js    |   37 +
 .../@sigstore/protobuf-specs/package.json     |   31 +
 node_modules/@sigstore/bundle/package.json    |   35 +
 node_modules/@sigstore/sign/LICENSE           |  202 +++
 .../@sigstore/sign/dist/bundler/base.js       |   50 +
 .../@sigstore/sign/dist/bundler/bundle.js     |   70 +
 .../@sigstore/sign/dist/bundler/dsse.js       |   45 +
 .../@sigstore/sign/dist/bundler/index.js      |    7 +
 .../@sigstore/sign/dist/bundler/message.js    |   30 +
 node_modules/@sigstore/sign/dist/error.js     |   12 +
 .../@sigstore/sign/dist/external/error.js     |   21 +
 .../@sigstore/sign/dist/external/fulcio.js    |   51 +
 .../@sigstore/sign/dist/external/rekor.js     |  115 ++
 .../@sigstore/sign/dist/external/tsa.js       |   47 +
 .../@sigstore/sign/dist/identity/ci.js        |   73 +
 .../@sigstore/sign/dist/identity/index.js     |   20 +
 .../@sigstore/sign/dist/identity/provider.js  |    2 +
 node_modules/@sigstore/sign/dist/index.js     |   15 +
 .../@sigstore/sign/dist/signer/fulcio/ca.js   |   64 +
 .../sign/dist/signer/fulcio/ephemeral.js      |   45 +
 .../sign/dist/signer/fulcio/index.js          |   73 +
 .../@sigstore/sign/dist/signer/index.js       |   20 +
 .../@sigstore/sign/dist/signer/signer.js      |   17 +
 .../@sigstore/sign/dist/types/fetch.js        |    2 +
 .../@sigstore/sign/dist/util/crypto.js        |   27 +
 node_modules/@sigstore/sign/dist/util/dsse.js |   25 +
 .../@sigstore/sign/dist/util/encoding.js      |   28 +
 .../@sigstore/sign/dist/util/index.js         |   48 +
 node_modules/@sigstore/sign/dist/util/json.js |   61 +
 node_modules/@sigstore/sign/dist/util/oidc.js |   54 +
 node_modules/@sigstore/sign/dist/util/pem.js  |   27 +
 node_modules/@sigstore/sign/dist/util/ua.js   |   33 +
 .../@sigstore/sign/dist/witness/index.js      |   22 +
 .../sign/dist/witness/tlog/client.js          |   69 +
 .../@sigstore/sign/dist/witness/tlog/entry.js |  136 ++
 .../@sigstore/sign/dist/witness/tlog/index.js |   77 +
 .../@sigstore/sign/dist/witness/tsa/client.js |   47 +
 .../@sigstore/sign/dist/witness/tsa/index.js  |   44 +
 .../@sigstore/sign/dist/witness/witness.js    |    2 +
 .../@sigstore/protobuf-specs/LICENSE          |  202 +++
 .../dist/__generated__/envelope.js            |   89 ++
 .../dist/__generated__/events.js              |  185 +++
 .../google/api/field_behavior.js              |  119 ++
 .../dist/__generated__/google/protobuf/any.js |   65 +
 .../google/protobuf/descriptor.js             | 1308 +++++++++++++++++
 .../google/protobuf/timestamp.js              |   24 +
 .../dist/__generated__/sigstore_bundle.js     |  106 ++
 .../dist/__generated__/sigstore_common.js     |  457 ++++++
 .../dist/__generated__/sigstore_rekor.js      |  167 +++
 .../dist/__generated__/sigstore_trustroot.js  |  103 ++
 .../__generated__/sigstore_verification.js    |  273 ++++
 .../@sigstore/protobuf-specs/dist/index.js    |   37 +
 .../@sigstore/protobuf-specs/package.json     |   31 +
 node_modules/@sigstore/sign/package.json      |   42 +
 node_modules/@sigstore/tuf/dist/client.js     |   15 +-
 .../@sigstore/protobuf-specs/LICENSE          |  202 +++
 .../dist/__generated__/envelope.js            |   89 ++
 .../dist/__generated__/events.js              |  185 +++
 .../google/api/field_behavior.js              |  119 ++
 .../dist/__generated__/google/protobuf/any.js |   65 +
 .../google/protobuf/descriptor.js             | 1308 +++++++++++++++++
 .../google/protobuf/timestamp.js              |   24 +
 .../dist/__generated__/sigstore_bundle.js     |  106 ++
 .../dist/__generated__/sigstore_common.js     |  457 ++++++
 .../dist/__generated__/sigstore_rekor.js      |  167 +++
 .../dist/__generated__/sigstore_trustroot.js  |  103 ++
 .../__generated__/sigstore_verification.js    |  273 ++++
 .../@sigstore/protobuf-specs/dist/index.js    |   37 +
 .../@sigstore/protobuf-specs/package.json     |   31 +
 node_modules/@sigstore/tuf/package.json       |   10 +-
 .../@tufjs/canonical-json/package.json        |   10 +-
 node_modules/@tufjs/models/dist/base.js       |    2 +-
 node_modules/@tufjs/models/package.json       |   12 +-
 .../node_modules/@sigstore/tuf/LICENSE        |  202 +++
 .../@sigstore/tuf/dist/appdata.js             |   44 +
 .../node_modules/@sigstore/tuf/dist/client.js |  101 ++
 .../node_modules/@sigstore/tuf/dist/error.js  |   12 +
 .../node_modules/@sigstore/tuf/dist/index.js  |   55 +
 .../node_modules/@sigstore/tuf/dist/target.js |   80 +
 .../@sigstore/protobuf-specs/LICENSE          |  202 +++
 .../dist/__generated__/envelope.js            |   89 ++
 .../dist/__generated__/events.js              |  185 +++
 .../google/api/field_behavior.js              |  119 ++
 .../dist/__generated__/google/protobuf/any.js |   65 +
 .../google/protobuf/descriptor.js             | 1308 +++++++++++++++++
 .../google/protobuf/timestamp.js              |   24 +
 .../dist/__generated__/sigstore_bundle.js     |  106 ++
 .../dist/__generated__/sigstore_common.js     |  457 ++++++
 .../dist/__generated__/sigstore_rekor.js      |  167 +++
 .../dist/__generated__/sigstore_trustroot.js  |  103 ++
 .../__generated__/sigstore_verification.js    |  273 ++++
 .../@sigstore/protobuf-specs/dist/index.js    |   37 +
 .../@sigstore/protobuf-specs/package.json     |   31 +
 .../node_modules/@sigstore/tuf/package.json   |   41 +
 .../tuf/store/public-good-instance-root.json  |    1 +
 .../@tufjs/canonical-json/LICENSE             |   21 +
 .../@tufjs/canonical-json/lib/index.js        |   64 +
 .../@tufjs/canonical-json/package.json        |   39 +
 .../node_modules/@tufjs/models/LICENSE        |   21 +
 .../node_modules/@tufjs/models/dist/base.js   |   83 ++
 .../@tufjs/models/dist/delegations.js         |  115 ++
 .../node_modules/@tufjs/models/dist/error.js  |   27 +
 .../node_modules/@tufjs/models/dist/file.js   |  183 +++
 .../node_modules/@tufjs/models/dist/index.js  |   24 +
 .../node_modules/@tufjs/models/dist/key.js    |   85 ++
 .../@tufjs/models/dist/metadata.js            |  158 ++
 .../node_modules/@tufjs/models/dist/role.js   |  299 ++++
 .../node_modules/@tufjs/models/dist/root.js   |  116 ++
 .../@tufjs/models/dist/signature.js           |   38 +
 .../@tufjs/models/dist/snapshot.js            |   71 +
 .../@tufjs/models/dist/targets.js             |   92 ++
 .../@tufjs/models/dist/timestamp.js           |   58 +
 .../@tufjs/models/dist/utils/guard.js         |   33 +
 .../@tufjs/models/dist/utils/index.js         |   28 +
 .../@tufjs/models/dist/utils/key.js           |  143 ++
 .../@tufjs/models/dist/utils/oid.js           |   27 +
 .../@tufjs/models/dist/utils/types.js         |    2 +
 .../@tufjs/models/dist/utils/verify.js        |   13 +
 .../node_modules/@tufjs/models/package.json   |   41 +
 .../sigstore/node_modules/tuf-js/LICENSE      |   21 +
 .../node_modules/tuf-js/dist/config.js        |   14 +
 .../node_modules/tuf-js/dist/error.js         |   48 +
 .../node_modules/tuf-js/dist/fetcher.js       |   84 ++
 .../node_modules/tuf-js/dist/index.js         |    9 +
 .../node_modules/tuf-js/dist/store.js         |  208 +++
 .../node_modules/tuf-js/dist/updater.js       |  320 ++++
 .../node_modules/tuf-js/dist/utils/tmpfile.js |   25 +
 .../node_modules/tuf-js/dist/utils/url.js     |   14 +
 .../sigstore/node_modules/tuf-js/package.json |   46 +
 node_modules/tuf-js/dist/config.js            |    3 +-
 node_modules/tuf-js/dist/fetcher.js           |    4 +-
 node_modules/tuf-js/dist/updater.js           |    4 +-
 .../tuf-js/node_modules/cacache/LICENSE.md    |   16 -
 .../node_modules/cacache/lib/content/path.js  |   29 -
 .../node_modules/cacache/lib/content/read.js  |  166 ---
 .../node_modules/cacache/lib/content/rm.js    |   18 -
 .../node_modules/cacache/lib/content/write.js |  205 ---
 .../node_modules/cacache/lib/entry-index.js   |  330 -----
 .../tuf-js/node_modules/cacache/lib/get.js    |  170 ---
 .../tuf-js/node_modules/cacache/lib/index.js  |   42 -
 .../node_modules/cacache/lib/memoization.js   |   72 -
 .../tuf-js/node_modules/cacache/lib/put.js    |   80 -
 .../tuf-js/node_modules/cacache/lib/rm.js     |   31 -
 .../node_modules/cacache/lib/util/glob.js     |    7 -
 .../cacache/lib/util/hash-to-segments.js      |    7 -
 .../node_modules/cacache/lib/util/tmp.js      |   26 -
 .../tuf-js/node_modules/cacache/lib/verify.js |  257 ----
 .../cacache/node_modules/minipass/LICENSE     |   15 -
 .../node_modules/minipass/dist/cjs/index.js   | 1028 -------------
 .../minipass/dist/cjs/package.json            |    3 -
 .../node_modules/minipass/dist/mjs/index.js   | 1018 -------------
 .../minipass/dist/mjs/package.json            |    3 -
 .../node_modules/minipass/package.json        |   82 --
 .../tuf-js/node_modules/cacache/package.json  |   82 --
 .../tuf-js/node_modules/lru-cache/LICENSE     |   15 -
 .../tuf-js/node_modules/lru-cache/index.js    | 1227 ----------------
 .../tuf-js/node_modules/lru-cache/index.mjs   | 1227 ----------------
 .../node_modules/lru-cache/package.json       |   96 --
 .../node_modules/make-fetch-happen/LICENSE    |   16 -
 .../make-fetch-happen/lib/agent.js            |  214 ---
 .../make-fetch-happen/lib/cache/entry.js      |  469 ------
 .../make-fetch-happen/lib/cache/errors.js     |   11 -
 .../make-fetch-happen/lib/cache/index.js      |   49 -
 .../make-fetch-happen/lib/cache/key.js        |   17 -
 .../make-fetch-happen/lib/cache/policy.js     |  161 --
 .../node_modules/make-fetch-happen/lib/dns.js |   49 -
 .../make-fetch-happen/lib/fetch.js            |  118 --
 .../make-fetch-happen/lib/index.js            |   41 -
 .../make-fetch-happen/lib/options.js          |   54 -
 .../make-fetch-happen/lib/pipeline.js         |   41 -
 .../make-fetch-happen/lib/remote.js           |  121 --
 .../make-fetch-happen/package.json            |   78 -
 .../tuf-js/node_modules/minipass/LICENSE      |   15 -
 .../tuf-js/node_modules/minipass/index.js     |  702 ---------
 .../tuf-js/node_modules/minipass/index.mjs    |  702 ---------
 .../tuf-js/node_modules/minipass/package.json |   76 -
 node_modules/tuf-js/package.json              |   15 +-
 package-lock.json                             |  267 ++--
 package.json                                  |    6 +-
 test/lib/commands/audit.js                    |   14 +-
 workspaces/libnpmpublish/lib/provenance.js    |    2 +-
 workspaces/libnpmpublish/package.json         |    2 +-
 workspaces/libnpmpublish/test/publish.js      |    2 +-
 206 files changed, 18343 insertions(+), 9370 deletions(-)
 create mode 100644 node_modules/@sigstore/bundle/LICENSE
 create mode 100644 node_modules/@sigstore/bundle/dist/build.js
 create mode 100644 node_modules/@sigstore/bundle/dist/bundle.js
 create mode 100644 node_modules/@sigstore/bundle/dist/error.js
 create mode 100644 node_modules/@sigstore/bundle/dist/index.js
 create mode 100644 node_modules/@sigstore/bundle/dist/serialized.js
 create mode 100644 node_modules/@sigstore/bundle/dist/utility.js
 create mode 100644 node_modules/@sigstore/bundle/dist/validate.js
 create mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/LICENSE
 create mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 create mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 create mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 create mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 create mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 create mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 create mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 create mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 create mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 create mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 create mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 create mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js
 create mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json
 create mode 100644 node_modules/@sigstore/bundle/package.json
 create mode 100644 node_modules/@sigstore/sign/LICENSE
 create mode 100644 node_modules/@sigstore/sign/dist/bundler/base.js
 create mode 100644 node_modules/@sigstore/sign/dist/bundler/bundle.js
 create mode 100644 node_modules/@sigstore/sign/dist/bundler/dsse.js
 create mode 100644 node_modules/@sigstore/sign/dist/bundler/index.js
 create mode 100644 node_modules/@sigstore/sign/dist/bundler/message.js
 create mode 100644 node_modules/@sigstore/sign/dist/error.js
 create mode 100644 node_modules/@sigstore/sign/dist/external/error.js
 create mode 100644 node_modules/@sigstore/sign/dist/external/fulcio.js
 create mode 100644 node_modules/@sigstore/sign/dist/external/rekor.js
 create mode 100644 node_modules/@sigstore/sign/dist/external/tsa.js
 create mode 100644 node_modules/@sigstore/sign/dist/identity/ci.js
 create mode 100644 node_modules/@sigstore/sign/dist/identity/index.js
 create mode 100644 node_modules/@sigstore/sign/dist/identity/provider.js
 create mode 100644 node_modules/@sigstore/sign/dist/index.js
 create mode 100644 node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
 create mode 100644 node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
 create mode 100644 node_modules/@sigstore/sign/dist/signer/fulcio/index.js
 create mode 100644 node_modules/@sigstore/sign/dist/signer/index.js
 create mode 100644 node_modules/@sigstore/sign/dist/signer/signer.js
 create mode 100644 node_modules/@sigstore/sign/dist/types/fetch.js
 create mode 100644 node_modules/@sigstore/sign/dist/util/crypto.js
 create mode 100644 node_modules/@sigstore/sign/dist/util/dsse.js
 create mode 100644 node_modules/@sigstore/sign/dist/util/encoding.js
 create mode 100644 node_modules/@sigstore/sign/dist/util/index.js
 create mode 100644 node_modules/@sigstore/sign/dist/util/json.js
 create mode 100644 node_modules/@sigstore/sign/dist/util/oidc.js
 create mode 100644 node_modules/@sigstore/sign/dist/util/pem.js
 create mode 100644 node_modules/@sigstore/sign/dist/util/ua.js
 create mode 100644 node_modules/@sigstore/sign/dist/witness/index.js
 create mode 100644 node_modules/@sigstore/sign/dist/witness/tlog/client.js
 create mode 100644 node_modules/@sigstore/sign/dist/witness/tlog/entry.js
 create mode 100644 node_modules/@sigstore/sign/dist/witness/tlog/index.js
 create mode 100644 node_modules/@sigstore/sign/dist/witness/tsa/client.js
 create mode 100644 node_modules/@sigstore/sign/dist/witness/tsa/index.js
 create mode 100644 node_modules/@sigstore/sign/dist/witness/witness.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json
 create mode 100644 node_modules/@sigstore/sign/package.json
 create mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE
 create mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 create mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 create mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 create mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 create mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 create mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 create mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 create mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 create mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 create mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 create mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 create mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js
 create mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/package.json
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/store/public-good-instance-root.json
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/canonical-json/LICENSE
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/canonical-json/lib/index.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/canonical-json/package.json
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/LICENSE
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/base.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/error.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/file.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/index.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/key.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/role.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/root.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js
 create mode 100644 node_modules/sigstore/node_modules/@tufjs/models/package.json
 create mode 100644 node_modules/sigstore/node_modules/tuf-js/LICENSE
 create mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/config.js
 create mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/error.js
 create mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js
 create mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/index.js
 create mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/store.js
 create mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/updater.js
 create mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js
 create mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js
 create mode 100644 node_modules/sigstore/node_modules/tuf-js/package.json
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/LICENSE.md
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/path.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/read.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/rm.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/write.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/entry-index.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/get.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/index.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/memoization.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/put.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/rm.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/util/glob.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/lib/verify.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json
 delete mode 100644 node_modules/tuf-js/node_modules/cacache/package.json
 delete mode 100644 node_modules/tuf-js/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/tuf-js/node_modules/lru-cache/index.js
 delete mode 100644 node_modules/tuf-js/node_modules/lru-cache/index.mjs
 delete mode 100644 node_modules/tuf-js/node_modules/lru-cache/package.json
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/agent.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/dns.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/package.json
 delete mode 100644 node_modules/tuf-js/node_modules/minipass/LICENSE
 delete mode 100644 node_modules/tuf-js/node_modules/minipass/index.js
 delete mode 100644 node_modules/tuf-js/node_modules/minipass/index.mjs
 delete mode 100644 node_modules/tuf-js/node_modules/minipass/package.json

diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 324dbb190ca34..7ff1866331389 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -565,7 +565,7 @@ graph LR;
   npm-->remark-github;
   npm-->remark;
   npm-->semver;
-  npm-->sigstore;
+  npm-->sigstore-tuf["@sigstore/tuf"];
   npm-->spawk;
   npm-->ssri;
   npm-->supports-color;
@@ -768,8 +768,14 @@ graph LR;
   semver-->lru-cache;
   shebang-command-->shebang-regex;
   sigstore-->make-fetch-happen;
+  sigstore-->sigstore-bundle["@sigstore/bundle"];
   sigstore-->sigstore-protobuf-specs["@sigstore/protobuf-specs"];
+  sigstore-->sigstore-sign["@sigstore/sign"];
   sigstore-->sigstore-tuf["@sigstore/tuf"];
+  sigstore-bundle-->sigstore-protobuf-specs["@sigstore/protobuf-specs"];
+  sigstore-sign-->make-fetch-happen;
+  sigstore-sign-->sigstore-bundle["@sigstore/bundle"];
+  sigstore-sign-->sigstore-protobuf-specs["@sigstore/protobuf-specs"];
   sigstore-tuf-->sigstore-protobuf-specs["@sigstore/protobuf-specs"];
   sigstore-tuf-->tuf-js;
   socks-->ip;
diff --git a/lib/commands/audit.js b/lib/commands/audit.js
index 500620f2cd01b..de5483109d598 100644
--- a/lib/commands/audit.js
+++ b/lib/commands/audit.js
@@ -4,7 +4,7 @@ const localeCompare = require('@isaacs/string-locale-compare')('en')
 const npa = require('npm-package-arg')
 const pacote = require('pacote')
 const pMap = require('p-map')
-const { sigstore } = require('sigstore')
+const tufClient = require('@sigstore/tuf')
 
 const ArboristWorkspaceCmd = require('../arborist-cmd.js')
 const auditError = require('../utils/audit-error.js')
@@ -38,8 +38,8 @@ class VerifySignatures {
       throw new Error('found no installed dependencies to audit')
     }
 
-    const tuf = await sigstore.tuf.client({
-      tufCachePath: this.opts.tufCache,
+    const tuf = await tufClient.initTUF({
+      cachePath: this.opts.tufCache,
       retry: this.opts.retry,
       timeout: this.opts.timeout,
     })
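
Note: the hunk above moves npm audit's TUF setup from the sigstore 1.x
sub-client (sigstore.tuf.client) to the standalone @sigstore/tuf package,
renaming the tufCachePath option to cachePath. A minimal sketch of the new
call shape, using only options visible in this hunk; the target name below
is illustrative, not taken from this patch:

const tufClient = require('@sigstore/tuf')

async function fetchRegistryKeys (opts) {
  const tuf = await tufClient.initTUF({
    cachePath: opts.tufCache, // was tufCachePath in sigstore 1.x
    retry: opts.retry,
    timeout: opts.timeout,
  })
  // initTUF resolves to a client whose getTarget() fetches and verifies
  // a named target file from the TUF repository.
  return tuf.getTarget('registry.npmjs.org/keys.json')
}
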
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 56c6317d7d0e4..6073ab13442a5 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -36,8 +36,25 @@
 !/@pkgjs/parseargs
 !/@sigstore/
 /@sigstore/*
+!/@sigstore/bundle
+!/@sigstore/bundle/node_modules/
+/@sigstore/bundle/node_modules/*
+!/@sigstore/bundle/node_modules/@sigstore/
+/@sigstore/bundle/node_modules/@sigstore/*
+!/@sigstore/bundle/node_modules/@sigstore/protobuf-specs
 !/@sigstore/protobuf-specs
+!/@sigstore/sign
+!/@sigstore/sign/node_modules/
+/@sigstore/sign/node_modules/*
+!/@sigstore/sign/node_modules/@sigstore/
+/@sigstore/sign/node_modules/@sigstore/*
+!/@sigstore/sign/node_modules/@sigstore/protobuf-specs
 !/@sigstore/tuf
+!/@sigstore/tuf/node_modules/
+/@sigstore/tuf/node_modules/*
+!/@sigstore/tuf/node_modules/@sigstore/
+/@sigstore/tuf/node_modules/@sigstore/*
+!/@sigstore/tuf/node_modules/@sigstore/protobuf-specs
 !/@tootallnate/
 /@tootallnate/*
 !/@tootallnate/once
@@ -247,6 +264,18 @@
 !/sigstore
 !/sigstore/node_modules/
 /sigstore/node_modules/*
+!/sigstore/node_modules/@sigstore/
+/sigstore/node_modules/@sigstore/*
+!/sigstore/node_modules/@sigstore/tuf
+!/sigstore/node_modules/@sigstore/tuf/node_modules/
+/sigstore/node_modules/@sigstore/tuf/node_modules/*
+!/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/
+/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/*
+!/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs
+!/sigstore/node_modules/@tufjs/
+/sigstore/node_modules/@tufjs/*
+!/sigstore/node_modules/@tufjs/canonical-json
+!/sigstore/node_modules/@tufjs/models
 !/sigstore/node_modules/cacache
 !/sigstore/node_modules/cacache/node_modules/
 /sigstore/node_modules/cacache/node_modules/*
@@ -254,6 +283,7 @@
 !/sigstore/node_modules/lru-cache
 !/sigstore/node_modules/make-fetch-happen
 !/sigstore/node_modules/minipass
+!/sigstore/node_modules/tuf-js
 !/smart-buffer
 !/socks-proxy-agent
 !/socks
@@ -280,15 +310,6 @@
 !/tiny-relative-date
 !/treeverse
 !/tuf-js
-!/tuf-js/node_modules/
-/tuf-js/node_modules/*
-!/tuf-js/node_modules/cacache
-!/tuf-js/node_modules/cacache/node_modules/
-/tuf-js/node_modules/cacache/node_modules/*
-!/tuf-js/node_modules/cacache/node_modules/minipass
-!/tuf-js/node_modules/lru-cache
-!/tuf-js/node_modules/make-fetch-happen
-!/tuf-js/node_modules/minipass
 !/unique-filename
 !/unique-slug
 !/util-deprecate
diff --git a/node_modules/@sigstore/bundle/LICENSE b/node_modules/@sigstore/bundle/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/@sigstore/bundle/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 The Sigstore Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/@sigstore/bundle/dist/build.js b/node_modules/@sigstore/bundle/dist/build.js
new file mode 100644
index 0000000000000..0ccea62eaba87
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/build.js
@@ -0,0 +1,89 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const bundle_1 = require("./bundle");
+// Message signature bundle - $case: 'messageSignature'
+function toMessageSignatureBundle(options) {
+    return {
+        mediaType: bundle_1.BUNDLE_V01_MEDIA_TYPE,
+        content: {
+            $case: 'messageSignature',
+            messageSignature: {
+                messageDigest: {
+                    algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256,
+                    digest: options.digest,
+                },
+                signature: options.signature,
+            },
+        },
+        verificationMaterial: toVerificationMaterial(options),
+    };
+}
+exports.toMessageSignatureBundle = toMessageSignatureBundle;
+// DSSE envelope bundle - $case: 'dsseEnvelope'
+function toDSSEBundle(options) {
+    return {
+        mediaType: bundle_1.BUNDLE_V01_MEDIA_TYPE,
+        content: {
+            $case: 'dsseEnvelope',
+            dsseEnvelope: toEnvelope(options),
+        },
+        verificationMaterial: toVerificationMaterial(options),
+    };
+}
+exports.toDSSEBundle = toDSSEBundle;
+function toEnvelope(options) {
+    return {
+        payloadType: options.artifactType,
+        payload: options.artifact,
+        signatures: [toSignature(options)],
+    };
+}
+function toSignature(options) {
+    return {
+        keyid: options.keyHint || '',
+        sig: options.signature,
+    };
+}
+// Verification material
+function toVerificationMaterial(options) {
+    return {
+        content: toKeyContent(options),
+        tlogEntries: [],
+        timestampVerificationData: { rfc3161Timestamps: [] },
+    };
+}
+function toKeyContent(options) {
+    if (options.certificate) {
+        return {
+            $case: 'x509CertificateChain',
+            x509CertificateChain: {
+                certificates: [{ rawBytes: options.certificate }],
+            },
+        };
+    }
+    else {
+        return {
+            $case: 'publicKey',
+            publicKey: {
+                hint: options.keyHint || '',
+            },
+        };
+    }
+}
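
Note: a sketch of driving the builder above; the signature and certificate
byte values here are placeholders, and Node's built-in crypto module is
assumed for the digest:

const crypto = require('crypto')
const { toMessageSignatureBundle } = require('@sigstore/bundle')

const artifact = Buffer.from('hello world')
const bundle = toMessageSignatureBundle({
  digest: crypto.createHash('sha256').update(artifact).digest(),
  signature: Buffer.from('placeholder-signature-bytes'),
  certificate: Buffer.from('placeholder-DER-certificate'),
})
// mediaType is BUNDLE_V01_MEDIA_TYPE; because a certificate was supplied,
// toKeyContent selects the x509CertificateChain case:
// bundle.verificationMaterial.content.$case === 'x509CertificateChain'
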
diff --git a/node_modules/@sigstore/bundle/dist/bundle.js b/node_modules/@sigstore/bundle/dist/bundle.js
new file mode 100644
index 0000000000000..8c01e2d19c5ec
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/bundle.js
@@ -0,0 +1,22 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isBundleWithDsseEnvelope = exports.isBundleWithMessageSignature = exports.isBundleWithPublicKey = exports.isBundleWithCertificateChain = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0;
+exports.BUNDLE_V01_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1';
+exports.BUNDLE_V02_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.2';
+// Type guards for bundle variants.
+function isBundleWithCertificateChain(b) {
+    return b.verificationMaterial.content.$case === 'x509CertificateChain';
+}
+exports.isBundleWithCertificateChain = isBundleWithCertificateChain;
+function isBundleWithPublicKey(b) {
+    return b.verificationMaterial.content.$case === 'publicKey';
+}
+exports.isBundleWithPublicKey = isBundleWithPublicKey;
+function isBundleWithMessageSignature(b) {
+    return b.content.$case === 'messageSignature';
+}
+exports.isBundleWithMessageSignature = isBundleWithMessageSignature;
+function isBundleWithDsseEnvelope(b) {
+    return b.content.$case === 'dsseEnvelope';
+}
+exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope;
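
Note: the guards above discriminate on the $case tags set by the builders in
build.js; a sketch of consuming them:

const {
  isBundleWithDsseEnvelope,
  isBundleWithCertificateChain,
} = require('@sigstore/bundle')

function describeBundle (bundle) {
  const content = isBundleWithDsseEnvelope(bundle)
    ? 'dsseEnvelope (' + bundle.content.dsseEnvelope.payloadType + ')'
    : 'messageSignature'
  const material = isBundleWithCertificateChain(bundle)
    ? 'x509CertificateChain'
    : 'publicKey hint'
  return content + ' verified via ' + material
}
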
diff --git a/node_modules/@sigstore/bundle/dist/error.js b/node_modules/@sigstore/bundle/dist/error.js
new file mode 100644
index 0000000000000..f84295323b812
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/error.js
@@ -0,0 +1,25 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ValidationError = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+class ValidationError extends Error {
+    constructor(message, fields) {
+        super(message);
+        this.fields = fields;
+    }
+}
+exports.ValidationError = ValidationError;
diff --git a/node_modules/@sigstore/bundle/dist/index.js b/node_modules/@sigstore/bundle/dist/index.js
new file mode 100644
index 0000000000000..b016a16d11cc0
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/index.js
@@ -0,0 +1,40 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isBundleV01 = exports.assertBundleV01 = exports.assertBundleLatest = exports.assertBundle = exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = exports.ValidationError = exports.isBundleWithPublicKey = exports.isBundleWithMessageSignature = exports.isBundleWithDsseEnvelope = exports.isBundleWithCertificateChain = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = exports.toMessageSignatureBundle = exports.toDSSEBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var build_1 = require("./build");
+Object.defineProperty(exports, "toDSSEBundle", { enumerable: true, get: function () { return build_1.toDSSEBundle; } });
+Object.defineProperty(exports, "toMessageSignatureBundle", { enumerable: true, get: function () { return build_1.toMessageSignatureBundle; } });
+var bundle_1 = require("./bundle");
+Object.defineProperty(exports, "BUNDLE_V01_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V01_MEDIA_TYPE; } });
+Object.defineProperty(exports, "BUNDLE_V02_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V02_MEDIA_TYPE; } });
+Object.defineProperty(exports, "isBundleWithCertificateChain", { enumerable: true, get: function () { return bundle_1.isBundleWithCertificateChain; } });
+Object.defineProperty(exports, "isBundleWithDsseEnvelope", { enumerable: true, get: function () { return bundle_1.isBundleWithDsseEnvelope; } });
+Object.defineProperty(exports, "isBundleWithMessageSignature", { enumerable: true, get: function () { return bundle_1.isBundleWithMessageSignature; } });
+Object.defineProperty(exports, "isBundleWithPublicKey", { enumerable: true, get: function () { return bundle_1.isBundleWithPublicKey; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } });
+var serialized_1 = require("./serialized");
+Object.defineProperty(exports, "bundleFromJSON", { enumerable: true, get: function () { return serialized_1.bundleFromJSON; } });
+Object.defineProperty(exports, "bundleToJSON", { enumerable: true, get: function () { return serialized_1.bundleToJSON; } });
+Object.defineProperty(exports, "envelopeFromJSON", { enumerable: true, get: function () { return serialized_1.envelopeFromJSON; } });
+Object.defineProperty(exports, "envelopeToJSON", { enumerable: true, get: function () { return serialized_1.envelopeToJSON; } });
+var validate_1 = require("./validate");
+Object.defineProperty(exports, "assertBundle", { enumerable: true, get: function () { return validate_1.assertBundle; } });
+Object.defineProperty(exports, "assertBundleLatest", { enumerable: true, get: function () { return validate_1.assertBundleLatest; } });
+Object.defineProperty(exports, "assertBundleV01", { enumerable: true, get: function () { return validate_1.assertBundleV01; } });
+Object.defineProperty(exports, "isBundleV01", { enumerable: true, get: function () { return validate_1.isBundleV01; } });
diff --git a/node_modules/@sigstore/bundle/dist/serialized.js b/node_modules/@sigstore/bundle/dist/serialized.js
new file mode 100644
index 0000000000000..f1073358cacfd
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/serialized.js
@@ -0,0 +1,38 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const validate_1 = require("./validate");
+const bundleFromJSON = (obj) => {
+    const bundle = protobuf_specs_1.Bundle.fromJSON(obj);
+    (0, validate_1.assertBundle)(bundle);
+    return bundle;
+};
+exports.bundleFromJSON = bundleFromJSON;
+const bundleToJSON = (bundle) => {
+    return protobuf_specs_1.Bundle.toJSON(bundle);
+};
+exports.bundleToJSON = bundleToJSON;
+const envelopeFromJSON = (obj) => {
+    return protobuf_specs_1.Envelope.fromJSON(obj);
+};
+exports.envelopeFromJSON = envelopeFromJSON;
+const envelopeToJSON = (envelope) => {
+    return protobuf_specs_1.Envelope.toJSON(envelope);
+};
+exports.envelopeToJSON = envelopeToJSON;
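
Note: bundleFromJSON decodes via the protobuf-specs types and then runs
assertBundle, so structurally invalid input is rejected (the ValidationError
from error.js carries a fields list of the offending paths). A sketch of the
round trip:

const { bundleFromJSON, bundleToJSON, ValidationError } = require('@sigstore/bundle')

function parseBundle (json) {
  try {
    return bundleFromJSON(json)
  } catch (err) {
    if (err instanceof ValidationError) {
      console.error('invalid bundle fields:', err.fields)
    }
    throw err
  }
}
// bundleToJSON(parseBundle(obj)) reproduces the serialized form
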
diff --git a/node_modules/@sigstore/bundle/dist/utility.js b/node_modules/@sigstore/bundle/dist/utility.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/utility.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/bundle/dist/validate.js b/node_modules/@sigstore/bundle/dist/validate.js
new file mode 100644
index 0000000000000..015b6dfc58dd7
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/validate.js
@@ -0,0 +1,160 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.assertBundleLatest = exports.isBundleV01 = exports.assertBundleV01 = exports.assertBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const bundle_1 = require("./bundle");
+const error_1 = require("./error");
+// Performs basic validation of a Sigstore bundle to ensure that all required
+// fields are populated. This is not a complete validation of the bundle, but
+// rather a check that the bundle is in a valid state to be processed by the
+// rest of the code.
+function assertBundle(b) {
+    const invalidValues = [];
+    // Media type validation
+    if (b.mediaType === undefined ||
+        !b.mediaType.startsWith('application/vnd.dev.sigstore.bundle+json;version=')) {
+        invalidValues.push('mediaType');
+    }
+    // Content-related validation
+    if (b.content === undefined) {
+        invalidValues.push('content');
+    }
+    else {
+        switch (b.content.$case) {
+            case 'messageSignature':
+                if (b.content.messageSignature.messageDigest === undefined) {
+                    invalidValues.push('content.messageSignature.messageDigest');
+                }
+                else {
+                    if (b.content.messageSignature.messageDigest.digest.length === 0) {
+                        invalidValues.push('content.messageSignature.messageDigest.digest');
+                    }
+                }
+                if (b.content.messageSignature.signature.length === 0) {
+                    invalidValues.push('content.messageSignature.signature');
+                }
+                break;
+            case 'dsseEnvelope':
+                if (b.content.dsseEnvelope.payload.length === 0) {
+                    invalidValues.push('content.dsseEnvelope.payload');
+                }
+                if (b.content.dsseEnvelope.signatures.length !== 1) {
+                    invalidValues.push('content.dsseEnvelope.signatures');
+                }
+                else {
+                    if (b.content.dsseEnvelope.signatures[0].sig.length === 0) {
+                        invalidValues.push('content.dsseEnvelope.signatures[0].sig');
+                    }
+                }
+                break;
+        }
+    }
+    // Verification material-related validation
+    if (b.verificationMaterial === undefined) {
+        invalidValues.push('verificationMaterial');
+    }
+    else {
+        if (b.verificationMaterial.content === undefined) {
+            invalidValues.push('verificationMaterial.content');
+        }
+        else {
+            switch (b.verificationMaterial.content.$case) {
+                case 'x509CertificateChain':
+                    if (b.verificationMaterial.content.x509CertificateChain.certificates
+                        .length === 0) {
+                        invalidValues.push('verificationMaterial.content.x509CertificateChain.certificates');
+                    }
+                    b.verificationMaterial.content.x509CertificateChain.certificates.forEach((cert, i) => {
+                        if (cert.rawBytes.length === 0) {
+                            invalidValues.push(`verificationMaterial.content.x509CertificateChain.certificates[${i}].rawBytes`);
+                        }
+                    });
+                    break;
+            }
+        }
+        if (b.verificationMaterial.tlogEntries === undefined) {
+            invalidValues.push('verificationMaterial.tlogEntries');
+        }
+        else {
+            if (b.verificationMaterial.tlogEntries.length > 0) {
+                b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+                    if (entry.logId === undefined) {
+                        invalidValues.push(`verificationMaterial.tlogEntries[${i}].logId`);
+                    }
+                    if (entry.kindVersion === undefined) {
+                        invalidValues.push(`verificationMaterial.tlogEntries[${i}].kindVersion`);
+                    }
+                });
+            }
+        }
+    }
+    if (invalidValues.length > 0) {
+        throw new error_1.ValidationError('invalid bundle', invalidValues);
+    }
+}
+exports.assertBundle = assertBundle;
+// Asserts that the given bundle conforms to the v0.1 bundle format.
+function assertBundleV01(b) {
+    const invalidValues = [];
+    if (b.mediaType && b.mediaType !== bundle_1.BUNDLE_V01_MEDIA_TYPE) {
+        invalidValues.push('mediaType');
+    }
+    if (b.verificationMaterial &&
+        b.verificationMaterial.tlogEntries?.length > 0) {
+        b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+            if (entry.inclusionPromise === undefined) {
+                invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionPromise`);
+            }
+        });
+    }
+    if (invalidValues.length > 0) {
+        throw new error_1.ValidationError('invalid v0.1 bundle', invalidValues);
+    }
+}
+exports.assertBundleV01 = assertBundleV01;
+// Type guard to determine if Bundle is a v0.1 bundle.
+function isBundleV01(b) {
+    try {
+        assertBundleV01(b);
+        return true;
+    }
+    catch (e) {
+        return false;
+    }
+}
+exports.isBundleV01 = isBundleV01;
+// Asserts that the given bundle conforms to the newest (0.2) bundle format.
+function assertBundleLatest(b) {
+    const invalidValues = [];
+    if (b.verificationMaterial &&
+        b.verificationMaterial.tlogEntries?.length > 0) {
+        b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+            if (entry.inclusionProof === undefined) {
+                invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof`);
+            }
+            else {
+                if (entry.inclusionProof.checkpoint === undefined) {
+                    invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof.checkpoint`);
+                }
+            }
+        });
+    }
+    if (invalidValues.length > 0) {
+        throw new error_1.ValidationError('invalid v0.2 bundle', invalidValues);
+    }
+}
+exports.assertBundleLatest = assertBundleLatest;
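The validators above accumulate every offending field path before throwing a single ValidationError, and isBundleV01 turns assertBundleV01 into a non-throwing probe. A small sketch of both behaviors; where the field list ends up on the error object is decided by ./error, which this diff does not show.

    // Sketch: how the validators in validate.js report problems.
    const { assertBundle, isBundleV01 } = require('@sigstore/bundle')

    try {
      assertBundle({}) // missing mediaType, content, and verificationMaterial
    } catch (e) {
      // All three paths are collected before the single throw; the field
      // list is passed as the ValidationError's second argument (./error,
      // not shown in this diff, presumably attaches it to the instance).
      console.log(e.message) // 'invalid bundle'
    }

    // isBundleV01 wraps assertBundleV01 in try/catch and never throws.
    // An empty object passes: assertBundleV01 only inspects fields that
    // are actually present, so nothing trips a v0.1-only check.
    console.log(isBundleV01({})) // true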
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 The Sigstore Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
new file mode 100644
index 0000000000000..0c367a8384454
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
@@ -0,0 +1,89 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Envelope = void 0;
+function createBaseEnvelope() {
+    return { payload: Buffer.alloc(0), payloadType: "", signatures: [] };
+}
+exports.Envelope = {
+    fromJSON(object) {
+        return {
+            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
+            payloadType: isSet(object.payloadType) ? String(object.payloadType) : "",
+            signatures: Array.isArray(object?.signatures) ? object.signatures.map((e) => exports.Signature.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.payload !== undefined &&
+            (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0)));
+        message.payloadType !== undefined && (obj.payloadType = message.payloadType);
+        if (message.signatures) {
+            obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined);
+        }
+        else {
+            obj.signatures = [];
+        }
+        return obj;
+    },
+};
+function createBaseSignature() {
+    return { sig: Buffer.alloc(0), keyid: "" };
+}
+exports.Signature = {
+    fromJSON(object) {
+        return {
+            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
+            keyid: isSet(object.keyid) ? String(object.keyid) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0)));
+        message.keyid !== undefined && (obj.keyid = message.keyid);
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
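The generated Envelope codec decodes base64 byte fields into Buffers on fromJSON and re-encodes them on toJSON, so the JSON form round-trips cleanly. A sketch with placeholder payload and signature bytes:

    // Sketch: DSSE envelope round trip through the generated codec above.
    const { Envelope } = require('@sigstore/protobuf-specs')

    const envelope = Envelope.fromJSON({
      payload: Buffer.from('{"hello":"world"}').toString('base64'),
      payloadType: 'application/vnd.in-toto+json',
      // placeholder signature bytes; a real sig comes from a DSSE signer
      signatures: [{ sig: Buffer.from('not-a-real-sig').toString('base64'), keyid: '' }],
    })
    console.log(envelope.payload.toString())       // '{"hello":"world"}' (a Buffer now)
    console.log(Envelope.toJSON(envelope).payload) // re-encoded base64 string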
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
new file mode 100644
index 0000000000000..073093b8371a8
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
@@ -0,0 +1,185 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
+/* eslint-disable */
+const any_1 = require("./google/protobuf/any");
+const timestamp_1 = require("./google/protobuf/timestamp");
+function createBaseCloudEvent() {
+    return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined };
+}
+exports.CloudEvent = {
+    fromJSON(object) {
+        return {
+            id: isSet(object.id) ? String(object.id) : "",
+            source: isSet(object.source) ? String(object.source) : "",
+            specVersion: isSet(object.specVersion) ? String(object.specVersion) : "",
+            type: isSet(object.type) ? String(object.type) : "",
+            attributes: isObject(object.attributes)
+                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
+                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
+                    return acc;
+                }, {})
+                : {},
+            data: isSet(object.binaryData)
+                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
+                : isSet(object.textData)
+                    ? { $case: "textData", textData: String(object.textData) }
+                    : isSet(object.protoData)
+                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
+                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.id !== undefined && (obj.id = message.id);
+        message.source !== undefined && (obj.source = message.source);
+        message.specVersion !== undefined && (obj.specVersion = message.specVersion);
+        message.type !== undefined && (obj.type = message.type);
+        obj.attributes = {};
+        if (message.attributes) {
+            Object.entries(message.attributes).forEach(([k, v]) => {
+                obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
+            });
+        }
+        message.data?.$case === "binaryData" &&
+            (obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined);
+        message.data?.$case === "textData" && (obj.textData = message.data?.textData);
+        message.data?.$case === "protoData" &&
+            (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined);
+        return obj;
+    },
+};
+function createBaseCloudEvent_AttributesEntry() {
+    return { key: "", value: undefined };
+}
+exports.CloudEvent_AttributesEntry = {
+    fromJSON(object) {
+        return {
+            key: isSet(object.key) ? String(object.key) : "",
+            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.key !== undefined && (obj.key = message.key);
+        message.value !== undefined &&
+            (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined);
+        return obj;
+    },
+};
+function createBaseCloudEvent_CloudEventAttributeValue() {
+    return { attr: undefined };
+}
+exports.CloudEvent_CloudEventAttributeValue = {
+    fromJSON(object) {
+        return {
+            attr: isSet(object.ceBoolean)
+                ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) }
+                : isSet(object.ceInteger)
+                    ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) }
+                    : isSet(object.ceString)
+                        ? { $case: "ceString", ceString: String(object.ceString) }
+                        : isSet(object.ceBytes)
+                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
+                            : isSet(object.ceUri)
+                                ? { $case: "ceUri", ceUri: String(object.ceUri) }
+                                : isSet(object.ceUriRef)
+                                    ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) }
+                                    : isSet(object.ceTimestamp)
+                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
+                                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean);
+        message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger));
+        message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString);
+        message.attr?.$case === "ceBytes" &&
+            (obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined);
+        message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri);
+        message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef);
+        message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString());
+        return obj;
+    },
+};
+function createBaseCloudEventBatch() {
+    return { events: [] };
+}
+exports.CloudEventBatch = {
+    fromJSON(object) {
+        return { events: Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.events) {
+            obj.events = message.events.map((e) => e ? exports.CloudEvent.toJSON(e) : undefined);
+        }
+        else {
+            obj.events = [];
+        }
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function fromTimestamp(t) {
+    let millis = Number(t.seconds) * 1000;
+    millis += t.nanos / 1000000;
+    return new Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isObject(value) {
+    return typeof value === "object" && value !== null;
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
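CloudEvent models the proto oneof data field as a $case-discriminated union: fromJSON picks binaryData, then textData, then protoData, whichever is set first, and toJSON flattens the union back into the matching JSON key. A sketch with hypothetical event values; the deep require targets the generated file added above, since this diff does not show whether the package's top level re-exports it.

    // Sketch: the $case-discriminated 'data' oneof in CloudEvent.
    const { CloudEvent } = require('@sigstore/protobuf-specs/dist/__generated__/events')

    const event = CloudEvent.fromJSON({
      id: 'evt-1',               // hypothetical values throughout
      source: 'urn:example',
      specVersion: '1.0',
      type: 'dev.example.ping',
      textData: 'hello',         // selects { $case: 'textData', ... }
    })
    console.log(event.data)                        // { $case: 'textData', textData: 'hello' }
    console.log(CloudEvent.toJSON(event).textData) // 'hello'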
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
new file mode 100644
index 0000000000000..da627499ad765
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
@@ -0,0 +1,119 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0;
+/**
+ * An indicator of the behavior of a given field (for example, that a field
+ * is required in requests, or given as output but ignored as input).
+ * This **does not** change the behavior in protocol buffers itself; it only
+ * denotes the behavior and may affect how API tooling handles the field.
+ *
+ * Note: This enum **may** receive new values in the future.
+ */
+var FieldBehavior;
+(function (FieldBehavior) {
+    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
+    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
+    /**
+     * OPTIONAL - Specifically denotes a field as optional.
+     * While all fields in protocol buffers are optional, this may be specified
+     * for emphasis if appropriate.
+     */
+    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
+    /**
+     * REQUIRED - Denotes a field as required.
+     * This indicates that the field **must** be provided as part of the request,
+     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
+     */
+    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
+    /**
+     * OUTPUT_ONLY - Denotes a field as output only.
+     * This indicates that the field is provided in responses, but including the
+     * field in a request does nothing (the server *must* ignore it and
+     * *must not* throw an error as a result of the field's presence).
+     */
+    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
+    /**
+     * INPUT_ONLY - Denotes a field as input only.
+     * This indicates that the field is provided in requests, and the
+     * corresponding field is not included in output.
+     */
+    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
+    /**
+     * IMMUTABLE - Denotes a field as immutable.
+     * This indicates that the field may be set once in a request to create a
+     * resource, but may not be changed thereafter.
+     */
+    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
+    /**
+     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
+     * This indicates that the service may provide the elements of the list
+     * in any arbitrary order, rather than the order the user originally
+     * provided. Additionally, the list's order may or may not be stable.
+     */
+    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
+})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {}));
+function fieldBehaviorFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "FIELD_BEHAVIOR_UNSPECIFIED":
+            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
+        case 1:
+        case "OPTIONAL":
+            return FieldBehavior.OPTIONAL;
+        case 2:
+        case "REQUIRED":
+            return FieldBehavior.REQUIRED;
+        case 3:
+        case "OUTPUT_ONLY":
+            return FieldBehavior.OUTPUT_ONLY;
+        case 4:
+        case "INPUT_ONLY":
+            return FieldBehavior.INPUT_ONLY;
+        case 5:
+        case "IMMUTABLE":
+            return FieldBehavior.IMMUTABLE;
+        case 6:
+        case "UNORDERED_LIST":
+            return FieldBehavior.UNORDERED_LIST;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
+exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
+function fieldBehaviorToJSON(object) {
+    switch (object) {
+        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
+            return "FIELD_BEHAVIOR_UNSPECIFIED";
+        case FieldBehavior.OPTIONAL:
+            return "OPTIONAL";
+        case FieldBehavior.REQUIRED:
+            return "REQUIRED";
+        case FieldBehavior.OUTPUT_ONLY:
+            return "OUTPUT_ONLY";
+        case FieldBehavior.INPUT_ONLY:
+            return "INPUT_ONLY";
+        case FieldBehavior.IMMUTABLE:
+            return "IMMUTABLE";
+        case FieldBehavior.UNORDERED_LIST:
+            return "UNORDERED_LIST";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
+exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
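fieldBehaviorFromJSON accepts either the wire number or the enum-name string and throws on anything else; fieldBehaviorToJSON is its inverse. A sketch (same deep-require caveat as above):

    // Sketch: the enum codec accepts numbers or names and round-trips.
    const {
      FieldBehavior,
      fieldBehaviorFromJSON,
      fieldBehaviorToJSON,
    } = require('@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior')

    console.log(fieldBehaviorFromJSON(2) === FieldBehavior.REQUIRED)          // true
    console.log(fieldBehaviorFromJSON('REQUIRED') === FieldBehavior.REQUIRED) // true
    console.log(fieldBehaviorToJSON(FieldBehavior.REQUIRED))                  // 'REQUIRED'
    try {
      fieldBehaviorFromJSON('NOT_A_BEHAVIOR') // unrecognized values throw
    } catch (e) {
      console.log(e.message)
    }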
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
new file mode 100644
index 0000000000000..6b3f3c97a6647
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
@@ -0,0 +1,65 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Any = void 0;
+function createBaseAny() {
+    return { typeUrl: "", value: Buffer.alloc(0) };
+}
+exports.Any = {
+    fromJSON(object) {
+        return {
+            typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "",
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl);
+        message.value !== undefined &&
+            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
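Any pairs a typeUrl with opaque bytes; like the other generated codecs, its byte field is base64 in JSON form and a Buffer in memory. A sketch with a hypothetical type URL (same deep-require caveat as above):

    // Sketch: google.protobuf.Any round trip through the codec above.
    const { Any } = require('@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any')

    const any = Any.fromJSON({
      typeUrl: 'type.googleapis.com/example.Thing', // hypothetical type URL
      value: Buffer.from('opaque payload').toString('base64'),
    })
    console.log(any.value.toString())  // 'opaque payload' (decoded to a Buffer)
    console.log(Any.toJSON(any).value) // re-encoded base64 string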
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
new file mode 100644
index 0000000000000..d429aac846043
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
@@ -0,0 +1,1308 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0;
+var FieldDescriptorProto_Type;
+(function (FieldDescriptorProto_Type) {
+    /**
+     * TYPE_DOUBLE - 0 is reserved for errors.
+     * Order is weird for historical reasons.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
+    /**
+     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
+    /**
+     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
+    /**
+     * TYPE_GROUP - Tag-delimited aggregate.
+     * Group type is deprecated and not supported in proto3. However, Proto3
+     * implementations should still be able to parse the group wire format and
+     * treat group fields as unknown fields.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
+    /** TYPE_MESSAGE - Length-delimited aggregate. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
+    /** TYPE_BYTES - New in version 2. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
+    /** TYPE_SINT32 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
+    /** TYPE_SINT64 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
+})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {}));
+function fieldDescriptorProto_TypeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "TYPE_DOUBLE":
+            return FieldDescriptorProto_Type.TYPE_DOUBLE;
+        case 2:
+        case "TYPE_FLOAT":
+            return FieldDescriptorProto_Type.TYPE_FLOAT;
+        case 3:
+        case "TYPE_INT64":
+            return FieldDescriptorProto_Type.TYPE_INT64;
+        case 4:
+        case "TYPE_UINT64":
+            return FieldDescriptorProto_Type.TYPE_UINT64;
+        case 5:
+        case "TYPE_INT32":
+            return FieldDescriptorProto_Type.TYPE_INT32;
+        case 6:
+        case "TYPE_FIXED64":
+            return FieldDescriptorProto_Type.TYPE_FIXED64;
+        case 7:
+        case "TYPE_FIXED32":
+            return FieldDescriptorProto_Type.TYPE_FIXED32;
+        case 8:
+        case "TYPE_BOOL":
+            return FieldDescriptorProto_Type.TYPE_BOOL;
+        case 9:
+        case "TYPE_STRING":
+            return FieldDescriptorProto_Type.TYPE_STRING;
+        case 10:
+        case "TYPE_GROUP":
+            return FieldDescriptorProto_Type.TYPE_GROUP;
+        case 11:
+        case "TYPE_MESSAGE":
+            return FieldDescriptorProto_Type.TYPE_MESSAGE;
+        case 12:
+        case "TYPE_BYTES":
+            return FieldDescriptorProto_Type.TYPE_BYTES;
+        case 13:
+        case "TYPE_UINT32":
+            return FieldDescriptorProto_Type.TYPE_UINT32;
+        case 14:
+        case "TYPE_ENUM":
+            return FieldDescriptorProto_Type.TYPE_ENUM;
+        case 15:
+        case "TYPE_SFIXED32":
+            return FieldDescriptorProto_Type.TYPE_SFIXED32;
+        case 16:
+        case "TYPE_SFIXED64":
+            return FieldDescriptorProto_Type.TYPE_SFIXED64;
+        case 17:
+        case "TYPE_SINT32":
+            return FieldDescriptorProto_Type.TYPE_SINT32;
+        case 18:
+        case "TYPE_SINT64":
+            return FieldDescriptorProto_Type.TYPE_SINT64;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
+function fieldDescriptorProto_TypeToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Type.TYPE_DOUBLE:
+            return "TYPE_DOUBLE";
+        case FieldDescriptorProto_Type.TYPE_FLOAT:
+            return "TYPE_FLOAT";
+        case FieldDescriptorProto_Type.TYPE_INT64:
+            return "TYPE_INT64";
+        case FieldDescriptorProto_Type.TYPE_UINT64:
+            return "TYPE_UINT64";
+        case FieldDescriptorProto_Type.TYPE_INT32:
+            return "TYPE_INT32";
+        case FieldDescriptorProto_Type.TYPE_FIXED64:
+            return "TYPE_FIXED64";
+        case FieldDescriptorProto_Type.TYPE_FIXED32:
+            return "TYPE_FIXED32";
+        case FieldDescriptorProto_Type.TYPE_BOOL:
+            return "TYPE_BOOL";
+        case FieldDescriptorProto_Type.TYPE_STRING:
+            return "TYPE_STRING";
+        case FieldDescriptorProto_Type.TYPE_GROUP:
+            return "TYPE_GROUP";
+        case FieldDescriptorProto_Type.TYPE_MESSAGE:
+            return "TYPE_MESSAGE";
+        case FieldDescriptorProto_Type.TYPE_BYTES:
+            return "TYPE_BYTES";
+        case FieldDescriptorProto_Type.TYPE_UINT32:
+            return "TYPE_UINT32";
+        case FieldDescriptorProto_Type.TYPE_ENUM:
+            return "TYPE_ENUM";
+        case FieldDescriptorProto_Type.TYPE_SFIXED32:
+            return "TYPE_SFIXED32";
+        case FieldDescriptorProto_Type.TYPE_SFIXED64:
+            return "TYPE_SFIXED64";
+        case FieldDescriptorProto_Type.TYPE_SINT32:
+            return "TYPE_SINT32";
+        case FieldDescriptorProto_Type.TYPE_SINT64:
+            return "TYPE_SINT64";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
+var FieldDescriptorProto_Label;
+(function (FieldDescriptorProto_Label) {
+    /** LABEL_OPTIONAL - 0 is reserved for errors */
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
+})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {}));
+function fieldDescriptorProto_LabelFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "LABEL_OPTIONAL":
+            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
+        case 2:
+        case "LABEL_REQUIRED":
+            return FieldDescriptorProto_Label.LABEL_REQUIRED;
+        case 3:
+        case "LABEL_REPEATED":
+            return FieldDescriptorProto_Label.LABEL_REPEATED;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
+function fieldDescriptorProto_LabelToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
+            return "LABEL_OPTIONAL";
+        case FieldDescriptorProto_Label.LABEL_REQUIRED:
+            return "LABEL_REQUIRED";
+        case FieldDescriptorProto_Label.LABEL_REPEATED:
+            return "LABEL_REPEATED";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
+/** Generated classes can be optimized for speed or code size. */
+var FileOptions_OptimizeMode;
+(function (FileOptions_OptimizeMode) {
+    /** SPEED - Generate complete code for parsing, serialization, */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
+    /** CODE_SIZE - etc. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
+    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
+})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {}));
+function fileOptions_OptimizeModeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "SPEED":
+            return FileOptions_OptimizeMode.SPEED;
+        case 2:
+        case "CODE_SIZE":
+            return FileOptions_OptimizeMode.CODE_SIZE;
+        case 3:
+        case "LITE_RUNTIME":
+            return FileOptions_OptimizeMode.LITE_RUNTIME;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
+function fileOptions_OptimizeModeToJSON(object) {
+    switch (object) {
+        case FileOptions_OptimizeMode.SPEED:
+            return "SPEED";
+        case FileOptions_OptimizeMode.CODE_SIZE:
+            return "CODE_SIZE";
+        case FileOptions_OptimizeMode.LITE_RUNTIME:
+            return "LITE_RUNTIME";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
+var FieldOptions_CType;
+(function (FieldOptions_CType) {
+    /** STRING - Default mode. */
+    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
+    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
+    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
+})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {}));
+function fieldOptions_CTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "STRING":
+            return FieldOptions_CType.STRING;
+        case 1:
+        case "CORD":
+            return FieldOptions_CType.CORD;
+        case 2:
+        case "STRING_PIECE":
+            return FieldOptions_CType.STRING_PIECE;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
+function fieldOptions_CTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_CType.STRING:
+            return "STRING";
+        case FieldOptions_CType.CORD:
+            return "CORD";
+        case FieldOptions_CType.STRING_PIECE:
+            return "STRING_PIECE";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
+var FieldOptions_JSType;
+(function (FieldOptions_JSType) {
+    /** JS_NORMAL - Use the default type. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
+    /** JS_STRING - Use JavaScript strings. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
+    /** JS_NUMBER - Use JavaScript numbers. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
+})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {}));
+function fieldOptions_JSTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "JS_NORMAL":
+            return FieldOptions_JSType.JS_NORMAL;
+        case 1:
+        case "JS_STRING":
+            return FieldOptions_JSType.JS_STRING;
+        case 2:
+        case "JS_NUMBER":
+            return FieldOptions_JSType.JS_NUMBER;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
+function fieldOptions_JSTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_JSType.JS_NORMAL:
+            return "JS_NORMAL";
+        case FieldOptions_JSType.JS_STRING:
+            return "JS_STRING";
+        case FieldOptions_JSType.JS_NUMBER:
+            return "JS_NUMBER";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
+/**
+ * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+ * or neither? HTTP based RPC implementation may choose GET verb for safe
+ * methods, and PUT verb for idempotent methods instead of the default POST.
+ */
+var MethodOptions_IdempotencyLevel;
+(function (MethodOptions_IdempotencyLevel) {
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
+    /** NO_SIDE_EFFECTS - implies idempotent */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
+    /** IDEMPOTENT - idempotent, but may have side effects */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
+})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {}));
+function methodOptions_IdempotencyLevelFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "IDEMPOTENCY_UNKNOWN":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
+        case 1:
+        case "NO_SIDE_EFFECTS":
+            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
+        case 2:
+        case "IDEMPOTENT":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
+function methodOptions_IdempotencyLevelToJSON(object) {
+    switch (object) {
+        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
+            return "IDEMPOTENCY_UNKNOWN";
+        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
+            return "NO_SIDE_EFFECTS";
+        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
+            return "IDEMPOTENT";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
+function createBaseFileDescriptorSet() {
+    return { file: [] };
+}
+exports.FileDescriptorSet = {
+    fromJSON(object) {
+        return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.file) {
+            obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.file = [];
+        }
+        return obj;
+    },
+};
+function createBaseFileDescriptorProto() {
+    return {
+        name: "",
+        package: "",
+        dependency: [],
+        publicDependency: [],
+        weakDependency: [],
+        messageType: [],
+        enumType: [],
+        service: [],
+        extension: [],
+        options: undefined,
+        sourceCodeInfo: undefined,
+        syntax: "",
+    };
+}
+exports.FileDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            package: isSet(object.package) ? String(object.package) : "",
+            dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [],
+            publicDependency: Array.isArray(object?.publicDependency)
+                ? object.publicDependency.map((e) => Number(e))
+                : [],
+            weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [],
+            messageType: Array.isArray(object?.messageType)
+                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
+            service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [],
+            extension: Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
+            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
+            syntax: isSet(object.syntax) ? String(object.syntax) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.package !== undefined && (obj.package = message.package);
+        if (message.dependency) {
+            obj.dependency = message.dependency.map((e) => e);
+        }
+        else {
+            obj.dependency = [];
+        }
+        if (message.publicDependency) {
+            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
+        }
+        else {
+            obj.publicDependency = [];
+        }
+        if (message.weakDependency) {
+            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
+        }
+        else {
+            obj.weakDependency = [];
+        }
+        if (message.messageType) {
+            obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.messageType = [];
+        }
+        if (message.enumType) {
+            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.enumType = [];
+        }
+        if (message.service) {
+            obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.service = [];
+        }
+        if (message.extension) {
+            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.extension = [];
+        }
+        message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined);
+        message.sourceCodeInfo !== undefined &&
+            (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined);
+        message.syntax !== undefined && (obj.syntax = message.syntax);
+        return obj;
+    },
+};
+function createBaseDescriptorProto() {
+    return {
+        name: "",
+        field: [],
+        extension: [],
+        nestedType: [],
+        enumType: [],
+        extensionRange: [],
+        oneofDecl: [],
+        options: undefined,
+        reservedRange: [],
+        reservedName: [],
+    };
+}
+exports.DescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [],
+            extension: Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            nestedType: Array.isArray(object?.nestedType)
+                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
+            extensionRange: Array.isArray(object?.extensionRange)
+                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
+                : [],
+            oneofDecl: Array.isArray(object?.oneofDecl)
+                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
+            reservedRange: Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
+                : [],
+            reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.field) {
+            obj.field = message.field.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.field = [];
+        }
+        if (message.extension) {
+            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.extension = [];
+        }
+        if (message.nestedType) {
+            obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.nestedType = [];
+        }
+        if (message.enumType) {
+            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.enumType = [];
+        }
+        if (message.extensionRange) {
+            obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.extensionRange = [];
+        }
+        if (message.oneofDecl) {
+            obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.oneofDecl = [];
+        }
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined);
+        if (message.reservedRange) {
+            obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.reservedRange = [];
+        }
+        if (message.reservedName) {
+            obj.reservedName = message.reservedName.map((e) => e);
+        }
+        else {
+            obj.reservedName = [];
+        }
+        return obj;
+    },
+};
+function createBaseDescriptorProto_ExtensionRange() {
+    return { start: 0, end: 0, options: undefined };
+}
+exports.DescriptorProto_ExtensionRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? Number(object.start) : 0,
+            end: isSet(object.end) ? Number(object.end) : 0,
+            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseDescriptorProto_ReservedRange() {
+    return { start: 0, end: 0 };
+}
+exports.DescriptorProto_ReservedRange = {
+    fromJSON(object) {
+        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
+function createBaseExtensionRangeOptions() {
+    return { uninterpretedOption: [] };
+}
+exports.ExtensionRangeOptions = {
+    fromJSON(object) {
+        return {
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseFieldDescriptorProto() {
+    return {
+        name: "",
+        number: 0,
+        label: 1,
+        type: 1,
+        typeName: "",
+        extendee: "",
+        defaultValue: "",
+        oneofIndex: 0,
+        jsonName: "",
+        options: undefined,
+        proto3Optional: false,
+    };
+}
+exports.FieldDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            number: isSet(object.number) ? Number(object.number) : 0,
+            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
+            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
+            typeName: isSet(object.typeName) ? String(object.typeName) : "",
+            extendee: isSet(object.extendee) ? String(object.extendee) : "",
+            defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "",
+            oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0,
+            jsonName: isSet(object.jsonName) ? String(object.jsonName) : "",
+            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
+            proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.number !== undefined && (obj.number = Math.round(message.number));
+        message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label));
+        message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type));
+        message.typeName !== undefined && (obj.typeName = message.typeName);
+        message.extendee !== undefined && (obj.extendee = message.extendee);
+        message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue);
+        message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex));
+        message.jsonName !== undefined && (obj.jsonName = message.jsonName);
+        message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined);
+        message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional);
+        return obj;
+    },
+};
+function createBaseOneofDescriptorProto() {
+    return { name: "", options: undefined };
+}
+exports.OneofDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseEnumDescriptorProto() {
+    return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] };
+}
+exports.EnumDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [],
+            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
+            reservedRange: Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
+                : [],
+            reservedName: Array.isArray(object?.reservedName)
+                ? object.reservedName.map((e) => String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.value) {
+            obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.value = [];
+        }
+        message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined);
+        if (message.reservedRange) {
+            obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.reservedRange = [];
+        }
+        if (message.reservedName) {
+            obj.reservedName = message.reservedName.map((e) => e);
+        }
+        else {
+            obj.reservedName = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumDescriptorProto_EnumReservedRange() {
+    return { start: 0, end: 0 };
+}
+exports.EnumDescriptorProto_EnumReservedRange = {
+    fromJSON(object) {
+        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
+function createBaseEnumValueDescriptorProto() {
+    return { name: "", number: 0, options: undefined };
+}
+exports.EnumValueDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            number: isSet(object.number) ? Number(object.number) : 0,
+            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.number !== undefined && (obj.number = Math.round(message.number));
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseServiceDescriptorProto() {
+    return { name: "", method: [], options: undefined };
+}
+exports.ServiceDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [],
+            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.method) {
+            obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.method = [];
+        }
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseMethodDescriptorProto() {
+    return {
+        name: "",
+        inputType: "",
+        outputType: "",
+        options: undefined,
+        clientStreaming: false,
+        serverStreaming: false,
+    };
+}
+exports.MethodDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            inputType: isSet(object.inputType) ? String(object.inputType) : "",
+            outputType: isSet(object.outputType) ? String(object.outputType) : "",
+            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
+            clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false,
+            serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.inputType !== undefined && (obj.inputType = message.inputType);
+        message.outputType !== undefined && (obj.outputType = message.outputType);
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined);
+        message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming);
+        message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming);
+        return obj;
+    },
+};
+function createBaseFileOptions() {
+    return {
+        javaPackage: "",
+        javaOuterClassname: "",
+        javaMultipleFiles: false,
+        javaGenerateEqualsAndHash: false,
+        javaStringCheckUtf8: false,
+        optimizeFor: 1,
+        goPackage: "",
+        ccGenericServices: false,
+        javaGenericServices: false,
+        pyGenericServices: false,
+        phpGenericServices: false,
+        deprecated: false,
+        ccEnableArenas: false,
+        objcClassPrefix: "",
+        csharpNamespace: "",
+        swiftPrefix: "",
+        phpClassPrefix: "",
+        phpNamespace: "",
+        phpMetadataNamespace: "",
+        rubyPackage: "",
+        uninterpretedOption: [],
+    };
+}
+exports.FileOptions = {
+    fromJSON(object) {
+        return {
+            javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "",
+            javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "",
+            javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false,
+            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
+                ? Boolean(object.javaGenerateEqualsAndHash)
+                : false,
+            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false,
+            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
+            goPackage: isSet(object.goPackage) ? String(object.goPackage) : "",
+            ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false,
+            javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false,
+            pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false,
+            phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false,
+            objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "",
+            csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "",
+            swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "",
+            phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "",
+            phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "",
+            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "",
+            rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "",
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage);
+        message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname);
+        message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles);
+        message.javaGenerateEqualsAndHash !== undefined &&
+            (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash);
+        message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8);
+        message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor));
+        message.goPackage !== undefined && (obj.goPackage = message.goPackage);
+        message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices);
+        message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices);
+        message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices);
+        message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas);
+        message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix);
+        message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace);
+        message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix);
+        message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix);
+        message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace);
+        message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace);
+        message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseMessageOptions() {
+    return {
+        messageSetWireFormat: false,
+        noStandardDescriptorAccessor: false,
+        deprecated: false,
+        mapEntry: false,
+        uninterpretedOption: [],
+    };
+}
+exports.MessageOptions = {
+    fromJSON(object) {
+        return {
+            messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false,
+            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
+                ? Boolean(object.noStandardDescriptorAccessor)
+                : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat);
+        message.noStandardDescriptorAccessor !== undefined &&
+            (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseFieldOptions() {
+    return {
+        ctype: 0,
+        packed: false,
+        jstype: 0,
+        lazy: false,
+        unverifiedLazy: false,
+        deprecated: false,
+        weak: false,
+        uninterpretedOption: [],
+    };
+}
+exports.FieldOptions = {
+    fromJSON(object) {
+        return {
+            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
+            packed: isSet(object.packed) ? Boolean(object.packed) : false,
+            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
+            lazy: isSet(object.lazy) ? Boolean(object.lazy) : false,
+            unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            weak: isSet(object.weak) ? Boolean(object.weak) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype));
+        message.packed !== undefined && (obj.packed = message.packed);
+        message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype));
+        message.lazy !== undefined && (obj.lazy = message.lazy);
+        message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.weak !== undefined && (obj.weak = message.weak);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseOneofOptions() {
+    return { uninterpretedOption: [] };
+}
+exports.OneofOptions = {
+    fromJSON(object) {
+        return {
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumOptions() {
+    return { allowAlias: false, deprecated: false, uninterpretedOption: [] };
+}
+exports.EnumOptions = {
+    fromJSON(object) {
+        return {
+            allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumValueOptions() {
+    return { deprecated: false, uninterpretedOption: [] };
+}
+exports.EnumValueOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseServiceOptions() {
+    return { deprecated: false, uninterpretedOption: [] };
+}
+exports.ServiceOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseMethodOptions() {
+    return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] };
+}
+exports.MethodOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            idempotencyLevel: isSet(object.idempotencyLevel)
+                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
+                : 0,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.idempotencyLevel !== undefined &&
+            (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel));
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseUninterpretedOption() {
+    return {
+        name: [],
+        identifierValue: "",
+        positiveIntValue: "0",
+        negativeIntValue: "0",
+        doubleValue: 0,
+        stringValue: Buffer.alloc(0),
+        aggregateValue: "",
+    };
+}
+exports.UninterpretedOption = {
+    fromJSON(object) {
+        return {
+            name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [],
+            identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "",
+            positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0",
+            negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0",
+            doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0,
+            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
+            aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name) {
+            obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined);
+        }
+        else {
+            obj.name = [];
+        }
+        message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue);
+        message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue);
+        message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue);
+        message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue);
+        message.stringValue !== undefined &&
+            (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0)));
+        message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue);
+        return obj;
+    },
+};
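+// Usage sketch (illustrative only): 64-bit integer fields are carried as
+// decimal strings ("0" by default) to avoid Number precision loss, and bytes
+// fields round-trip through base64:
+//
+//   const opt = exports.UninterpretedOption.fromJSON({
+//     positiveIntValue: "9007199254740993",  // > Number.MAX_SAFE_INTEGER
+//     stringValue: "aGVsbG8=",               // base64 for "hello"
+//   });
+//   // opt.positiveIntValue === "9007199254740993"  (string, exact)
+//   // opt.stringValue.toString() === "hello"       (a Buffer)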
+function createBaseUninterpretedOption_NamePart() {
+    return { namePart: "", isExtension: false };
+}
+exports.UninterpretedOption_NamePart = {
+    fromJSON(object) {
+        return {
+            namePart: isSet(object.namePart) ? String(object.namePart) : "",
+            isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.namePart !== undefined && (obj.namePart = message.namePart);
+        message.isExtension !== undefined && (obj.isExtension = message.isExtension);
+        return obj;
+    },
+};
+function createBaseSourceCodeInfo() {
+    return { location: [] };
+}
+exports.SourceCodeInfo = {
+    fromJSON(object) {
+        return {
+            location: Array.isArray(object?.location)
+                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.location) {
+            obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined);
+        }
+        else {
+            obj.location = [];
+        }
+        return obj;
+    },
+};
+function createBaseSourceCodeInfo_Location() {
+    return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] };
+}
+exports.SourceCodeInfo_Location = {
+    fromJSON(object) {
+        return {
+            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
+            span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [],
+            leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "",
+            trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "",
+            leadingDetachedComments: Array.isArray(object?.leadingDetachedComments)
+                ? object.leadingDetachedComments.map((e) => String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        else {
+            obj.path = [];
+        }
+        if (message.span) {
+            obj.span = message.span.map((e) => Math.round(e));
+        }
+        else {
+            obj.span = [];
+        }
+        message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments);
+        message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments);
+        if (message.leadingDetachedComments) {
+            obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e);
+        }
+        else {
+            obj.leadingDetachedComments = [];
+        }
+        return obj;
+    },
+};
+function createBaseGeneratedCodeInfo() {
+    return { annotation: [] };
+}
+exports.GeneratedCodeInfo = {
+    fromJSON(object) {
+        return {
+            annotation: Array.isArray(object?.annotation)
+                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.annotation) {
+            obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined);
+        }
+        else {
+            obj.annotation = [];
+        }
+        return obj;
+    },
+};
+function createBaseGeneratedCodeInfo_Annotation() {
+    return { path: [], sourceFile: "", begin: 0, end: 0 };
+}
+exports.GeneratedCodeInfo_Annotation = {
+    fromJSON(object) {
+        return {
+            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
+            sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "",
+            begin: isSet(object.begin) ? Number(object.begin) : 0,
+            end: isSet(object.end) ? Number(object.end) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        else {
+            obj.path = [];
+        }
+        message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile);
+        message.begin !== undefined && (obj.begin = Math.round(message.begin));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
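+// Usage sketch (illustrative only): the two helpers are inverses, using Node's
+// Buffer when available and falling back to atob/btoa elsewhere:
+//
+//   base64FromBytes(bytesFromBase64("aGVsbG8=")) // => "aGVsbG8="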
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
new file mode 100644
index 0000000000000..159135fe87172
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
@@ -0,0 +1,24 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+function createBaseTimestamp() {
+    return { seconds: "0", nanos: 0 };
+}
+exports.Timestamp = {
+    fromJSON(object) {
+        return {
+            seconds: isSet(object.seconds) ? String(object.seconds) : "0",
+            nanos: isSet(object.nanos) ? Number(object.nanos) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.seconds !== undefined && (obj.seconds = message.seconds);
+        message.nanos !== undefined && (obj.nanos = Math.round(message.nanos));
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
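+// Usage sketch (illustrative only): seconds are kept as a decimal string to
+// preserve the full int64 range, while nanos stays a plain number:
+//
+//   exports.Timestamp.fromJSON({ seconds: 1691789108, nanos: 500000000 })
+//   // => { seconds: "1691789108", nanos: 500000000 }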
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
new file mode 100644
index 0000000000000..1ef3e1b3356b7
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
@@ -0,0 +1,106 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
+/* eslint-disable */
+const envelope_1 = require("./envelope");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_rekor_1 = require("./sigstore_rekor");
+function createBaseTimestampVerificationData() {
+    return { rfc3161Timestamps: [] };
+}
+exports.TimestampVerificationData = {
+    fromJSON(object) {
+        return {
+            rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps)
+                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rfc3161Timestamps) {
+            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined);
+        }
+        else {
+            obj.rfc3161Timestamps = [];
+        }
+        return obj;
+    },
+};
+function createBaseVerificationMaterial() {
+    return { content: undefined, tlogEntries: [], timestampVerificationData: undefined };
+}
+exports.VerificationMaterial = {
+    fromJSON(object) {
+        return {
+            content: isSet(object.publicKey)
+                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
+                : isSet(object.x509CertificateChain)
+                    ? {
+                        $case: "x509CertificateChain",
+                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
+                    }
+                    : undefined,
+            tlogEntries: Array.isArray(object?.tlogEntries)
+                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
+                : [],
+            timestampVerificationData: isSet(object.timestampVerificationData)
+                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.content?.$case === "publicKey" &&
+            (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined);
+        message.content?.$case === "x509CertificateChain" &&
+            (obj.x509CertificateChain = message.content?.x509CertificateChain
+                ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain)
+                : undefined);
+        if (message.tlogEntries) {
+            obj.tlogEntries = message.tlogEntries.map((e) => e ? sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
+        }
+        else {
+            obj.tlogEntries = [];
+        }
+        message.timestampVerificationData !== undefined &&
+            (obj.timestampVerificationData = message.timestampVerificationData
+                ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
+                : undefined);
+        return obj;
+    },
+};
+function createBaseBundle() {
+    return { mediaType: "", verificationMaterial: undefined, content: undefined };
+}
+exports.Bundle = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+            verificationMaterial: isSet(object.verificationMaterial)
+                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
+                : undefined,
+            content: isSet(object.messageSignature)
+                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
+                : isSet(object.dsseEnvelope)
+                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+        message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
+            ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
+            : undefined);
+        message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
+            ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
+            : undefined);
+        message.content?.$case === "dsseEnvelope" &&
+            (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
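+// Usage sketch (illustrative only): proto `oneof` fields are modeled as a
+// discriminated union keyed by `$case`. fromJSON picks whichever JSON key is
+// present, and toJSON flattens the union back to that key:
+//
+//   exports.Bundle.fromJSON({ mediaType: "m", dsseEnvelope: {...} }).content
+//   // => { $case: "dsseEnvelope", dsseEnvelope: <Envelope> }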
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
new file mode 100644
index 0000000000000..bcd654e9154b9
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -0,0 +1,457 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See  for more
+ * details.
+ * UNSPECIFIED SHOULD NOT be used; the primary reason for its inclusion is to
+ * force proto JSON serialization to emit the hash algorithm that was used,
+ * since the default is to *omit* an enum's default value (the first value,
+ * represented by '0').
+ */
+var HashAlgorithm;
+(function (HashAlgorithm) {
+    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
+    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
+})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {}));
+function hashAlgorithmFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "HASH_ALGORITHM_UNSPECIFIED":
+            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
+        case 1:
+        case "SHA2_256":
+            return HashAlgorithm.SHA2_256;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
+function hashAlgorithmToJSON(object) {
+    switch (object) {
+        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
+            return "HASH_ALGORITHM_UNSPECIFIED";
+        case HashAlgorithm.SHA2_256:
+            return "SHA2_256";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
+/**
+ * Details of a specific public key, capturing the key encoding method,
+ * and signature algorithm.
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519 the valid permutations are listed as a linear set instead of a
+ * Cartesian set (i.e. one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+var PublicKeyDetails;
+(function (PublicKeyDetails) {
+    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+    /** PKCS1_RSA_PKCS1V5 - RSA */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
+    /** PKCS1_RSA_PSS - See RFC8017 */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
+    /** PKIX_ECDSA_P256_SHA_256 - ECDSA */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
+    /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
+    /** PKIX_ED25519 - Ed 25519 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
+})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {}));
+function publicKeyDetailsFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
+            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
+        case 1:
+        case "PKCS1_RSA_PKCS1V5":
+            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
+        case 2:
+        case "PKCS1_RSA_PSS":
+            return PublicKeyDetails.PKCS1_RSA_PSS;
+        case 3:
+        case "PKIX_RSA_PKCS1V5":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
+        case 4:
+        case "PKIX_RSA_PSS":
+            return PublicKeyDetails.PKIX_RSA_PSS;
+        case 5:
+        case "PKIX_ECDSA_P256_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
+        case 6:
+        case "PKIX_ECDSA_P256_HMAC_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
+        case 7:
+        case "PKIX_ED25519":
+            return PublicKeyDetails.PKIX_ED25519;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
+function publicKeyDetailsToJSON(object) {
+    switch (object) {
+        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
+            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
+            return "PKCS1_RSA_PKCS1V5";
+        case PublicKeyDetails.PKCS1_RSA_PSS:
+            return "PKCS1_RSA_PSS";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
+            return "PKIX_RSA_PKCS1V5";
+        case PublicKeyDetails.PKIX_RSA_PSS:
+            return "PKIX_RSA_PSS";
+        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
+            return "PKIX_ECDSA_P256_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
+            return "PKIX_ECDSA_P256_HMAC_SHA_256";
+        case PublicKeyDetails.PKIX_ED25519:
+            return "PKIX_ED25519";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
+var SubjectAlternativeNameType;
+(function (SubjectAlternativeNameType) {
+    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
+    /**
+     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
+     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
+     * for more details.
+     */
+    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
+})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {}));
+function subjectAlternativeNameTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
+            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
+        case 1:
+        case "EMAIL":
+            return SubjectAlternativeNameType.EMAIL;
+        case 2:
+        case "URI":
+            return SubjectAlternativeNameType.URI;
+        case 3:
+        case "OTHER_NAME":
+            return SubjectAlternativeNameType.OTHER_NAME;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
+function subjectAlternativeNameTypeToJSON(object) {
+    switch (object) {
+        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
+            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+        case SubjectAlternativeNameType.EMAIL:
+            return "EMAIL";
+        case SubjectAlternativeNameType.URI:
+            return "URI";
+        case SubjectAlternativeNameType.OTHER_NAME:
+            return "OTHER_NAME";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
+function createBaseHashOutput() {
+    return { algorithm: 0, digest: Buffer.alloc(0) };
+}
+exports.HashOutput = {
+    fromJSON(object) {
+        return {
+            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
+            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm));
+        message.digest !== undefined &&
+            (obj.digest = base64FromBytes(message.digest !== undefined ? message.digest : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseMessageSignature() {
+    return { messageDigest: undefined, signature: Buffer.alloc(0) };
+}
+exports.MessageSignature = {
+    fromJSON(object) {
+        return {
+            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
+            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.messageDigest !== undefined &&
+            (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined);
+        message.signature !== undefined &&
+            (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseLogId() {
+    return { keyId: Buffer.alloc(0) };
+}
+exports.LogId = {
+    fromJSON(object) {
+        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.keyId !== undefined &&
+            (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseRFC3161SignedTimestamp() {
+    return { signedTimestamp: Buffer.alloc(0) };
+}
+exports.RFC3161SignedTimestamp = {
+    fromJSON(object) {
+        return {
+            signedTimestamp: isSet(object.signedTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signedTimestamp !== undefined &&
+            (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBasePublicKey() {
+    return { rawBytes: undefined, keyDetails: 0, validFor: undefined };
+}
+exports.PublicKey = {
+    fromJSON(object) {
+        return {
+            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
+            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
+            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.rawBytes !== undefined &&
+            (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined);
+        message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails));
+        message.validFor !== undefined &&
+            (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined);
+        return obj;
+    },
+};
+function createBasePublicKeyIdentifier() {
+    return { hint: "" };
+}
+exports.PublicKeyIdentifier = {
+    fromJSON(object) {
+        return { hint: isSet(object.hint) ? String(object.hint) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.hint !== undefined && (obj.hint = message.hint);
+        return obj;
+    },
+};
+function createBaseObjectIdentifier() {
+    return { id: [] };
+}
+exports.ObjectIdentifier = {
+    fromJSON(object) {
+        return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.id) {
+            obj.id = message.id.map((e) => Math.round(e));
+        }
+        else {
+            obj.id = [];
+        }
+        return obj;
+    },
+};
+function createBaseObjectIdentifierValuePair() {
+    return { oid: undefined, value: Buffer.alloc(0) };
+}
+exports.ObjectIdentifierValuePair = {
+    fromJSON(object) {
+        return {
+            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined);
+        message.value !== undefined &&
+            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseDistinguishedName() {
+    return { organization: "", commonName: "" };
+}
+exports.DistinguishedName = {
+    fromJSON(object) {
+        return {
+            organization: isSet(object.organization) ? String(object.organization) : "",
+            commonName: isSet(object.commonName) ? String(object.commonName) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.organization !== undefined && (obj.organization = message.organization);
+        message.commonName !== undefined && (obj.commonName = message.commonName);
+        return obj;
+    },
+};
+function createBaseX509Certificate() {
+    return { rawBytes: Buffer.alloc(0) };
+}
+exports.X509Certificate = {
+    fromJSON(object) {
+        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.rawBytes !== undefined &&
+            (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseSubjectAlternativeName() {
+    return { type: 0, identity: undefined };
+}
+exports.SubjectAlternativeName = {
+    fromJSON(object) {
+        return {
+            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
+            identity: isSet(object.regexp)
+                ? { $case: "regexp", regexp: String(object.regexp) }
+                : isSet(object.value)
+                    ? { $case: "value", value: String(object.value) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type));
+        message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp);
+        message.identity?.$case === "value" && (obj.value = message.identity?.value);
+        return obj;
+    },
+};
+function createBaseX509CertificateChain() {
+    return { certificates: [] };
+}
+exports.X509CertificateChain = {
+    fromJSON(object) {
+        return {
+            certificates: Array.isArray(object?.certificates)
+                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.certificates) {
+            obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined);
+        }
+        else {
+            obj.certificates = [];
+        }
+        return obj;
+    },
+};
+function createBaseTimeRange() {
+    return { start: undefined, end: undefined };
+}
+exports.TimeRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
+            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = message.start.toISOString());
+        message.end !== undefined && (obj.end = message.end.toISOString());
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function fromTimestamp(t) {
+    let millis = Number(t.seconds) * 1000;
+    millis += t.nanos / 1000000;
+    return new Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
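The generated codecs above follow one pattern: fromJSON coerces plain JSON (decoding base64 strings into Buffers, mapping enum names to values) and toJSON reverses it. A minimal usage sketch, assuming the package's top-level index re-exports these modules (it does, per the index.js added below) and that the HashAlgorithm enum accepts the JSON name 'SHA2_256':

const { HashOutput } = require('@sigstore/protobuf-specs');

// Round-trip: base64 JSON fields decode to Buffers and re-encode on toJSON.
const msg = HashOutput.fromJSON({ algorithm: 'SHA2_256', digest: 'aGVsbG8=' });
console.log(msg.digest.toString());  // 'hello'
console.log(HashOutput.toJSON(msg)); // { algorithm: 'SHA2_256', digest: 'aGVsbG8=' }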
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
new file mode 100644
index 0000000000000..398193b2075a7
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
@@ -0,0 +1,167 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+function createBaseKindVersion() {
+    return { kind: "", version: "" };
+}
+exports.KindVersion = {
+    fromJSON(object) {
+        return {
+            kind: isSet(object.kind) ? String(object.kind) : "",
+            version: isSet(object.version) ? String(object.version) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.kind !== undefined && (obj.kind = message.kind);
+        message.version !== undefined && (obj.version = message.version);
+        return obj;
+    },
+};
+function createBaseCheckpoint() {
+    return { envelope: "" };
+}
+exports.Checkpoint = {
+    fromJSON(object) {
+        return { envelope: isSet(object.envelope) ? String(object.envelope) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.envelope !== undefined && (obj.envelope = message.envelope);
+        return obj;
+    },
+};
+function createBaseInclusionProof() {
+    return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined };
+}
+exports.InclusionProof = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
+            treeSize: isSet(object.treeSize) ? String(object.treeSize) : "0",
+            hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [],
+            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
+        message.rootHash !== undefined &&
+            (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0)));
+        message.treeSize !== undefined && (obj.treeSize = message.treeSize);
+        if (message.hashes) {
+            obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0)));
+        }
+        else {
+            obj.hashes = [];
+        }
+        message.checkpoint !== undefined &&
+            (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined);
+        return obj;
+    },
+};
+function createBaseInclusionPromise() {
+    return { signedEntryTimestamp: Buffer.alloc(0) };
+}
+exports.InclusionPromise = {
+    fromJSON(object) {
+        return {
+            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signedEntryTimestamp !== undefined &&
+            (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseTransparencyLogEntry() {
+    return {
+        logIndex: "0",
+        logId: undefined,
+        kindVersion: undefined,
+        integratedTime: "0",
+        inclusionPromise: undefined,
+        inclusionProof: undefined,
+        canonicalizedBody: Buffer.alloc(0),
+    };
+}
+exports.TransparencyLogEntry = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
+            integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0",
+            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
+            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
+            canonicalizedBody: isSet(object.canonicalizedBody)
+                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
+        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
+        message.kindVersion !== undefined &&
+            (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined);
+        message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime);
+        message.inclusionPromise !== undefined &&
+            (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined);
+        message.inclusionProof !== undefined &&
+            (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined);
+        message.canonicalizedBody !== undefined &&
+            (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? message.canonicalizedBody : Buffer.alloc(0)));
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
new file mode 100644
index 0000000000000..05e566767cdb2
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
@@ -0,0 +1,103 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+function createBaseTransparencyLogInstance() {
+    return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined };
+}
+exports.TransparencyLogInstance = {
+    fromJSON(object) {
+        return {
+            baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "",
+            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
+            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl);
+        message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm));
+        message.publicKey !== undefined &&
+            (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined);
+        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
+        return obj;
+    },
+};
+function createBaseCertificateAuthority() {
+    return { subject: undefined, uri: "", certChain: undefined, validFor: undefined };
+}
+exports.CertificateAuthority = {
+    fromJSON(object) {
+        return {
+            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
+            uri: isSet(object.uri) ? String(object.uri) : "",
+            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
+            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.subject !== undefined &&
+            (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined);
+        message.uri !== undefined && (obj.uri = message.uri);
+        message.certChain !== undefined &&
+            (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined);
+        message.validFor !== undefined &&
+            (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined);
+        return obj;
+    },
+};
+function createBaseTrustedRoot() {
+    return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] };
+}
+exports.TrustedRoot = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+            tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [],
+            certificateAuthorities: Array.isArray(object?.certificateAuthorities)
+                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+            ctlogs: Array.isArray(object?.ctlogs)
+                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+                : [],
+            timestampAuthorities: Array.isArray(object?.timestampAuthorities)
+                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+        if (message.tlogs) {
+            obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+        }
+        else {
+            obj.tlogs = [];
+        }
+        if (message.certificateAuthorities) {
+            obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+        }
+        else {
+            obj.certificateAuthorities = [];
+        }
+        if (message.ctlogs) {
+            obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+        }
+        else {
+            obj.ctlogs = [];
+        }
+        if (message.timestampAuthorities) {
+            obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+        }
+        else {
+            obj.timestampAuthorities = [];
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
new file mode 100644
index 0000000000000..8a72b89761869
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
@@ -0,0 +1,273 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
+/* eslint-disable */
+const sigstore_bundle_1 = require("./sigstore_bundle");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_trustroot_1 = require("./sigstore_trustroot");
+function createBaseCertificateIdentity() {
+    return { issuer: "", san: undefined, oids: [] };
+}
+exports.CertificateIdentity = {
+    fromJSON(object) {
+        return {
+            issuer: isSet(object.issuer) ? String(object.issuer) : "",
+            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
+            oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.issuer !== undefined && (obj.issuer = message.issuer);
+        message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined);
+        if (message.oids) {
+            obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined);
+        }
+        else {
+            obj.oids = [];
+        }
+        return obj;
+    },
+};
+function createBaseCertificateIdentities() {
+    return { identities: [] };
+}
+exports.CertificateIdentities = {
+    fromJSON(object) {
+        return {
+            identities: Array.isArray(object?.identities)
+                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.identities) {
+            obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined);
+        }
+        else {
+            obj.identities = [];
+        }
+        return obj;
+    },
+};
+function createBasePublicKeyIdentities() {
+    return { publicKeys: [] };
+}
+exports.PublicKeyIdentities = {
+    fromJSON(object) {
+        return {
+            publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.publicKeys) {
+            obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined);
+        }
+        else {
+            obj.publicKeys = [];
+        }
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions() {
+    return { signers: undefined, tlogOptions: undefined, ctlogOptions: undefined, tsaOptions: undefined };
+}
+exports.ArtifactVerificationOptions = {
+    fromJSON(object) {
+        return {
+            signers: isSet(object.certificateIdentities)
+                ? {
+                    $case: "certificateIdentities",
+                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
+                }
+                : isSet(object.publicKeys)
+                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
+                    : undefined,
+            tlogOptions: isSet(object.tlogOptions)
+                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
+                : undefined,
+            ctlogOptions: isSet(object.ctlogOptions)
+                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
+                : undefined,
+            tsaOptions: isSet(object.tsaOptions)
+                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signers?.$case === "certificateIdentities" &&
+            (obj.certificateIdentities = message.signers?.certificateIdentities
+                ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities)
+                : undefined);
+        message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys
+            ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys)
+            : undefined);
+        message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions
+            ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions)
+            : undefined);
+        message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions
+            ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions)
+            : undefined);
+        message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions
+            ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions)
+            : undefined);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_TlogOptions() {
+    return { threshold: 0, performOnlineVerification: false, disable: false };
+}
+exports.ArtifactVerificationOptions_TlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            performOnlineVerification: isSet(object.performOnlineVerification)
+                ? Boolean(object.performOnlineVerification)
+                : false,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.performOnlineVerification !== undefined &&
+            (obj.performOnlineVerification = message.performOnlineVerification);
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_CtlogOptions() {
+    return { threshold: 0, detachedSct: false, disable: false };
+}
+exports.ArtifactVerificationOptions_CtlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            detachedSct: isSet(object.detachedSct) ? Boolean(object.detachedSct) : false,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.detachedSct !== undefined && (obj.detachedSct = message.detachedSct);
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() {
+    return { threshold: 0, disable: false };
+}
+exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifact() {
+    return { data: undefined };
+}
+exports.Artifact = {
+    fromJSON(object) {
+        return {
+            data: isSet(object.artifactUri)
+                ? { $case: "artifactUri", artifactUri: String(object.artifactUri) }
+                : isSet(object.artifact)
+                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri);
+        message.data?.$case === "artifact" &&
+            (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined);
+        return obj;
+    },
+};
+function createBaseInput() {
+    return {
+        artifactTrustRoot: undefined,
+        artifactVerificationOptions: undefined,
+        bundle: undefined,
+        artifact: undefined,
+    };
+}
+exports.Input = {
+    fromJSON(object) {
+        return {
+            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
+            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
+                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
+                : undefined,
+            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
+            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.artifactTrustRoot !== undefined &&
+            (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined);
+        message.artifactVerificationOptions !== undefined &&
+            (obj.artifactVerificationOptions = message.artifactVerificationOptions
+                ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions)
+                : undefined);
+        message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined);
+        message.artifact !== undefined && (obj.artifact = message.artifact ? exports.Artifact.toJSON(message.artifact) : undefined);
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
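Protobuf oneof fields surface in these codecs as $case-tagged unions (see the data field of Artifact above). A small sketch of the convention, with illustrative values:

const { Artifact } = require('@sigstore/protobuf-specs');

// fromJSON picks whichever JSON property is set and tags it with $case...
const a = Artifact.fromJSON({ artifactUri: 'https://example.com/pkg.tgz' });
// a.data => { $case: 'artifactUri', artifactUri: 'https://example.com/pkg.tgz' }

// ...and toJSON flattens the union back to a single JSON property.
console.log(Artifact.toJSON(a)); // { artifactUri: 'https://example.com/pkg.tgz' }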
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js
new file mode 100644
index 0000000000000..eafb768c48fca
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js
@@ -0,0 +1,37 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+__exportStar(require("./__generated__/envelope"), exports);
+__exportStar(require("./__generated__/sigstore_bundle"), exports);
+__exportStar(require("./__generated__/sigstore_common"), exports);
+__exportStar(require("./__generated__/sigstore_rekor"), exports);
+__exportStar(require("./__generated__/sigstore_trustroot"), exports);
+__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json
new file mode 100644
index 0000000000000..450abb157f31a
--- /dev/null
+++ b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json
@@ -0,0 +1,31 @@
+{
+  "name": "@sigstore/protobuf-specs",
+  "version": "0.2.1",
+  "description": "code-signing for npm packages",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "build": "tsc"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/protobuf-specs.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "bugs": {
+    "url": "https://github.com/sigstore/protobuf-specs/issues"
+  },
+  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
+  "devDependencies": {
+    "@tsconfig/node14": "^1.0.3",
+    "@types/node": "^18.14.0",
+    "typescript": "^4.9.5"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  }
+}
diff --git a/node_modules/@sigstore/bundle/package.json b/node_modules/@sigstore/bundle/package.json
new file mode 100644
index 0000000000000..2b15d08060753
--- /dev/null
+++ b/node_modules/@sigstore/bundle/package.json
@@ -0,0 +1,35 @@
+{
+  "name": "@sigstore/bundle",
+  "version": "2.0.0",
+  "description": "Sigstore bundle type",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "clean": "shx rm -rf dist *.tsbuildinfo",
+    "build": "tsc --build",
+    "test": "jest"
+  },
+  "files": [
+    "dist",
+    "store"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/sigstore-js.git"
+  },
+  "bugs": {
+    "url": "https://github.com/sigstore/sigstore-js/issues"
+  },
+  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/bundle#readme",
+  "publishConfig": {
+    "provenance": true
+  },
+  "dependencies": {
+    "@sigstore/protobuf-specs": "^0.2.1"
+  },
+  "engines": {
+    "node": "^16.14.0 || >=18.0.0"
+  }
+}
diff --git a/node_modules/@sigstore/sign/LICENSE b/node_modules/@sigstore/sign/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/@sigstore/sign/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 The Sigstore Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/@sigstore/sign/dist/bundler/base.js b/node_modules/@sigstore/sign/dist/bundler/base.js
new file mode 100644
index 0000000000000..61d5eba4568a3
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/bundler/base.js
@@ -0,0 +1,50 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BaseBundleBuilder = void 0;
+// BaseBundleBuilder is a base class for BundleBuilder implementations. It
+// provides the basic workflow for signing and witnessing an artifact.
+// Subclasses must implement the `package` method to assemble a valid bundle
+// with the generated signature and verification material.
+class BaseBundleBuilder {
+    constructor(options) {
+        this.signer = options.signer;
+        this.witnesses = options.witnesses;
+    }
+    // Executes the signing/witnessing process for the given artifact.
+    async create(artifact) {
+        const signature = await this.prepare(artifact).then((blob) => this.signer.sign(blob));
+        const bundle = await this.package(artifact, signature);
+        // Invoke all of the witnesses in parallel
+        const verificationMaterials = await Promise.all(this.witnesses.map((witness) => witness.testify(bundle.content, publicKey(signature.key))));
+        // Collect the verification material from all of the witnesses
+        const tlogEntryList = [];
+        const timestampList = [];
+        verificationMaterials.forEach(({ tlogEntries, rfc3161Timestamps }) => {
+            tlogEntryList.push(...(tlogEntries ?? []));
+            timestampList.push(...(rfc3161Timestamps ?? []));
+        });
+        // Merge the collected verification material into the bundle
+        bundle.verificationMaterial.tlogEntries = tlogEntryList;
+        bundle.verificationMaterial.timestampVerificationData = {
+            rfc3161Timestamps: timestampList,
+        };
+        return bundle;
+    }
+    // Override this function to apply any pre-signing transformations to the
+    // artifact. The returned buffer will be signed by the signer. The default
+    // implementation simply returns the artifact data.
+    async prepare(artifact) {
+        return artifact.data;
+    }
+}
+exports.BaseBundleBuilder = BaseBundleBuilder;
+// Extracts the public key from a KeyMaterial. Returns either the public key
+// or the certificate, depending on the type of key material.
+function publicKey(key) {
+    switch (key.$case) {
+        case 'publicKey':
+            return key.publicKey;
+        case 'x509Certificate':
+            return key.certificate;
+    }
+}
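A sketch of how a concrete builder drives the create() workflow above, with stubbed signer and witness objects; the deep require path and all stub values are assumptions for illustration, not working credentials:

const { DSSEBundleBuilder } = require('@sigstore/sign/dist/bundler');

// Stub signer: returns the signature bytes plus the key material used.
const signer = {
  sign: async (blob) => ({
    signature: Buffer.from('not-a-real-signature'),
    key: { $case: 'publicKey', publicKey: 'stub-pem', hint: 'stub-key-id' },
  }),
};

// Stub witness: a real one would contact Rekor or a TSA for entries.
const witness = {
  testify: async (content, publicKey) => ({ tlogEntries: [], rfc3161Timestamps: [] }),
};

new DSSEBundleBuilder({ signer, witnesses: [witness] })
  .create({ data: Buffer.from('{"spec":"demo"}'), type: 'application/json' })
  .then((bundle) => console.log(bundle.mediaType));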
diff --git a/node_modules/@sigstore/sign/dist/bundler/bundle.js b/node_modules/@sigstore/sign/dist/bundler/bundle.js
new file mode 100644
index 0000000000000..f01aac252b304
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/bundler/bundle.js
@@ -0,0 +1,70 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const sigstore = __importStar(require("@sigstore/bundle"));
+const util_1 = require("../util");
+// Helper functions for assembling the parts of a Sigstore bundle
+// Message signature bundle - $case: 'messageSignature'
+function toMessageSignatureBundle(artifact, signature) {
+    const digest = util_1.crypto.hash(artifact.data);
+    return sigstore.toMessageSignatureBundle({
+        digest,
+        signature: signature.signature,
+        certificate: signature.key.$case === 'x509Certificate'
+            ? util_1.pem.toDER(signature.key.certificate)
+            : undefined,
+        keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined,
+    });
+}
+exports.toMessageSignatureBundle = toMessageSignatureBundle;
+// DSSE envelope bundle - $case: 'dsseEnvelope'
+function toDSSEBundle(artifact, signature) {
+    return sigstore.toDSSEBundle({
+        artifact: artifact.data,
+        artifactType: artifact.type,
+        signature: signature.signature,
+        certificate: signature.key.$case === 'x509Certificate'
+            ? util_1.pem.toDER(signature.key.certificate)
+            : undefined,
+        keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined,
+    });
+}
+exports.toDSSEBundle = toDSSEBundle;
diff --git a/node_modules/@sigstore/sign/dist/bundler/dsse.js b/node_modules/@sigstore/sign/dist/bundler/dsse.js
new file mode 100644
index 0000000000000..486d289aea38c
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/bundler/dsse.js
@@ -0,0 +1,45 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DSSEBundleBuilder = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const util_1 = require("../util");
+const base_1 = require("./base");
+const bundle_1 = require("./bundle");
+// BundleBuilder implementation for DSSE wrapped attestations
+class DSSEBundleBuilder extends base_1.BaseBundleBuilder {
+    constructor(options) {
+        super(options);
+    }
+    // DSSE requires the artifact to be pre-encoded with the payload type
+    // before the signature is generated.
+    async prepare(artifact) {
+        const a = artifactDefaults(artifact);
+        return util_1.dsse.preAuthEncoding(a.type, a.data);
+    }
+    // Packages the artifact and signature into a DSSE bundle
+    async package(artifact, signature) {
+        return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature);
+    }
+}
+exports.DSSEBundleBuilder = DSSEBundleBuilder;
+// Defaults the artifact type to an empty string if not provided
+function artifactDefaults(artifact) {
+    return {
+        ...artifact,
+        type: artifact.type ?? '',
+    };
+}
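For context, the pre-encoding that prepare() relies on is the DSSE v1 pre-authentication encoding (PAE), which frames the payload type and body with their lengths before signing. A sketch per the DSSE spec, not the actual ../util implementation:

// PAE(type, body) = "DSSEv1 " + len(type) + " " + type + " " + len(body) + " " + body
function preAuthEncoding(payloadType, payload) {
  const prefix = `DSSEv1 ${Buffer.byteLength(payloadType)} ${payloadType} ${payload.length} `;
  return Buffer.concat([Buffer.from(prefix, 'ascii'), payload]);
}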
diff --git a/node_modules/@sigstore/sign/dist/bundler/index.js b/node_modules/@sigstore/sign/dist/bundler/index.js
new file mode 100644
index 0000000000000..d67c8c324a4f0
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/bundler/index.js
@@ -0,0 +1,7 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0;
+var dsse_1 = require("./dsse");
+Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return dsse_1.DSSEBundleBuilder; } });
+var message_1 = require("./message");
+Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return message_1.MessageSignatureBundleBuilder; } });
diff --git a/node_modules/@sigstore/sign/dist/bundler/message.js b/node_modules/@sigstore/sign/dist/bundler/message.js
new file mode 100644
index 0000000000000..e3991f42bab93
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/bundler/message.js
@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.MessageSignatureBundleBuilder = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const base_1 = require("./base");
+const bundle_1 = require("./bundle");
+// BundleBuilder implementation for raw message signatures
+class MessageSignatureBundleBuilder extends base_1.BaseBundleBuilder {
+    constructor(options) {
+        super(options);
+    }
+    async package(artifact, signature) {
+        return (0, bundle_1.toMessageSignatureBundle)(artifact, signature);
+    }
+}
+exports.MessageSignatureBundleBuilder = MessageSignatureBundleBuilder;
diff --git a/node_modules/@sigstore/sign/dist/error.js b/node_modules/@sigstore/sign/dist/error.js
new file mode 100644
index 0000000000000..b52ea7eef5d9b
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/error.js
@@ -0,0 +1,12 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.InternalError = void 0;
+class InternalError extends Error {
+    constructor({ code, message, cause, }) {
+        super(message);
+        this.name = this.constructor.name;
+        this.cause = cause;
+        this.code = code;
+    }
+}
+exports.InternalError = InternalError;
diff --git a/node_modules/@sigstore/sign/dist/external/error.js b/node_modules/@sigstore/sign/dist/external/error.js
new file mode 100644
index 0000000000000..d1e1c3df8a878
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/external/error.js
@@ -0,0 +1,22 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.checkStatus = exports.HTTPError = void 0;
+class HTTPError extends Error {
+    constructor(response) {
+        super(`HTTP Error: ${response.status} ${response.statusText}`);
+        this.response = response;
+        this.statusCode = response.status;
+        this.location = response.headers?.get('Location') || undefined;
+    }
+}
+exports.HTTPError = HTTPError;
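+// Passes 2xx responses through untouched; throws an HTTPError for anything else.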
+const checkStatus = (response) => {
+    if (response.ok) {
+        return response;
+    }
+    else {
+        throw new HTTPError(response);
+    }
+};
+exports.checkStatus = checkStatus;
diff --git a/node_modules/@sigstore/sign/dist/external/fulcio.js b/node_modules/@sigstore/sign/dist/external/fulcio.js
new file mode 100644
index 0000000000000..b27637c2dc570
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/external/fulcio.js
@@ -0,0 +1,52 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Fulcio = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+const util_1 = require("../util");
+const error_1 = require("./error");
+/**
+ * Fulcio API client.
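+ * Exchanges an OIDC identity token for a short-lived signing certificate.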
+ */
+class Fulcio {
+    constructor(options) {
+        this.fetch = make_fetch_happen_1.default.defaults({
+            retry: options.retry,
+            timeout: options.timeout,
+            headers: {
+                'Content-Type': 'application/json',
+                'User-Agent': util_1.ua.getUserAgent(),
+            },
+        });
+        this.baseUrl = options.baseURL;
+    }
+    async createSigningCertificate(request) {
+        const url = `${this.baseUrl}/api/v2/signingCert`;
+        const response = await this.fetch(url, {
+            method: 'POST',
+            body: JSON.stringify(request),
+        });
+        (0, error_1.checkStatus)(response);
+        const data = await response.json();
+        return data;
+    }
+}
+exports.Fulcio = Fulcio;
diff --git a/node_modules/@sigstore/sign/dist/external/rekor.js b/node_modules/@sigstore/sign/dist/external/rekor.js
new file mode 100644
index 0000000000000..9b4e66b656251
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/external/rekor.js
@@ -0,0 +1,116 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Rekor = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+const util_1 = require("../util");
+const error_1 = require("./error");
+/**
+ * Rekor API client.
+ */
+class Rekor {
+    constructor(options) {
+        this.fetch = make_fetch_happen_1.default.defaults({
+            retry: options.retry,
+            timeout: options.timeout,
+            headers: {
+                Accept: 'application/json',
+                'User-Agent': util_1.ua.getUserAgent(),
+            },
+        });
+        this.baseUrl = options.baseURL;
+    }
+    /**
+     * Create a new entry in the Rekor log.
+     * @param proposedEntry {ProposedEntry} Data to create a new entry
+     * @returns {Promise} The created entry
+     */
+    async createEntry(proposedEntry) {
+        const url = `${this.baseUrl}/api/v1/log/entries`;
+        const response = await this.fetch(url, {
+            method: 'POST',
+            headers: { 'Content-Type': 'application/json' },
+            body: JSON.stringify(proposedEntry),
+        });
+        (0, error_1.checkStatus)(response);
+        const data = await response.json();
+        return entryFromResponse(data);
+    }
+    /**
+     * Get an entry from the Rekor log.
+     * @param uuid {string} The UUID of the entry to retrieve
+     * @returns {Promise} The retrieved entry
+     */
+    async getEntry(uuid) {
+        const url = `${this.baseUrl}/api/v1/log/entries/${uuid}`;
+        const response = await this.fetch(url);
+        (0, error_1.checkStatus)(response);
+        const data = await response.json();
+        return entryFromResponse(data);
+    }
+    /**
+     * Search the Rekor log index for entries matching the given query.
+     * @param opts {SearchIndex} Options to search the Rekor log
+     * @returns {Promise} UUIDs of matching entries
+     */
+    async searchIndex(opts) {
+        const url = `${this.baseUrl}/api/v1/index/retrieve`;
+        const response = await this.fetch(url, {
+            method: 'POST',
+            body: JSON.stringify(opts),
+            headers: { 'Content-Type': 'application/json' },
+        });
+        (0, error_1.checkStatus)(response);
+        const data = await response.json();
+        return data;
+    }
+    /**
+     * Search the Rekor log for entries matching the given query.
+     * @param opts {SearchLogQuery} Query to search the Rekor log
+     * @returns {Promise} List of matching entries
+     */
+    async searchLog(opts) {
+        const url = `${this.baseUrl}/api/v1/log/entries/retrieve`;
+        const response = await this.fetch(url, {
+            method: 'POST',
+            body: JSON.stringify(opts),
+            headers: { 'Content-Type': 'application/json' },
+        });
+        (0, error_1.checkStatus)(response);
+        const rawData = await response.json();
+        const data = rawData.map((d) => entryFromResponse(d));
+        return data;
+    }
+}
+exports.Rekor = Rekor;
+// Unpack the response from the Rekor API into a more convenient format.
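+// Entries come back keyed by UUID, e.g. { "<uuid>": { body, integratedTime, ... } }.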
+function entryFromResponse(data) {
+    const entries = Object.entries(data);
+    if (entries.length !== 1) {
+        throw new Error('Expected exactly one entry in Rekor response');
+    }
+    // Grab UUID and entry data from the response
+    const [uuid, entry] = entries[0];
+    return {
+        ...entry,
+        uuid,
+    };
+}
diff --git a/node_modules/@sigstore/sign/dist/external/tsa.js b/node_modules/@sigstore/sign/dist/external/tsa.js
new file mode 100644
index 0000000000000..5277d7d3f9707
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/external/tsa.js
@@ -0,0 +1,48 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimestampAuthority = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+const util_1 = require("../util");
+const error_1 = require("./error");
+class TimestampAuthority {
+    constructor(options) {
+        this.fetch = make_fetch_happen_1.default.defaults({
+            retry: options.retry,
+            timeout: options.timeout,
+            headers: {
+                'Content-Type': 'application/json',
+                'User-Agent': util_1.ua.getUserAgent(),
+            },
+        });
+        this.baseUrl = options.baseURL;
+    }
+    async createTimestamp(request) {
+        const url = `${this.baseUrl}/api/v1/timestamp`;
+        const response = await this.fetch(url, {
+            method: 'POST',
+            body: JSON.stringify(request),
+        });
+        (0, error_1.checkStatus)(response);
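+        // The response body is the raw RFC 3161 timestamp token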
+        return response.buffer();
+    }
+}
+exports.TimestampAuthority = TimestampAuthority;
diff --git a/node_modules/@sigstore/sign/dist/identity/ci.js b/node_modules/@sigstore/sign/dist/identity/ci.js
new file mode 100644
index 0000000000000..d79133952b605
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/identity/ci.js
@@ -0,0 +1,74 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CIContextProvider = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+// Collection of all the CI-specific providers we have implemented
+const providers = [getGHAToken, getEnv];
+/**
+ * CIContextProvider is a composite identity provider which will iterate
+ * over all of the CI-specific providers and return the token from the first
+ * one that resolves.
+ */
+class CIContextProvider {
+    /* istanbul ignore next */
+    constructor(audience = 'sigstore') {
+        this.audience = audience;
+    }
+    // Invoke all registered ProviderFuncs and return the value of whichever one
+    // resolves first.
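+    // (Promise.any rejects only if every provider rejects.)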
+    async getToken() {
+        return Promise.any(providers.map((getToken) => getToken(this.audience))).catch(() => Promise.reject('CI: no tokens available'));
+    }
+}
+exports.CIContextProvider = CIContextProvider;
+/**
+ * getGHAToken can retrieve an OIDC token when running in a GitHub Actions
+ * workflow
+ */
+async function getGHAToken(audience) {
+    // Check to see if we're running in GitHub Actions
+    if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL ||
+        !process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN) {
+        return Promise.reject('no token available');
+    }
+    // Construct URL to request token w/ appropriate audience
+    const url = new URL(process.env.ACTIONS_ID_TOKEN_REQUEST_URL);
+    url.searchParams.append('audience', audience);
+    const response = await (0, make_fetch_happen_1.default)(url.href, {
+        retry: 2,
+        headers: {
+            Accept: 'application/json',
+            Authorization: `Bearer ${process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN}`,
+        },
+    });
+    return response.json().then((data) => data.value);
+}
+/**
+ * getEnv can retrieve an OIDC token from an environment variable.
+ * This matches the behavior of https://github.com/sigstore/cosign/tree/main/pkg/providers/envvar
+ */
+async function getEnv() {
+    if (!process.env.SIGSTORE_ID_TOKEN) {
+        return Promise.reject('no token available');
+    }
+    return process.env.SIGSTORE_ID_TOKEN;
+}
diff --git a/node_modules/@sigstore/sign/dist/identity/index.js b/node_modules/@sigstore/sign/dist/identity/index.js
new file mode 100644
index 0000000000000..1c1223b443fab
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/identity/index.js
@@ -0,0 +1,20 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CIContextProvider = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var ci_1 = require("./ci");
+Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return ci_1.CIContextProvider; } });
diff --git a/node_modules/@sigstore/sign/dist/identity/provider.js b/node_modules/@sigstore/sign/dist/identity/provider.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/identity/provider.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/sign/dist/index.js b/node_modules/@sigstore/sign/dist/index.js
new file mode 100644
index 0000000000000..f6d97c673ec62
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/index.js
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAWitness = exports.RekorWitness = exports.FulcioSigner = exports.CIContextProvider = exports.InternalError = exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0;
+var bundler_1 = require("./bundler");
+Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return bundler_1.DSSEBundleBuilder; } });
+Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return bundler_1.MessageSignatureBundleBuilder; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } });
+var identity_1 = require("./identity");
+Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return identity_1.CIContextProvider; } });
+var signer_1 = require("./signer");
+Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return signer_1.FulcioSigner; } });
+var witness_1 = require("./witness");
+Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return witness_1.RekorWitness; } });
+Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return witness_1.TSAWitness; } });
diff --git a/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
new file mode 100644
index 0000000000000..9c0af0e914493
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
@@ -0,0 +1,64 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CAClient = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const fulcio_1 = require("../../external/fulcio");
+class CAClient {
+    constructor(options) {
+        this.fulcio = new fulcio_1.Fulcio({
+            baseURL: options.fulcioBaseURL,
+            retry: options.retry,
+            timeout: options.timeout,
+        });
+    }
+    async createSigningCertificate(identityToken, publicKey, challenge) {
+        const request = toCertificateRequest(identityToken, publicKey, challenge);
+        try {
+            const resp = await this.fulcio.createSigningCertificate(request);
+            // Account for the fact that the response may contain either a
+            // signedCertificateEmbeddedSct or a signedCertificateDetachedSct.
+            const cert = resp.signedCertificateEmbeddedSct
+                ? resp.signedCertificateEmbeddedSct
+                : resp.signedCertificateDetachedSct;
+            // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+            return cert.chain.certificates;
+        }
+        catch (err) {
+            throw new error_1.InternalError({
+                code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR',
+                message: 'error creating signing certificate',
+                cause: err,
+            });
+        }
+    }
+}
+exports.CAClient = CAClient;
+function toCertificateRequest(identityToken, publicKey, challenge) {
+    return {
+        credentials: {
+            oidcIdentityToken: identityToken,
+        },
+        publicKeyRequest: {
+            publicKey: {
+                algorithm: 'ECDSA',
+                content: publicKey,
+            },
+            proofOfPossession: challenge.toString('base64'),
+        },
+    };
+}
diff --git a/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js b/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
new file mode 100644
index 0000000000000..481aa5c3579a2
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
@@ -0,0 +1,45 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.EphemeralSigner = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const crypto_1 = __importDefault(require("crypto"));
+const EC_KEYPAIR_TYPE = 'ec';
+const P256_CURVE = 'P-256';
+// Signer implementation which uses an ephemeral keypair to sign artifacts.
+// The private key lives only in memory and is tied to the lifetime of the
+// EphemeralSigner instance.
+class EphemeralSigner {
+    constructor() {
+        this.keypair = crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, {
+            namedCurve: P256_CURVE,
+        });
+    }
+    async sign(data) {
+        const signature = crypto_1.default.sign(null, data, this.keypair.privateKey);
+        const publicKey = this.keypair.publicKey
+            .export({ format: 'pem', type: 'spki' })
+            .toString('ascii');
+        return {
+            signature: signature,
+            key: { $case: 'publicKey', publicKey },
+        };
+    }
+}
+exports.EphemeralSigner = EphemeralSigner;
diff --git a/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
new file mode 100644
index 0000000000000..b2eff7e1b981f
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
@@ -0,0 +1,74 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FulcioSigner = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const util_1 = require("../../util");
+const ca_1 = require("./ca");
+const ephemeral_1 = require("./ephemeral");
+// Signer implementation which can be used to decorate another signer
+// with a Fulcio-issued signing certificate for the signer's public key.
+// Must be instantiated with an identity provider which can provide a JWT
+// which represents the identity to be bound to the signing certificate.
+class FulcioSigner {
+    constructor(options) {
+        this.ca = new ca_1.CAClient(options);
+        this.identityProvider = options.identityProvider;
+        this.keyHolder = options.keyHolder || new ephemeral_1.EphemeralSigner();
+    }
+    async sign(data) {
+        // Retrieve identity token from the supplied identity provider
+        const identityToken = await this.getIdentityToken();
+        // Extract challenge claim from OIDC token
+        const subject = util_1.oidc.extractJWTSubject(identityToken);
+        // Construct challenge value by signing the subject claim
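+        // (the signed subject serves as Fulcio's proof-of-possession challenge)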
+        const challenge = await this.keyHolder.sign(Buffer.from(subject));
+        if (challenge.key.$case !== 'publicKey') {
+            throw new error_1.InternalError({
+                code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR',
+                message: 'unexpected format for signing key',
+            });
+        }
+        // Create signing certificate
+        const certificates = await this.ca.createSigningCertificate(identityToken, challenge.key.publicKey, challenge.signature);
+        // Generate artifact signature
+        const signature = await this.keyHolder.sign(data);
+        // Specifically returning only the first certificate in the chain
+        // as the key.
+        return {
+            signature: signature.signature,
+            key: {
+                $case: 'x509Certificate',
+                certificate: certificates[0],
+            },
+        };
+    }
+    async getIdentityToken() {
+        try {
+            return await this.identityProvider.getToken();
+        }
+        catch (err) {
+            throw new error_1.InternalError({
+                code: 'IDENTITY_TOKEN_READ_ERROR',
+                message: 'error retrieving identity token',
+                cause: err,
+            });
+        }
+    }
+}
+exports.FulcioSigner = FulcioSigner;
diff --git a/node_modules/@sigstore/sign/dist/signer/index.js b/node_modules/@sigstore/sign/dist/signer/index.js
new file mode 100644
index 0000000000000..4f64adf41ed8d
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/signer/index.js
@@ -0,0 +1,20 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FulcioSigner = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var fulcio_1 = require("./fulcio");
+Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return fulcio_1.FulcioSigner; } });
diff --git a/node_modules/@sigstore/sign/dist/signer/signer.js b/node_modules/@sigstore/sign/dist/signer/signer.js
new file mode 100644
index 0000000000000..b92c54183375d
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/signer/signer.js
@@ -0,0 +1,17 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/sign/dist/types/fetch.js b/node_modules/@sigstore/sign/dist/types/fetch.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/types/fetch.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/sign/dist/util/crypto.js b/node_modules/@sigstore/sign/dist/util/crypto.js
new file mode 100644
index 0000000000000..11aad2fb6ff8b
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/util/crypto.js
@@ -0,0 +1,27 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.hash = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const crypto_1 = __importDefault(require("crypto"));
+const SHA256_ALGORITHM = 'sha256';
+function hash(data, algorithm = SHA256_ALGORITHM) {
+    return crypto_1.default.createHash(algorithm).update(data).digest();
+}
+exports.hash = hash;
diff --git a/node_modules/@sigstore/sign/dist/util/dsse.js b/node_modules/@sigstore/sign/dist/util/dsse.js
new file mode 100644
index 0000000000000..befcdbdc14ec8
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/util/dsse.js
@@ -0,0 +1,26 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.preAuthEncoding = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const PAE_PREFIX = 'DSSEv1';
+// DSSE Pre-Authentication Encoding
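+// PAE(type, body) = "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body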
+function preAuthEncoding(payloadType, payload) {
+    const prefix = Buffer.from(`${PAE_PREFIX} ${payloadType.length} ${payloadType} ${payload.length} `, 'ascii');
+    return Buffer.concat([prefix, payload]);
+}
+exports.preAuthEncoding = preAuthEncoding;
diff --git a/node_modules/@sigstore/sign/dist/util/encoding.js b/node_modules/@sigstore/sign/dist/util/encoding.js
new file mode 100644
index 0000000000000..b020ac4d6ecd4
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/util/encoding.js
@@ -0,0 +1,28 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.base64Decode = exports.base64Encode = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const BASE64_ENCODING = 'base64';
+const UTF8_ENCODING = 'utf-8';
+function base64Encode(str) {
+    return Buffer.from(str, UTF8_ENCODING).toString(BASE64_ENCODING);
+}
+exports.base64Encode = base64Encode;
+function base64Decode(str) {
+    return Buffer.from(str, BASE64_ENCODING).toString(UTF8_ENCODING);
+}
+exports.base64Decode = base64Decode;
diff --git a/node_modules/@sigstore/sign/dist/util/index.js b/node_modules/@sigstore/sign/dist/util/index.js
new file mode 100644
index 0000000000000..567e5dbf6e04c
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/util/index.js
@@ -0,0 +1,48 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ua = exports.pem = exports.oidc = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+exports.crypto = __importStar(require("./crypto"));
+exports.dsse = __importStar(require("./dsse"));
+exports.encoding = __importStar(require("./encoding"));
+exports.json = __importStar(require("./json"));
+exports.oidc = __importStar(require("./oidc"));
+exports.pem = __importStar(require("./pem"));
+exports.ua = __importStar(require("./ua"));
diff --git a/node_modules/@sigstore/sign/dist/util/json.js b/node_modules/@sigstore/sign/dist/util/json.js
new file mode 100644
index 0000000000000..69176ad731eb7
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/util/json.js
@@ -0,0 +1,62 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.canonicalize = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+// JSON canonicalization per https://github.com/cyberphone/json-canonicalization
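+// e.g. canonicalize({ b: 2, a: 1 }) returns '{"a":1,"b":2}'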
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function canonicalize(object) {
+    let buffer = '';
+    if (object === null || typeof object !== 'object' || object.toJSON != null) {
+        // Primitives or toJSONable objects
+        buffer += JSON.stringify(object);
+    }
+    else if (Array.isArray(object)) {
+        // Array - maintain element order
+        buffer += '[';
+        let first = true;
+        object.forEach((element) => {
+            if (!first) {
+                buffer += ',';
+            }
+            first = false;
+            // recursive call
+            buffer += canonicalize(element);
+        });
+        buffer += ']';
+    }
+    else {
+        // Object - Sort properties before serializing
+        buffer += '{';
+        let first = true;
+        Object.keys(object)
+            .sort()
+            .forEach((property) => {
+            if (!first) {
+                buffer += ',';
+            }
+            first = false;
+            buffer += JSON.stringify(property);
+            buffer += ':';
+            // recursive call
+            buffer += canonicalize(object[property]);
+        });
+        buffer += '}';
+    }
+    return buffer;
+}
+exports.canonicalize = canonicalize;
diff --git a/node_modules/@sigstore/sign/dist/util/oidc.js b/node_modules/@sigstore/sign/dist/util/oidc.js
new file mode 100644
index 0000000000000..8b49f3bbe8440
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/util/oidc.js
@@ -0,0 +1,55 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extractJWTSubject = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const enc = __importStar(require("./encoding"));
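+// Google and the Sigstore OAuth issuer put the signer's identity in the 'email' claim; other issuers use the standard 'sub' claim.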
+function extractJWTSubject(jwt) {
+    const parts = jwt.split('.', 3);
+    const payload = JSON.parse(enc.base64Decode(parts[1]));
+    switch (payload.iss) {
+        case 'https://accounts.google.com':
+        case 'https://oauth2.sigstore.dev/auth':
+            return payload.email;
+        default:
+            return payload.sub;
+    }
+}
+exports.extractJWTSubject = extractJWTSubject;
diff --git a/node_modules/@sigstore/sign/dist/util/pem.js b/node_modules/@sigstore/sign/dist/util/pem.js
new file mode 100644
index 0000000000000..36eeebd2052f5
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/util/pem.js
@@ -0,0 +1,28 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toDER = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const PEM_HEADER = /-----BEGIN (.*)-----/;
+const PEM_FOOTER = /-----END (.*)-----/;
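+// Strips the PEM header/footer lines and base64-decodes the remainder into DER.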
+function toDER(certificate) {
+    const lines = certificate
+        .split('\n')
+        .map((line) => line.match(PEM_HEADER) || line.match(PEM_FOOTER) ? '' : line);
+    return Buffer.from(lines.join(''), 'base64');
+}
+exports.toDER = toDER;
diff --git a/node_modules/@sigstore/sign/dist/util/ua.js b/node_modules/@sigstore/sign/dist/util/ua.js
new file mode 100644
index 0000000000000..c142330eb8338
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/util/ua.js
@@ -0,0 +1,33 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getUserAgent = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const os_1 = __importDefault(require("os"));
+// Format User-Agent: <product>/<product-version> (<system-information>)
+// source: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
+const getUserAgent = () => {
+    // eslint-disable-next-line @typescript-eslint/no-var-requires
+    const packageVersion = require('../../package.json').version;
+    const nodeVersion = process.version;
+    const platformName = os_1.default.platform();
+    const archName = os_1.default.arch();
+    return `sigstore-js/${packageVersion} (Node ${nodeVersion}) (${platformName}/${archName})`;
+};
+exports.getUserAgent = getUserAgent;
diff --git a/node_modules/@sigstore/sign/dist/witness/index.js b/node_modules/@sigstore/sign/dist/witness/index.js
new file mode 100644
index 0000000000000..7218ea41bce6d
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/index.js
@@ -0,0 +1,22 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAWitness = exports.RekorWitness = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var tlog_1 = require("./tlog");
+Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return tlog_1.RekorWitness; } });
+var tsa_1 = require("./tsa");
+Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return tsa_1.TSAWitness; } });
diff --git a/node_modules/@sigstore/sign/dist/witness/tlog/client.js b/node_modules/@sigstore/sign/dist/witness/tlog/client.js
new file mode 100644
index 0000000000000..3c1b5212e4265
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/tlog/client.js
@@ -0,0 +1,69 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TLogClient = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const error_2 = require("../../external/error");
+const rekor_1 = require("../../external/rekor");
+class TLogClient {
+    constructor(options) {
+        this.fetchOnConflict = options.fetchOnConflict ?? false;
+        this.rekor = new rekor_1.Rekor({
+            baseURL: options.rekorBaseURL,
+            retry: options.retry,
+            timeout: options.timeout,
+        });
+    }
+    async createEntry(proposedEntry) {
+        let entry;
+        try {
+            entry = await this.rekor.createEntry(proposedEntry);
+        }
+        catch (err) {
+            // If the entry already exists, fetch it (if enabled)
+            if (entryExistsError(err) && this.fetchOnConflict) {
+                // Grab the UUID of the existing entry from the location header
+                /* istanbul ignore next */
+                const uuid = err.location.split('/').pop() || '';
+                try {
+                    entry = await this.rekor.getEntry(uuid);
+                }
+                catch (err) {
+                    throw new error_1.InternalError({
+                        code: 'TLOG_FETCH_ENTRY_ERROR',
+                        message: 'error fetching tlog entry',
+                        cause: err,
+                    });
+                }
+            }
+            else {
+                throw new error_1.InternalError({
+                    code: 'TLOG_CREATE_ENTRY_ERROR',
+                    message: 'error creating tlog entry',
+                    cause: err,
+                });
+            }
+        }
+        return entry;
+    }
+}
+exports.TLogClient = TLogClient;
+function entryExistsError(value) {
+    return (value instanceof error_2.HTTPError &&
+        value.statusCode === 409 &&
+        value.location !== undefined);
+}
diff --git a/node_modules/@sigstore/sign/dist/witness/tlog/entry.js b/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
new file mode 100644
index 0000000000000..c237523a2c9b2
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
@@ -0,0 +1,136 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toProposedEntry = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const bundle_1 = require("@sigstore/bundle");
+const util_1 = require("../../util");
+function toProposedEntry(content, publicKey, 
+// TODO: Remove this parameter once we have completely switched to 'dsse' entries
+entryType = 'intoto') {
+    switch (content.$case) {
+        case 'dsseEnvelope':
+            // TODO: Remove this conditional once we have completely switched to 'dsse' entries
+            if (entryType === 'dsse') {
+                return toProposedDSSEEntry(content.dsseEnvelope, publicKey);
+            }
+            return toProposedIntotoEntry(content.dsseEnvelope, publicKey);
+        case 'messageSignature':
+            return toProposedHashedRekordEntry(content.messageSignature, publicKey);
+    }
+}
+exports.toProposedEntry = toProposedEntry;
+// Returns a properly formatted Rekor "hashedrekord" entry for the given digest
+// and signature
+function toProposedHashedRekordEntry(messageSignature, publicKey) {
+    const hexDigest = messageSignature.messageDigest.digest.toString('hex');
+    const b64Signature = messageSignature.signature.toString('base64');
+    const b64Key = util_1.encoding.base64Encode(publicKey);
+    return {
+        apiVersion: '0.0.1',
+        kind: 'hashedrekord',
+        spec: {
+            data: {
+                hash: {
+                    algorithm: 'sha256',
+                    value: hexDigest,
+                },
+            },
+            signature: {
+                content: b64Signature,
+                publicKey: {
+                    content: b64Key,
+                },
+            },
+        },
+    };
+}
+// Returns a properly formatted Rekor "dsse" entry for the given DSSE envelope
+// and signature
+function toProposedDSSEEntry(envelope, publicKey) {
+    const envelopeJSON = JSON.stringify((0, bundle_1.envelopeToJSON)(envelope));
+    const encodedKey = util_1.encoding.base64Encode(publicKey);
+    return {
+        apiVersion: '0.0.1',
+        kind: 'dsse',
+        spec: {
+            proposedContent: {
+                envelope: envelopeJSON,
+                verifiers: [encodedKey],
+            },
+        },
+    };
+}
+// Returns a properly formatted Rekor "intoto" entry for the given DSSE
+// envelope and signature
+function toProposedIntotoEntry(envelope, publicKey) {
+    // Calculate the value for the payloadHash field in the Rekor entry
+    const payloadHash = util_1.crypto.hash(envelope.payload).toString('hex');
+    // Calculate the value for the hash field in the Rekor entry
+    const envelopeHash = calculateDSSEHash(envelope, publicKey);
+    // Collect values for re-creating the DSSE envelope.
+    // Double-encode payload and signature because that's what Rekor expects
+    const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64'));
+    const sig = util_1.encoding.base64Encode(envelope.signatures[0].sig.toString('base64'));
+    const keyid = envelope.signatures[0].keyid;
+    const encodedKey = util_1.encoding.base64Encode(publicKey);
+    // Create the envelope portion of the entry. Note the inclusion of the
+    // publicKey in the signature struct is not a standard part of a DSSE
+    // envelope, but is required by Rekor.
+    const dsse = {
+        payloadType: envelope.payloadType,
+        payload: payload,
+        signatures: [{ sig, publicKey: encodedKey }],
+    };
+    // If the keyid is an empty string, Rekor seems to remove it altogether. We
+    // need to do the same here so that we can properly recreate the entry for
+    // verification.
+    if (keyid.length > 0) {
+        dsse.signatures[0].keyid = keyid;
+    }
+    return {
+        apiVersion: '0.0.2',
+        kind: 'intoto',
+        spec: {
+            content: {
+                envelope: dsse,
+                hash: { algorithm: 'sha256', value: envelopeHash },
+                payloadHash: { algorithm: 'sha256', value: payloadHash },
+            },
+        },
+    };
+}
+// Calculates the hash of a DSSE envelope for inclusion in a Rekor entry.
+// There is no standard way to do this, so the scheme we're using is as
+// follows:
+//  * payload is base64 encoded
+//  * signature is base64 encoded (only the first signature is used)
+//  * keyid is included ONLY if it is NOT an empty string
+//  * The resulting JSON is canonicalized and hashed to a hex string
+function calculateDSSEHash(envelope, publicKey) {
+    const dsse = {
+        payloadType: envelope.payloadType,
+        payload: envelope.payload.toString('base64'),
+        signatures: [
+            { sig: envelope.signatures[0].sig.toString('base64'), publicKey },
+        ],
+    };
+    // If the keyid is an empty string, Rekor seems to remove it altogether.
+    if (envelope.signatures[0].keyid.length > 0) {
+        dsse.signatures[0].keyid = envelope.signatures[0].keyid;
+    }
+    return util_1.crypto.hash(util_1.json.canonicalize(dsse)).toString('hex');
+}
diff --git a/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/node_modules/@sigstore/sign/dist/witness/tlog/index.js
new file mode 100644
index 0000000000000..7d5487c2cb3c6
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/tlog/index.js
@@ -0,0 +1,77 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RekorWitness = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const util_1 = require("../../util");
+const client_1 = require("./client");
+const entry_1 = require("./entry");
+class RekorWitness {
+    constructor(options) {
+        this.tlog = new client_1.TLogClient(options);
+    }
+    async testify(content, publicKey) {
+        const proposedEntry = (0, entry_1.toProposedEntry)(content, publicKey);
+        const entry = await this.tlog.createEntry(proposedEntry);
+        return toTransparencyLogEntry(entry);
+    }
+}
+exports.RekorWitness = RekorWitness;
+function toTransparencyLogEntry(entry) {
+    const logID = Buffer.from(entry.logID, 'hex');
+    // Parse entry body so we can extract the kind and version.
+    const bodyJSON = util_1.encoding.base64Decode(entry.body);
+    const entryBody = JSON.parse(bodyJSON);
+    const promise = entry?.verification?.signedEntryTimestamp
+        ? inclusionPromise(entry.verification.signedEntryTimestamp)
+        : undefined;
+    const proof = entry?.verification?.inclusionProof
+        ? inclusionProof(entry.verification.inclusionProof)
+        : undefined;
+    const tlogEntry = {
+        logIndex: entry.logIndex.toString(),
+        logId: {
+            keyId: logID,
+        },
+        integratedTime: entry.integratedTime.toString(),
+        kindVersion: {
+            kind: entryBody.kind,
+            version: entryBody.apiVersion,
+        },
+        inclusionPromise: promise,
+        inclusionProof: proof,
+        canonicalizedBody: Buffer.from(entry.body, 'base64'),
+    };
+    return {
+        tlogEntries: [tlogEntry],
+    };
+}
+function inclusionPromise(promise) {
+    return {
+        signedEntryTimestamp: Buffer.from(promise, 'base64'),
+    };
+}
+function inclusionProof(proof) {
+    return {
+        logIndex: proof.logIndex.toString(),
+        treeSize: proof.treeSize.toString(),
+        rootHash: Buffer.from(proof.rootHash, 'hex'),
+        hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')),
+        checkpoint: {
+            envelope: proof.checkpoint,
+        },
+    };
+}
diff --git a/node_modules/@sigstore/sign/dist/witness/tsa/client.js b/node_modules/@sigstore/sign/dist/witness/tsa/client.js
new file mode 100644
index 0000000000000..d2a7610401c4e
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/tsa/client.js
@@ -0,0 +1,47 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAClient = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const tsa_1 = require("../../external/tsa");
+const util_1 = require("../../util");
+class TSAClient {
+    constructor(options) {
+        this.tsa = new tsa_1.TimestampAuthority({
+            baseURL: options.tsaBaseURL,
+            retry: options.retry,
+            timeout: options.timeout,
+        });
+    }
+    async createTimestamp(signature) {
+        const request = {
+            artifactHash: util_1.crypto.hash(signature).toString('base64'),
+            hashAlgorithm: 'sha256',
+        };
+        try {
+            return await this.tsa.createTimestamp(request);
+        }
+        catch (err) {
+            throw new error_1.InternalError({
+                code: 'TSA_CREATE_TIMESTAMP_ERROR',
+                message: 'error creating timestamp',
+                cause: err,
+            });
+        }
+    }
+}
+exports.TSAClient = TSAClient;
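
A small sketch of how this client is exercised (illustrative only; the URL is hypothetical and the call hits the network). Note that createTimestamp hashes the raw signature bytes with SHA-256 internally and submits the base64 digest, so the caller passes the signature itself, not a digest.

const { TSAClient } = require('./node_modules/@sigstore/sign/dist/witness/tsa/client');

const tsa = new TSAClient({
  tsaBaseURL: 'https://tsa.example.test', // hypothetical endpoint
  retry: 2,
  timeout: 5000,
});

// The signature bytes are hashed (sha256, base64) before the request is sent.
tsa.createTimestamp(Buffer.from('signature-bytes'))
  .then((ts) => console.log('timestamp bytes:', ts.length))
  .catch((err) => console.error(err.code, err.message)); // 'TSA_CREATE_TIMESTAMP_ERROR'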
diff --git a/node_modules/@sigstore/sign/dist/witness/tsa/index.js b/node_modules/@sigstore/sign/dist/witness/tsa/index.js
new file mode 100644
index 0000000000000..d4f5c7c859d10
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/tsa/index.js
@@ -0,0 +1,44 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAWitness = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const client_1 = require("./client");
+class TSAWitness {
+    constructor(options) {
+        this.tsa = new client_1.TSAClient({
+            tsaBaseURL: options.tsaBaseURL,
+            retry: options.retry,
+            timeout: options.timeout,
+        });
+    }
+    async testify(content) {
+        const signature = extractSignature(content);
+        const timestamp = await this.tsa.createTimestamp(signature);
+        return {
+            rfc3161Timestamps: [{ signedTimestamp: timestamp }],
+        };
+    }
+}
+exports.TSAWitness = TSAWitness;
+function extractSignature(content) {
+    switch (content.$case) {
+        case 'dsseEnvelope':
+            return content.dsseEnvelope.signatures[0].sig;
+        case 'messageSignature':
+            return content.messageSignature.signature;
+    }
+}
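
The extractSignature helper above picks the raw signature out of either bundle shape; a standalone restatement for illustration, since the real helper is module-private:

// Restated from the file above: DSSE envelopes may carry several signatures,
// and only the first one is timestamped; message signatures carry exactly one.
const extractSignature = (content) => {
  switch (content.$case) {
    case 'dsseEnvelope':
      return content.dsseEnvelope.signatures[0].sig;
    case 'messageSignature':
      return content.messageSignature.signature;
  }
};

extractSignature({ $case: 'messageSignature', messageSignature: { signature: Buffer.from('raw') } });
extractSignature({ $case: 'dsseEnvelope', dsseEnvelope: { signatures: [{ sig: Buffer.from('raw') }] } });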
diff --git a/node_modules/@sigstore/sign/dist/witness/witness.js b/node_modules/@sigstore/sign/dist/witness/witness.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/witness.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 The Sigstore Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
new file mode 100644
index 0000000000000..0c367a8384454
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
@@ -0,0 +1,89 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Envelope = void 0;
+function createBaseEnvelope() {
+    return { payload: Buffer.alloc(0), payloadType: "", signatures: [] };
+}
+exports.Envelope = {
+    fromJSON(object) {
+        return {
+            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
+            payloadType: isSet(object.payloadType) ? String(object.payloadType) : "",
+            signatures: Array.isArray(object?.signatures) ? object.signatures.map((e) => exports.Signature.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.payload !== undefined &&
+            (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0)));
+        message.payloadType !== undefined && (obj.payloadType = message.payloadType);
+        if (message.signatures) {
+            obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined);
+        }
+        else {
+            obj.signatures = [];
+        }
+        return obj;
+    },
+};
+function createBaseSignature() {
+    return { sig: Buffer.alloc(0), keyid: "" };
+}
+exports.Signature = {
+    fromJSON(object) {
+        return {
+            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
+            keyid: isSet(object.keyid) ? String(object.keyid) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0)));
+        message.keyid !== undefined && (obj.keyid = message.keyid);
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
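
These generated codecs round-trip between the protobuf JSON form (base64-encoded bytes fields) and Buffers. A quick sketch, requiring the file at the exact path this patch adds (run from the repository root; payload values are made up):

const { Envelope } = require(
  './node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope');

const json = {
  payload: Buffer.from('{"hello":"world"}').toString('base64'),
  payloadType: 'application/vnd.in-toto+json',
  signatures: [{ sig: Buffer.from('sig').toString('base64'), keyid: 'key-1' }],
};

const msg = Envelope.fromJSON(json); // bytes fields become Buffers
console.log(msg.payload.toString()); // {"hello":"world"}
console.log(Envelope.toJSON(msg));   // back to the base64 JSON form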
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
new file mode 100644
index 0000000000000..073093b8371a8
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
@@ -0,0 +1,185 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
+/* eslint-disable */
+const any_1 = require("./google/protobuf/any");
+const timestamp_1 = require("./google/protobuf/timestamp");
+function createBaseCloudEvent() {
+    return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined };
+}
+exports.CloudEvent = {
+    fromJSON(object) {
+        return {
+            id: isSet(object.id) ? String(object.id) : "",
+            source: isSet(object.source) ? String(object.source) : "",
+            specVersion: isSet(object.specVersion) ? String(object.specVersion) : "",
+            type: isSet(object.type) ? String(object.type) : "",
+            attributes: isObject(object.attributes)
+                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
+                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
+                    return acc;
+                }, {})
+                : {},
+            data: isSet(object.binaryData)
+                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
+                : isSet(object.textData)
+                    ? { $case: "textData", textData: String(object.textData) }
+                    : isSet(object.protoData)
+                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
+                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.id !== undefined && (obj.id = message.id);
+        message.source !== undefined && (obj.source = message.source);
+        message.specVersion !== undefined && (obj.specVersion = message.specVersion);
+        message.type !== undefined && (obj.type = message.type);
+        obj.attributes = {};
+        if (message.attributes) {
+            Object.entries(message.attributes).forEach(([k, v]) => {
+                obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
+            });
+        }
+        message.data?.$case === "binaryData" &&
+            (obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined);
+        message.data?.$case === "textData" && (obj.textData = message.data?.textData);
+        message.data?.$case === "protoData" &&
+            (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined);
+        return obj;
+    },
+};
+function createBaseCloudEvent_AttributesEntry() {
+    return { key: "", value: undefined };
+}
+exports.CloudEvent_AttributesEntry = {
+    fromJSON(object) {
+        return {
+            key: isSet(object.key) ? String(object.key) : "",
+            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.key !== undefined && (obj.key = message.key);
+        message.value !== undefined &&
+            (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined);
+        return obj;
+    },
+};
+function createBaseCloudEvent_CloudEventAttributeValue() {
+    return { attr: undefined };
+}
+exports.CloudEvent_CloudEventAttributeValue = {
+    fromJSON(object) {
+        return {
+            attr: isSet(object.ceBoolean)
+                ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) }
+                : isSet(object.ceInteger)
+                    ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) }
+                    : isSet(object.ceString)
+                        ? { $case: "ceString", ceString: String(object.ceString) }
+                        : isSet(object.ceBytes)
+                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
+                            : isSet(object.ceUri)
+                                ? { $case: "ceUri", ceUri: String(object.ceUri) }
+                                : isSet(object.ceUriRef)
+                                    ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) }
+                                    : isSet(object.ceTimestamp)
+                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
+                                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean);
+        message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger));
+        message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString);
+        message.attr?.$case === "ceBytes" &&
+            (obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined);
+        message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri);
+        message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef);
+        message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString());
+        return obj;
+    },
+};
+function createBaseCloudEventBatch() {
+    return { events: [] };
+}
+exports.CloudEventBatch = {
+    fromJSON(object) {
+        return { events: Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.events) {
+            obj.events = message.events.map((e) => e ? exports.CloudEvent.toJSON(e) : undefined);
+        }
+        else {
+            obj.events = [];
+        }
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function fromTimestamp(t) {
+    let millis = Number(t.seconds) * 1000;
+    millis += t.nanos / 1000000;
+    return new Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isObject(value) {
+    return typeof value === "object" && value !== null;
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
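
The fromJsonTimestamp helper accepts a Date, an ISO string, or a protobuf Timestamp object ({ seconds, nanos }), normalizing all three to a Date. A short sketch of the exported attribute-value codec that uses it, requiring the file at the path this patch adds (run from the repository root):

const { CloudEvent_CloudEventAttributeValue } = require(
  './node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events');

// Each JSON key selects a $case branch; ceTimestamp goes through fromJsonTimestamp.
const ts = CloudEvent_CloudEventAttributeValue.fromJSON({ ceTimestamp: '2023-08-11T23:25:08.000Z' });
console.log(ts.attr.$case, ts.attr.ceTimestamp instanceof Date); // 'ceTimestamp' true

const num = CloudEvent_CloudEventAttributeValue.fromJSON({ ceInteger: '42' });
console.log(num.attr); // { $case: 'ceInteger', ceInteger: 42 }

// toJSON inverts the mapping; Dates become ISO strings again.
console.log(CloudEvent_CloudEventAttributeValue.toJSON(ts)); // { ceTimestamp: '2023-08-11T23:25:08.000Z' }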
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
new file mode 100644
index 0000000000000..da627499ad765
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
@@ -0,0 +1,119 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0;
+/**
+ * An indicator of the behavior of a given field (for example, that a field
+ * is required in requests, or given as output but ignored as input).
+ * This **does not** change the behavior in protocol buffers itself; it only
+ * denotes the behavior and may affect how API tooling handles the field.
+ *
+ * Note: This enum **may** receive new values in the future.
+ */
+var FieldBehavior;
+(function (FieldBehavior) {
+    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
+    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
+    /**
+     * OPTIONAL - Specifically denotes a field as optional.
+     * While all fields in protocol buffers are optional, this may be specified
+     * for emphasis if appropriate.
+     */
+    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
+    /**
+     * REQUIRED - Denotes a field as required.
+     * This indicates that the field **must** be provided as part of the request,
+     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
+     */
+    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
+    /**
+     * OUTPUT_ONLY - Denotes a field as output only.
+     * This indicates that the field is provided in responses, but including the
+     * field in a request does nothing (the server *must* ignore it and
+     * *must not* throw an error as a result of the field's presence).
+     */
+    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
+    /**
+     * INPUT_ONLY - Denotes a field as input only.
+     * This indicates that the field is provided in requests, and the
+     * corresponding field is not included in output.
+     */
+    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
+    /**
+     * IMMUTABLE - Denotes a field as immutable.
+     * This indicates that the field may be set once in a request to create a
+     * resource, but may not be changed thereafter.
+     */
+    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
+    /**
+     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
+     * This indicates that the service may provide the elements of the list
+     * in any arbitrary order, rather than the order the user originally
+     * provided. Additionally, the list's order may or may not be stable.
+     */
+    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
+})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {}));
+function fieldBehaviorFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "FIELD_BEHAVIOR_UNSPECIFIED":
+            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
+        case 1:
+        case "OPTIONAL":
+            return FieldBehavior.OPTIONAL;
+        case 2:
+        case "REQUIRED":
+            return FieldBehavior.REQUIRED;
+        case 3:
+        case "OUTPUT_ONLY":
+            return FieldBehavior.OUTPUT_ONLY;
+        case 4:
+        case "INPUT_ONLY":
+            return FieldBehavior.INPUT_ONLY;
+        case 5:
+        case "IMMUTABLE":
+            return FieldBehavior.IMMUTABLE;
+        case 6:
+        case "UNORDERED_LIST":
+            return FieldBehavior.UNORDERED_LIST;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
+exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
+function fieldBehaviorToJSON(object) {
+    switch (object) {
+        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
+            return "FIELD_BEHAVIOR_UNSPECIFIED";
+        case FieldBehavior.OPTIONAL:
+            return "OPTIONAL";
+        case FieldBehavior.REQUIRED:
+            return "REQUIRED";
+        case FieldBehavior.OUTPUT_ONLY:
+            return "OUTPUT_ONLY";
+        case FieldBehavior.INPUT_ONLY:
+            return "INPUT_ONLY";
+        case FieldBehavior.IMMUTABLE:
+            return "IMMUTABLE";
+        case FieldBehavior.UNORDERED_LIST:
+            return "UNORDERED_LIST";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
+exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
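
The generated enum helpers accept either wire numbers or string names and throw on anything else; a quick check against the file this patch adds (run from the repository root):

const {
  FieldBehavior,
  fieldBehaviorFromJSON,
  fieldBehaviorToJSON,
} = require(
  './node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior');

console.log(fieldBehaviorFromJSON(2) === FieldBehavior.REQUIRED);            // true
console.log(fieldBehaviorFromJSON('REQUIRED') === fieldBehaviorFromJSON(2)); // true
console.log(fieldBehaviorToJSON(FieldBehavior.OUTPUT_ONLY));                 // 'OUTPUT_ONLY'
// Unrecognized inputs throw: fieldBehaviorFromJSON(99) -> Error('Unrecognized enum value ...')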
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
new file mode 100644
index 0000000000000..6b3f3c97a6647
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
@@ -0,0 +1,65 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Any = void 0;
+function createBaseAny() {
+    return { typeUrl: "", value: Buffer.alloc(0) };
+}
+exports.Any = {
+    fromJSON(object) {
+        return {
+            typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "",
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl);
+        message.value !== undefined &&
+            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
new file mode 100644
index 0000000000000..d429aac846043
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
@@ -0,0 +1,1308 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0;
+var FieldDescriptorProto_Type;
+(function (FieldDescriptorProto_Type) {
+    /**
+     * TYPE_DOUBLE - 0 is reserved for errors.
+     * Order is weird for historical reasons.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
+    /**
+     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
+    /**
+     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
+    /**
+     * TYPE_GROUP - Tag-delimited aggregate.
+     * Group type is deprecated and not supported in proto3. However, Proto3
+     * implementations should still be able to parse the group wire format and
+     * treat group fields as unknown fields.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
+    /** TYPE_MESSAGE - Length-delimited aggregate. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
+    /** TYPE_BYTES - New in version 2. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
+    /** TYPE_SINT32 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
+    /** TYPE_SINT64 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
+})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {}));
+function fieldDescriptorProto_TypeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "TYPE_DOUBLE":
+            return FieldDescriptorProto_Type.TYPE_DOUBLE;
+        case 2:
+        case "TYPE_FLOAT":
+            return FieldDescriptorProto_Type.TYPE_FLOAT;
+        case 3:
+        case "TYPE_INT64":
+            return FieldDescriptorProto_Type.TYPE_INT64;
+        case 4:
+        case "TYPE_UINT64":
+            return FieldDescriptorProto_Type.TYPE_UINT64;
+        case 5:
+        case "TYPE_INT32":
+            return FieldDescriptorProto_Type.TYPE_INT32;
+        case 6:
+        case "TYPE_FIXED64":
+            return FieldDescriptorProto_Type.TYPE_FIXED64;
+        case 7:
+        case "TYPE_FIXED32":
+            return FieldDescriptorProto_Type.TYPE_FIXED32;
+        case 8:
+        case "TYPE_BOOL":
+            return FieldDescriptorProto_Type.TYPE_BOOL;
+        case 9:
+        case "TYPE_STRING":
+            return FieldDescriptorProto_Type.TYPE_STRING;
+        case 10:
+        case "TYPE_GROUP":
+            return FieldDescriptorProto_Type.TYPE_GROUP;
+        case 11:
+        case "TYPE_MESSAGE":
+            return FieldDescriptorProto_Type.TYPE_MESSAGE;
+        case 12:
+        case "TYPE_BYTES":
+            return FieldDescriptorProto_Type.TYPE_BYTES;
+        case 13:
+        case "TYPE_UINT32":
+            return FieldDescriptorProto_Type.TYPE_UINT32;
+        case 14:
+        case "TYPE_ENUM":
+            return FieldDescriptorProto_Type.TYPE_ENUM;
+        case 15:
+        case "TYPE_SFIXED32":
+            return FieldDescriptorProto_Type.TYPE_SFIXED32;
+        case 16:
+        case "TYPE_SFIXED64":
+            return FieldDescriptorProto_Type.TYPE_SFIXED64;
+        case 17:
+        case "TYPE_SINT32":
+            return FieldDescriptorProto_Type.TYPE_SINT32;
+        case 18:
+        case "TYPE_SINT64":
+            return FieldDescriptorProto_Type.TYPE_SINT64;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
+function fieldDescriptorProto_TypeToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Type.TYPE_DOUBLE:
+            return "TYPE_DOUBLE";
+        case FieldDescriptorProto_Type.TYPE_FLOAT:
+            return "TYPE_FLOAT";
+        case FieldDescriptorProto_Type.TYPE_INT64:
+            return "TYPE_INT64";
+        case FieldDescriptorProto_Type.TYPE_UINT64:
+            return "TYPE_UINT64";
+        case FieldDescriptorProto_Type.TYPE_INT32:
+            return "TYPE_INT32";
+        case FieldDescriptorProto_Type.TYPE_FIXED64:
+            return "TYPE_FIXED64";
+        case FieldDescriptorProto_Type.TYPE_FIXED32:
+            return "TYPE_FIXED32";
+        case FieldDescriptorProto_Type.TYPE_BOOL:
+            return "TYPE_BOOL";
+        case FieldDescriptorProto_Type.TYPE_STRING:
+            return "TYPE_STRING";
+        case FieldDescriptorProto_Type.TYPE_GROUP:
+            return "TYPE_GROUP";
+        case FieldDescriptorProto_Type.TYPE_MESSAGE:
+            return "TYPE_MESSAGE";
+        case FieldDescriptorProto_Type.TYPE_BYTES:
+            return "TYPE_BYTES";
+        case FieldDescriptorProto_Type.TYPE_UINT32:
+            return "TYPE_UINT32";
+        case FieldDescriptorProto_Type.TYPE_ENUM:
+            return "TYPE_ENUM";
+        case FieldDescriptorProto_Type.TYPE_SFIXED32:
+            return "TYPE_SFIXED32";
+        case FieldDescriptorProto_Type.TYPE_SFIXED64:
+            return "TYPE_SFIXED64";
+        case FieldDescriptorProto_Type.TYPE_SINT32:
+            return "TYPE_SINT32";
+        case FieldDescriptorProto_Type.TYPE_SINT64:
+            return "TYPE_SINT64";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
+var FieldDescriptorProto_Label;
+(function (FieldDescriptorProto_Label) {
+    /** LABEL_OPTIONAL - 0 is reserved for errors */
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
+})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {}));
+function fieldDescriptorProto_LabelFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "LABEL_OPTIONAL":
+            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
+        case 2:
+        case "LABEL_REQUIRED":
+            return FieldDescriptorProto_Label.LABEL_REQUIRED;
+        case 3:
+        case "LABEL_REPEATED":
+            return FieldDescriptorProto_Label.LABEL_REPEATED;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
+function fieldDescriptorProto_LabelToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
+            return "LABEL_OPTIONAL";
+        case FieldDescriptorProto_Label.LABEL_REQUIRED:
+            return "LABEL_REQUIRED";
+        case FieldDescriptorProto_Label.LABEL_REPEATED:
+            return "LABEL_REPEATED";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
+/** Generated classes can be optimized for speed or code size. */
+var FileOptions_OptimizeMode;
+(function (FileOptions_OptimizeMode) {
+    /** SPEED - Generate complete code for parsing, serialization, */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
+    /** CODE_SIZE - etc. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
+    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
+})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {}));
+function fileOptions_OptimizeModeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "SPEED":
+            return FileOptions_OptimizeMode.SPEED;
+        case 2:
+        case "CODE_SIZE":
+            return FileOptions_OptimizeMode.CODE_SIZE;
+        case 3:
+        case "LITE_RUNTIME":
+            return FileOptions_OptimizeMode.LITE_RUNTIME;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
+function fileOptions_OptimizeModeToJSON(object) {
+    switch (object) {
+        case FileOptions_OptimizeMode.SPEED:
+            return "SPEED";
+        case FileOptions_OptimizeMode.CODE_SIZE:
+            return "CODE_SIZE";
+        case FileOptions_OptimizeMode.LITE_RUNTIME:
+            return "LITE_RUNTIME";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
+var FieldOptions_CType;
+(function (FieldOptions_CType) {
+    /** STRING - Default mode. */
+    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
+    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
+    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
+})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {}));
+function fieldOptions_CTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "STRING":
+            return FieldOptions_CType.STRING;
+        case 1:
+        case "CORD":
+            return FieldOptions_CType.CORD;
+        case 2:
+        case "STRING_PIECE":
+            return FieldOptions_CType.STRING_PIECE;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
+function fieldOptions_CTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_CType.STRING:
+            return "STRING";
+        case FieldOptions_CType.CORD:
+            return "CORD";
+        case FieldOptions_CType.STRING_PIECE:
+            return "STRING_PIECE";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
+var FieldOptions_JSType;
+(function (FieldOptions_JSType) {
+    /** JS_NORMAL - Use the default type. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
+    /** JS_STRING - Use JavaScript strings. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
+    /** JS_NUMBER - Use JavaScript numbers. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
+})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {}));
+function fieldOptions_JSTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "JS_NORMAL":
+            return FieldOptions_JSType.JS_NORMAL;
+        case 1:
+        case "JS_STRING":
+            return FieldOptions_JSType.JS_STRING;
+        case 2:
+        case "JS_NUMBER":
+            return FieldOptions_JSType.JS_NUMBER;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
+function fieldOptions_JSTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_JSType.JS_NORMAL:
+            return "JS_NORMAL";
+        case FieldOptions_JSType.JS_STRING:
+            return "JS_STRING";
+        case FieldOptions_JSType.JS_NUMBER:
+            return "JS_NUMBER";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
+/**
+ * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+ * or neither? HTTP based RPC implementation may choose GET verb for safe
+ * methods, and PUT verb for idempotent methods instead of the default POST.
+ */
+var MethodOptions_IdempotencyLevel;
+(function (MethodOptions_IdempotencyLevel) {
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
+    /** NO_SIDE_EFFECTS - implies idempotent */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
+    /** IDEMPOTENT - idempotent, but may have side effects */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
+})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {}));
+function methodOptions_IdempotencyLevelFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "IDEMPOTENCY_UNKNOWN":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
+        case 1:
+        case "NO_SIDE_EFFECTS":
+            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
+        case 2:
+        case "IDEMPOTENT":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
+function methodOptions_IdempotencyLevelToJSON(object) {
+    switch (object) {
+        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
+            return "IDEMPOTENCY_UNKNOWN";
+        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
+            return "NO_SIDE_EFFECTS";
+        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
+            return "IDEMPOTENT";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
+function createBaseFileDescriptorSet() {
+    return { file: [] };
+}
+exports.FileDescriptorSet = {
+    fromJSON(object) {
+        return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.file) {
+            obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.file = [];
+        }
+        return obj;
+    },
+};
+function createBaseFileDescriptorProto() {
+    return {
+        name: "",
+        package: "",
+        dependency: [],
+        publicDependency: [],
+        weakDependency: [],
+        messageType: [],
+        enumType: [],
+        service: [],
+        extension: [],
+        options: undefined,
+        sourceCodeInfo: undefined,
+        syntax: "",
+    };
+}
+exports.FileDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            package: isSet(object.package) ? String(object.package) : "",
+            dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [],
+            publicDependency: Array.isArray(object?.publicDependency)
+                ? object.publicDependency.map((e) => Number(e))
+                : [],
+            weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [],
+            messageType: Array.isArray(object?.messageType)
+                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
+            service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [],
+            extension: Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
+            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
+            syntax: isSet(object.syntax) ? String(object.syntax) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.package !== undefined && (obj.package = message.package);
+        if (message.dependency) {
+            obj.dependency = message.dependency.map((e) => e);
+        }
+        else {
+            obj.dependency = [];
+        }
+        if (message.publicDependency) {
+            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
+        }
+        else {
+            obj.publicDependency = [];
+        }
+        if (message.weakDependency) {
+            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
+        }
+        else {
+            obj.weakDependency = [];
+        }
+        if (message.messageType) {
+            obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.messageType = [];
+        }
+        if (message.enumType) {
+            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.enumType = [];
+        }
+        if (message.service) {
+            obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.service = [];
+        }
+        if (message.extension) {
+            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.extension = [];
+        }
+        message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined);
+        message.sourceCodeInfo !== undefined &&
+            (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined);
+        message.syntax !== undefined && (obj.syntax = message.syntax);
+        return obj;
+    },
+};
+function createBaseDescriptorProto() {
+    return {
+        name: "",
+        field: [],
+        extension: [],
+        nestedType: [],
+        enumType: [],
+        extensionRange: [],
+        oneofDecl: [],
+        options: undefined,
+        reservedRange: [],
+        reservedName: [],
+    };
+}
+exports.DescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [],
+            extension: Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            nestedType: Array.isArray(object?.nestedType)
+                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
+            extensionRange: Array.isArray(object?.extensionRange)
+                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
+                : [],
+            oneofDecl: Array.isArray(object?.oneofDecl)
+                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
+            reservedRange: Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
+                : [],
+            reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.field) {
+            obj.field = message.field.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.field = [];
+        }
+        if (message.extension) {
+            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.extension = [];
+        }
+        if (message.nestedType) {
+            obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.nestedType = [];
+        }
+        if (message.enumType) {
+            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.enumType = [];
+        }
+        if (message.extensionRange) {
+            obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.extensionRange = [];
+        }
+        if (message.oneofDecl) {
+            obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.oneofDecl = [];
+        }
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined);
+        if (message.reservedRange) {
+            obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.reservedRange = [];
+        }
+        if (message.reservedName) {
+            obj.reservedName = message.reservedName.map((e) => e);
+        }
+        else {
+            obj.reservedName = [];
+        }
+        return obj;
+    },
+};
+function createBaseDescriptorProto_ExtensionRange() {
+    return { start: 0, end: 0, options: undefined };
+}
+exports.DescriptorProto_ExtensionRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? Number(object.start) : 0,
+            end: isSet(object.end) ? Number(object.end) : 0,
+            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseDescriptorProto_ReservedRange() {
+    return { start: 0, end: 0 };
+}
+exports.DescriptorProto_ReservedRange = {
+    fromJSON(object) {
+        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
+function createBaseExtensionRangeOptions() {
+    return { uninterpretedOption: [] };
+}
+exports.ExtensionRangeOptions = {
+    fromJSON(object) {
+        return {
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseFieldDescriptorProto() {
+    return {
+        name: "",
+        number: 0,
+        label: 1,
+        type: 1,
+        typeName: "",
+        extendee: "",
+        defaultValue: "",
+        oneofIndex: 0,
+        jsonName: "",
+        options: undefined,
+        proto3Optional: false,
+    };
+}
+exports.FieldDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            number: isSet(object.number) ? Number(object.number) : 0,
+            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
+            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
+            typeName: isSet(object.typeName) ? String(object.typeName) : "",
+            extendee: isSet(object.extendee) ? String(object.extendee) : "",
+            defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "",
+            oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0,
+            jsonName: isSet(object.jsonName) ? String(object.jsonName) : "",
+            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
+            proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.number !== undefined && (obj.number = Math.round(message.number));
+        message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label));
+        message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type));
+        message.typeName !== undefined && (obj.typeName = message.typeName);
+        message.extendee !== undefined && (obj.extendee = message.extendee);
+        message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue);
+        message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex));
+        message.jsonName !== undefined && (obj.jsonName = message.jsonName);
+        message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined);
+        message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional);
+        return obj;
+    },
+};
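+// A minimal usage sketch, not part of the generated output; the function
+// name is hypothetical. Enum-typed fields accept either the numeric value or
+// the canonical name in JSON, because the fieldDescriptorProto_*FromJSON
+// helpers defined earlier in this module switch over both representations.
+function exampleDecodeField() {
+    // "LABEL_OPTIONAL" (1) and 9 (TYPE_STRING) both normalize to numbers.
+    return exports.FieldDescriptorProto.fromJSON({
+        name: "syntax",
+        number: 12,
+        label: "LABEL_OPTIONAL",
+        type: 9,
+    });
+}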
+function createBaseOneofDescriptorProto() {
+    return { name: "", options: undefined };
+}
+exports.OneofDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseEnumDescriptorProto() {
+    return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] };
+}
+exports.EnumDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [],
+            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
+            reservedRange: Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
+                : [],
+            reservedName: Array.isArray(object?.reservedName)
+                ? object.reservedName.map((e) => String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.value) {
+            obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.value = [];
+        }
+        message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined);
+        if (message.reservedRange) {
+            obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.reservedRange = [];
+        }
+        if (message.reservedName) {
+            obj.reservedName = message.reservedName.map((e) => e);
+        }
+        else {
+            obj.reservedName = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumDescriptorProto_EnumReservedRange() {
+    return { start: 0, end: 0 };
+}
+exports.EnumDescriptorProto_EnumReservedRange = {
+    fromJSON(object) {
+        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
+function createBaseEnumValueDescriptorProto() {
+    return { name: "", number: 0, options: undefined };
+}
+exports.EnumValueDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            number: isSet(object.number) ? Number(object.number) : 0,
+            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.number !== undefined && (obj.number = Math.round(message.number));
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseServiceDescriptorProto() {
+    return { name: "", method: [], options: undefined };
+}
+exports.ServiceDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [],
+            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.method) {
+            obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.method = [];
+        }
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseMethodDescriptorProto() {
+    return {
+        name: "",
+        inputType: "",
+        outputType: "",
+        options: undefined,
+        clientStreaming: false,
+        serverStreaming: false,
+    };
+}
+exports.MethodDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            inputType: isSet(object.inputType) ? String(object.inputType) : "",
+            outputType: isSet(object.outputType) ? String(object.outputType) : "",
+            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
+            clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false,
+            serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.inputType !== undefined && (obj.inputType = message.inputType);
+        message.outputType !== undefined && (obj.outputType = message.outputType);
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined);
+        message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming);
+        message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming);
+        return obj;
+    },
+};
+function createBaseFileOptions() {
+    return {
+        javaPackage: "",
+        javaOuterClassname: "",
+        javaMultipleFiles: false,
+        javaGenerateEqualsAndHash: false,
+        javaStringCheckUtf8: false,
+        optimizeFor: 1,
+        goPackage: "",
+        ccGenericServices: false,
+        javaGenericServices: false,
+        pyGenericServices: false,
+        phpGenericServices: false,
+        deprecated: false,
+        ccEnableArenas: false,
+        objcClassPrefix: "",
+        csharpNamespace: "",
+        swiftPrefix: "",
+        phpClassPrefix: "",
+        phpNamespace: "",
+        phpMetadataNamespace: "",
+        rubyPackage: "",
+        uninterpretedOption: [],
+    };
+}
+exports.FileOptions = {
+    fromJSON(object) {
+        return {
+            javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "",
+            javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "",
+            javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false,
+            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
+                ? Boolean(object.javaGenerateEqualsAndHash)
+                : false,
+            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false,
+            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
+            goPackage: isSet(object.goPackage) ? String(object.goPackage) : "",
+            ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false,
+            javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false,
+            pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false,
+            phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false,
+            objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "",
+            csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "",
+            swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "",
+            phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "",
+            phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "",
+            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "",
+            rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "",
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage);
+        message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname);
+        message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles);
+        message.javaGenerateEqualsAndHash !== undefined &&
+            (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash);
+        message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8);
+        message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor));
+        message.goPackage !== undefined && (obj.goPackage = message.goPackage);
+        message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices);
+        message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices);
+        message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices);
+        message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas);
+        message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix);
+        message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace);
+        message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix);
+        message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix);
+        message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace);
+        message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace);
+        message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseMessageOptions() {
+    return {
+        messageSetWireFormat: false,
+        noStandardDescriptorAccessor: false,
+        deprecated: false,
+        mapEntry: false,
+        uninterpretedOption: [],
+    };
+}
+exports.MessageOptions = {
+    fromJSON(object) {
+        return {
+            messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false,
+            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
+                ? Boolean(object.noStandardDescriptorAccessor)
+                : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat);
+        message.noStandardDescriptorAccessor !== undefined &&
+            (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseFieldOptions() {
+    return {
+        ctype: 0,
+        packed: false,
+        jstype: 0,
+        lazy: false,
+        unverifiedLazy: false,
+        deprecated: false,
+        weak: false,
+        uninterpretedOption: [],
+    };
+}
+exports.FieldOptions = {
+    fromJSON(object) {
+        return {
+            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
+            packed: isSet(object.packed) ? Boolean(object.packed) : false,
+            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
+            lazy: isSet(object.lazy) ? Boolean(object.lazy) : false,
+            unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            weak: isSet(object.weak) ? Boolean(object.weak) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype));
+        message.packed !== undefined && (obj.packed = message.packed);
+        message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype));
+        message.lazy !== undefined && (obj.lazy = message.lazy);
+        message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.weak !== undefined && (obj.weak = message.weak);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseOneofOptions() {
+    return { uninterpretedOption: [] };
+}
+exports.OneofOptions = {
+    fromJSON(object) {
+        return {
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumOptions() {
+    return { allowAlias: false, deprecated: false, uninterpretedOption: [] };
+}
+exports.EnumOptions = {
+    fromJSON(object) {
+        return {
+            allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumValueOptions() {
+    return { deprecated: false, uninterpretedOption: [] };
+}
+exports.EnumValueOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseServiceOptions() {
+    return { deprecated: false, uninterpretedOption: [] };
+}
+exports.ServiceOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseMethodOptions() {
+    return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] };
+}
+exports.MethodOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            idempotencyLevel: isSet(object.idempotencyLevel)
+                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
+                : 0,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.idempotencyLevel !== undefined &&
+            (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel));
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseUninterpretedOption() {
+    return {
+        name: [],
+        identifierValue: "",
+        positiveIntValue: "0",
+        negativeIntValue: "0",
+        doubleValue: 0,
+        stringValue: Buffer.alloc(0),
+        aggregateValue: "",
+    };
+}
+exports.UninterpretedOption = {
+    fromJSON(object) {
+        return {
+            name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [],
+            identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "",
+            positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0",
+            negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0",
+            doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0,
+            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
+            aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name) {
+            obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined);
+        }
+        else {
+            obj.name = [];
+        }
+        message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue);
+        message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue);
+        message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue);
+        message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue);
+        message.stringValue !== undefined &&
+            (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0)));
+        message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue);
+        return obj;
+    },
+};
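+// A minimal usage sketch, not part of the generated output; the function
+// name is hypothetical. Bytes fields such as stringValue travel as base64
+// strings in JSON and as Buffers in memory, via the bytesFromBase64 and
+// base64FromBytes helpers at the bottom of this file.
+function exampleBytesRoundTrip() {
+    const message = exports.UninterpretedOption.fromJSON({
+        stringValue: Buffer.from("hello").toString("base64"), // "aGVsbG8="
+    });
+    // message.stringValue is now a Buffer; toJSON re-encodes it as base64.
+    return exports.UninterpretedOption.toJSON(message).stringValue; // "aGVsbG8="
+}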
+function createBaseUninterpretedOption_NamePart() {
+    return { namePart: "", isExtension: false };
+}
+exports.UninterpretedOption_NamePart = {
+    fromJSON(object) {
+        return {
+            namePart: isSet(object.namePart) ? String(object.namePart) : "",
+            isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.namePart !== undefined && (obj.namePart = message.namePart);
+        message.isExtension !== undefined && (obj.isExtension = message.isExtension);
+        return obj;
+    },
+};
+function createBaseSourceCodeInfo() {
+    return { location: [] };
+}
+exports.SourceCodeInfo = {
+    fromJSON(object) {
+        return {
+            location: Array.isArray(object?.location)
+                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.location) {
+            obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined);
+        }
+        else {
+            obj.location = [];
+        }
+        return obj;
+    },
+};
+function createBaseSourceCodeInfo_Location() {
+    return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] };
+}
+exports.SourceCodeInfo_Location = {
+    fromJSON(object) {
+        return {
+            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
+            span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [],
+            leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "",
+            trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "",
+            leadingDetachedComments: Array.isArray(object?.leadingDetachedComments)
+                ? object.leadingDetachedComments.map((e) => String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        else {
+            obj.path = [];
+        }
+        if (message.span) {
+            obj.span = message.span.map((e) => Math.round(e));
+        }
+        else {
+            obj.span = [];
+        }
+        message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments);
+        message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments);
+        if (message.leadingDetachedComments) {
+            obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e);
+        }
+        else {
+            obj.leadingDetachedComments = [];
+        }
+        return obj;
+    },
+};
+function createBaseGeneratedCodeInfo() {
+    return { annotation: [] };
+}
+exports.GeneratedCodeInfo = {
+    fromJSON(object) {
+        return {
+            annotation: Array.isArray(object?.annotation)
+                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.annotation) {
+            obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined);
+        }
+        else {
+            obj.annotation = [];
+        }
+        return obj;
+    },
+};
+function createBaseGeneratedCodeInfo_Annotation() {
+    return { path: [], sourceFile: "", begin: 0, end: 0 };
+}
+exports.GeneratedCodeInfo_Annotation = {
+    fromJSON(object) {
+        return {
+            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
+            sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "",
+            begin: isSet(object.begin) ? Number(object.begin) : 0,
+            end: isSet(object.end) ? Number(object.end) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        else {
+            obj.path = [];
+        }
+        message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile);
+        message.begin !== undefined && (obj.begin = Math.round(message.begin));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
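+// A minimal usage sketch, not part of the generated output; the function
+// name is hypothetical. The helpers above keep the codecs portable:
+// tsProtoGlobalThis resolves the global object in Node or browsers, the
+// base64 pair prefers Buffer and falls back to atob/btoa, and isSet
+// distinguishes absent (null/undefined) from falsy-but-present values such
+// as 0, "" and false.
+function exampleHelpers() {
+    const bytes = bytesFromBase64("aGk="); // Uint8Array [104, 105] ("hi")
+    const b64 = base64FromBytes(bytes); // back to "aGk="
+    return isSet(0) && isSet("") && b64 === "aGk="; // true: 0 and "" are set
+}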
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
new file mode 100644
index 0000000000000..159135fe87172
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
@@ -0,0 +1,24 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+function createBaseTimestamp() {
+    return { seconds: "0", nanos: 0 };
+}
+exports.Timestamp = {
+    fromJSON(object) {
+        return {
+            seconds: isSet(object.seconds) ? String(object.seconds) : "0",
+            nanos: isSet(object.nanos) ? Number(object.nanos) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.seconds !== undefined && (obj.seconds = message.seconds);
+        message.nanos !== undefined && (obj.nanos = Math.round(message.nanos));
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
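+// A minimal usage sketch, not part of the generated output; the function
+// name is hypothetical. seconds is an int64 in the proto, so this codec
+// keeps it as a decimal string rather than a JS number, avoiding precision
+// loss past Number.MAX_SAFE_INTEGER.
+function exampleTimestampFromDate(date) {
+    const millis = date.getTime();
+    return exports.Timestamp.fromJSON({
+        seconds: String(Math.floor(millis / 1000)), // string-typed int64
+        nanos: (millis % 1000) * 1e6, // sub-second remainder in nanoseconds
+    });
+}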
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
new file mode 100644
index 0000000000000..1ef3e1b3356b7
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
@@ -0,0 +1,106 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
+/* eslint-disable */
+const envelope_1 = require("./envelope");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_rekor_1 = require("./sigstore_rekor");
+function createBaseTimestampVerificationData() {
+    return { rfc3161Timestamps: [] };
+}
+exports.TimestampVerificationData = {
+    fromJSON(object) {
+        return {
+            rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps)
+                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rfc3161Timestamps) {
+            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined);
+        }
+        else {
+            obj.rfc3161Timestamps = [];
+        }
+        return obj;
+    },
+};
+function createBaseVerificationMaterial() {
+    return { content: undefined, tlogEntries: [], timestampVerificationData: undefined };
+}
+exports.VerificationMaterial = {
+    fromJSON(object) {
+        return {
+            content: isSet(object.publicKey)
+                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
+                : isSet(object.x509CertificateChain)
+                    ? {
+                        $case: "x509CertificateChain",
+                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
+                    }
+                    : undefined,
+            tlogEntries: Array.isArray(object?.tlogEntries)
+                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
+                : [],
+            timestampVerificationData: isSet(object.timestampVerificationData)
+                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.content?.$case === "publicKey" &&
+            (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined);
+        message.content?.$case === "x509CertificateChain" &&
+            (obj.x509CertificateChain = message.content?.x509CertificateChain
+                ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain)
+                : undefined);
+        if (message.tlogEntries) {
+            obj.tlogEntries = message.tlogEntries.map((e) => e ? sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
+        }
+        else {
+            obj.tlogEntries = [];
+        }
+        message.timestampVerificationData !== undefined &&
+            (obj.timestampVerificationData = message.timestampVerificationData
+                ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
+                : undefined);
+        return obj;
+    },
+};
+function createBaseBundle() {
+    return { mediaType: "", verificationMaterial: undefined, content: undefined };
+}
+exports.Bundle = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+            verificationMaterial: isSet(object.verificationMaterial)
+                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
+                : undefined,
+            content: isSet(object.messageSignature)
+                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
+                : isSet(object.dsseEnvelope)
+                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+        message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
+            ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
+            : undefined);
+        message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
+            ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
+            : undefined);
+        message.content?.$case === "dsseEnvelope" &&
+            (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
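+// A minimal usage sketch, not part of the generated output; the function
+// name is hypothetical. Oneof fields are modeled in memory as a $case-tagged
+// union, while the JSON form carries the field name of whichever branch is
+// set; fromJSON probes the candidate keys in order to rebuild the tag.
+function exampleBundleContent(bundle) {
+    switch (bundle.content?.$case) {
+        case "messageSignature":
+            return bundle.content.messageSignature; // detached signature branch
+        case "dsseEnvelope":
+            return bundle.content.dsseEnvelope; // DSSE envelope branch
+        default:
+            return undefined; // neither JSON key was present
+    }
+}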
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
new file mode 100644
index 0000000000000..bcd654e9154b9
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -0,0 +1,457 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See  for more
+ * details.
+ * UNSPECIFIED SHOULD NOT be used; the primary reason for its inclusion is to
+ * force any proto JSON serialization to emit the hash algorithm that was
+ * used, as the default behavior is to *omit* the default value of an enum
+ * (which is the first value, represented by '0').
+ */
+var HashAlgorithm;
+(function (HashAlgorithm) {
+    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
+    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
+})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {}));
+function hashAlgorithmFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "HASH_ALGORITHM_UNSPECIFIED":
+            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
+        case 1:
+        case "SHA2_256":
+            return HashAlgorithm.SHA2_256;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
+function hashAlgorithmToJSON(object) {
+    switch (object) {
+        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
+            return "HASH_ALGORITHM_UNSPECIFIED";
+        case HashAlgorithm.SHA2_256:
+            return "SHA2_256";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
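+// A minimal usage sketch, not part of the generated output; the function
+// name is hypothetical. Unlike the message codecs, the enum converters are
+// strict and throw on unrecognized input, so callers handling untrusted
+// JSON may want to guard them.
+function exampleParseHashAlgorithm(value) {
+    try {
+        return hashAlgorithmFromJSON(value); // accepts 1 or "SHA2_256"
+    }
+    catch {
+        return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED; // fallback for bad input
+    }
+}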
+/**
+ * Details of a specific public key, capturing the key encoding method
+ * and signature algorithm.
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519, the valid permutations are listed as a linear set instead of a
+ * Cartesian set (i.e. one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+var PublicKeyDetails;
+(function (PublicKeyDetails) {
+    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+    /** PKCS1_RSA_PKCS1V5 - RSA */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
+    /** PKCS1_RSA_PSS - See RFC8017 */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
+    /** PKIX_ECDSA_P256_SHA_256 - ECDSA */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
+    /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
+    /** PKIX_ED25519 - Ed25519 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
+})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {}));
+function publicKeyDetailsFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
+            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
+        case 1:
+        case "PKCS1_RSA_PKCS1V5":
+            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
+        case 2:
+        case "PKCS1_RSA_PSS":
+            return PublicKeyDetails.PKCS1_RSA_PSS;
+        case 3:
+        case "PKIX_RSA_PKCS1V5":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
+        case 4:
+        case "PKIX_RSA_PSS":
+            return PublicKeyDetails.PKIX_RSA_PSS;
+        case 5:
+        case "PKIX_ECDSA_P256_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
+        case 6:
+        case "PKIX_ECDSA_P256_HMAC_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
+        case 7:
+        case "PKIX_ED25519":
+            return PublicKeyDetails.PKIX_ED25519;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
+function publicKeyDetailsToJSON(object) {
+    switch (object) {
+        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
+            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
+            return "PKCS1_RSA_PKCS1V5";
+        case PublicKeyDetails.PKCS1_RSA_PSS:
+            return "PKCS1_RSA_PSS";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
+            return "PKIX_RSA_PKCS1V5";
+        case PublicKeyDetails.PKIX_RSA_PSS:
+            return "PKIX_RSA_PSS";
+        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
+            return "PKIX_ECDSA_P256_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
+            return "PKIX_ECDSA_P256_HMAC_SHA_256";
+        case PublicKeyDetails.PKIX_ED25519:
+            return "PKIX_ED25519";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
+var SubjectAlternativeNameType;
+(function (SubjectAlternativeNameType) {
+    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
+    /**
+     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
+     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
+     * for more details.
+     */
+    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
+})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {}));
+function subjectAlternativeNameTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
+            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
+        case 1:
+        case "EMAIL":
+            return SubjectAlternativeNameType.EMAIL;
+        case 2:
+        case "URI":
+            return SubjectAlternativeNameType.URI;
+        case 3:
+        case "OTHER_NAME":
+            return SubjectAlternativeNameType.OTHER_NAME;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
+function subjectAlternativeNameTypeToJSON(object) {
+    switch (object) {
+        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
+            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+        case SubjectAlternativeNameType.EMAIL:
+            return "EMAIL";
+        case SubjectAlternativeNameType.URI:
+            return "URI";
+        case SubjectAlternativeNameType.OTHER_NAME:
+            return "OTHER_NAME";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
+function createBaseHashOutput() {
+    return { algorithm: 0, digest: Buffer.alloc(0) };
+}
+exports.HashOutput = {
+    fromJSON(object) {
+        return {
+            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
+            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm));
+        message.digest !== undefined &&
+            (obj.digest = base64FromBytes(message.digest !== undefined ? message.digest : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseMessageSignature() {
+    return { messageDigest: undefined, signature: Buffer.alloc(0) };
+}
+exports.MessageSignature = {
+    fromJSON(object) {
+        return {
+            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
+            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.messageDigest !== undefined &&
+            (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined);
+        message.signature !== undefined &&
+            (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseLogId() {
+    return { keyId: Buffer.alloc(0) };
+}
+exports.LogId = {
+    fromJSON(object) {
+        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.keyId !== undefined &&
+            (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseRFC3161SignedTimestamp() {
+    return { signedTimestamp: Buffer.alloc(0) };
+}
+exports.RFC3161SignedTimestamp = {
+    fromJSON(object) {
+        return {
+            signedTimestamp: isSet(object.signedTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signedTimestamp !== undefined &&
+            (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBasePublicKey() {
+    return { rawBytes: undefined, keyDetails: 0, validFor: undefined };
+}
+exports.PublicKey = {
+    fromJSON(object) {
+        return {
+            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
+            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
+            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.rawBytes !== undefined &&
+            (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined);
+        message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails));
+        message.validFor !== undefined &&
+            (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined);
+        return obj;
+    },
+};
+function createBasePublicKeyIdentifier() {
+    return { hint: "" };
+}
+exports.PublicKeyIdentifier = {
+    fromJSON(object) {
+        return { hint: isSet(object.hint) ? String(object.hint) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.hint !== undefined && (obj.hint = message.hint);
+        return obj;
+    },
+};
+function createBaseObjectIdentifier() {
+    return { id: [] };
+}
+exports.ObjectIdentifier = {
+    fromJSON(object) {
+        return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.id) {
+            obj.id = message.id.map((e) => Math.round(e));
+        }
+        else {
+            obj.id = [];
+        }
+        return obj;
+    },
+};
+function createBaseObjectIdentifierValuePair() {
+    return { oid: undefined, value: Buffer.alloc(0) };
+}
+exports.ObjectIdentifierValuePair = {
+    fromJSON(object) {
+        return {
+            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined);
+        message.value !== undefined &&
+            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseDistinguishedName() {
+    return { organization: "", commonName: "" };
+}
+exports.DistinguishedName = {
+    fromJSON(object) {
+        return {
+            organization: isSet(object.organization) ? String(object.organization) : "",
+            commonName: isSet(object.commonName) ? String(object.commonName) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.organization !== undefined && (obj.organization = message.organization);
+        message.commonName !== undefined && (obj.commonName = message.commonName);
+        return obj;
+    },
+};
+function createBaseX509Certificate() {
+    return { rawBytes: Buffer.alloc(0) };
+}
+exports.X509Certificate = {
+    fromJSON(object) {
+        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.rawBytes !== undefined &&
+            (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseSubjectAlternativeName() {
+    return { type: 0, identity: undefined };
+}
+exports.SubjectAlternativeName = {
+    fromJSON(object) {
+        return {
+            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
+            identity: isSet(object.regexp)
+                ? { $case: "regexp", regexp: String(object.regexp) }
+                : isSet(object.value)
+                    ? { $case: "value", value: String(object.value) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type));
+        message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp);
+        message.identity?.$case === "value" && (obj.value = message.identity?.value);
+        return obj;
+    },
+};
+function createBaseX509CertificateChain() {
+    return { certificates: [] };
+}
+exports.X509CertificateChain = {
+    fromJSON(object) {
+        return {
+            certificates: Array.isArray(object?.certificates)
+                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.certificates) {
+            obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined);
+        }
+        else {
+            obj.certificates = [];
+        }
+        return obj;
+    },
+};
+function createBaseTimeRange() {
+    return { start: undefined, end: undefined };
+}
+exports.TimeRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
+            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = message.start.toISOString());
+        message.end !== undefined && (obj.end = message.end.toISOString());
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function fromTimestamp(t) {
+    let millis = Number(t.seconds) * 1000;
+    millis += t.nanos / 1000000;
+    return new Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
new file mode 100644
index 0000000000000..398193b2075a7
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
@@ -0,0 +1,167 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+function createBaseKindVersion() {
+    return { kind: "", version: "" };
+}
+exports.KindVersion = {
+    fromJSON(object) {
+        return {
+            kind: isSet(object.kind) ? String(object.kind) : "",
+            version: isSet(object.version) ? String(object.version) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.kind !== undefined && (obj.kind = message.kind);
+        message.version !== undefined && (obj.version = message.version);
+        return obj;
+    },
+};
+function createBaseCheckpoint() {
+    return { envelope: "" };
+}
+exports.Checkpoint = {
+    fromJSON(object) {
+        return { envelope: isSet(object.envelope) ? String(object.envelope) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.envelope !== undefined && (obj.envelope = message.envelope);
+        return obj;
+    },
+};
+function createBaseInclusionProof() {
+    return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined };
+}
+exports.InclusionProof = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
+            treeSize: isSet(object.treeSize) ? String(object.treeSize) : "0",
+            hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [],
+            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
+        message.rootHash !== undefined &&
+            (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0)));
+        message.treeSize !== undefined && (obj.treeSize = message.treeSize);
+        if (message.hashes) {
+            obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0)));
+        }
+        else {
+            obj.hashes = [];
+        }
+        message.checkpoint !== undefined &&
+            (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined);
+        return obj;
+    },
+};
+function createBaseInclusionPromise() {
+    return { signedEntryTimestamp: Buffer.alloc(0) };
+}
+exports.InclusionPromise = {
+    fromJSON(object) {
+        return {
+            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signedEntryTimestamp !== undefined &&
+            (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseTransparencyLogEntry() {
+    return {
+        logIndex: "0",
+        logId: undefined,
+        kindVersion: undefined,
+        integratedTime: "0",
+        inclusionPromise: undefined,
+        inclusionProof: undefined,
+        canonicalizedBody: Buffer.alloc(0),
+    };
+}
+exports.TransparencyLogEntry = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
+            integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0",
+            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
+            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
+            canonicalizedBody: isSet(object.canonicalizedBody)
+                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
+        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
+        message.kindVersion !== undefined &&
+            (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined);
+        message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime);
+        message.inclusionPromise !== undefined &&
+            (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined);
+        message.inclusionProof !== undefined &&
+            (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined);
+        message.canonicalizedBody !== undefined &&
+            (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? message.canonicalizedBody : Buffer.alloc(0)));
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
new file mode 100644
index 0000000000000..05e566767cdb2
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
@@ -0,0 +1,103 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+function createBaseTransparencyLogInstance() {
+    return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined };
+}
+exports.TransparencyLogInstance = {
+    fromJSON(object) {
+        return {
+            baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "",
+            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
+            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl);
+        message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm));
+        message.publicKey !== undefined &&
+            (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined);
+        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
+        return obj;
+    },
+};
+function createBaseCertificateAuthority() {
+    return { subject: undefined, uri: "", certChain: undefined, validFor: undefined };
+}
+exports.CertificateAuthority = {
+    fromJSON(object) {
+        return {
+            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
+            uri: isSet(object.uri) ? String(object.uri) : "",
+            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
+            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.subject !== undefined &&
+            (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined);
+        message.uri !== undefined && (obj.uri = message.uri);
+        message.certChain !== undefined &&
+            (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined);
+        message.validFor !== undefined &&
+            (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined);
+        return obj;
+    },
+};
+function createBaseTrustedRoot() {
+    return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] };
+}
+exports.TrustedRoot = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+            tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [],
+            certificateAuthorities: Array.isArray(object?.certificateAuthorities)
+                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+            ctlogs: Array.isArray(object?.ctlogs)
+                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+                : [],
+            timestampAuthorities: Array.isArray(object?.timestampAuthorities)
+                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+        if (message.tlogs) {
+            obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+        }
+        else {
+            obj.tlogs = [];
+        }
+        if (message.certificateAuthorities) {
+            obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+        }
+        else {
+            obj.certificateAuthorities = [];
+        }
+        if (message.ctlogs) {
+            obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+        }
+        else {
+            obj.ctlogs = [];
+        }
+        if (message.timestampAuthorities) {
+            obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+        }
+        else {
+            obj.timestampAuthorities = [];
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
new file mode 100644
index 0000000000000..8a72b89761869
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
@@ -0,0 +1,273 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
+/* eslint-disable */
+const sigstore_bundle_1 = require("./sigstore_bundle");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_trustroot_1 = require("./sigstore_trustroot");
+function createBaseCertificateIdentity() {
+    return { issuer: "", san: undefined, oids: [] };
+}
+exports.CertificateIdentity = {
+    fromJSON(object) {
+        return {
+            issuer: isSet(object.issuer) ? String(object.issuer) : "",
+            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
+            oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.issuer !== undefined && (obj.issuer = message.issuer);
+        message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined);
+        if (message.oids) {
+            obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined);
+        }
+        else {
+            obj.oids = [];
+        }
+        return obj;
+    },
+};
+function createBaseCertificateIdentities() {
+    return { identities: [] };
+}
+exports.CertificateIdentities = {
+    fromJSON(object) {
+        return {
+            identities: Array.isArray(object?.identities)
+                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.identities) {
+            obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined);
+        }
+        else {
+            obj.identities = [];
+        }
+        return obj;
+    },
+};
+function createBasePublicKeyIdentities() {
+    return { publicKeys: [] };
+}
+exports.PublicKeyIdentities = {
+    fromJSON(object) {
+        return {
+            publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.publicKeys) {
+            obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined);
+        }
+        else {
+            obj.publicKeys = [];
+        }
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions() {
+    return { signers: undefined, tlogOptions: undefined, ctlogOptions: undefined, tsaOptions: undefined };
+}
+exports.ArtifactVerificationOptions = {
+    fromJSON(object) {
+        return {
+            signers: isSet(object.certificateIdentities)
+                ? {
+                    $case: "certificateIdentities",
+                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
+                }
+                : isSet(object.publicKeys)
+                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
+                    : undefined,
+            tlogOptions: isSet(object.tlogOptions)
+                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
+                : undefined,
+            ctlogOptions: isSet(object.ctlogOptions)
+                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
+                : undefined,
+            tsaOptions: isSet(object.tsaOptions)
+                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signers?.$case === "certificateIdentities" &&
+            (obj.certificateIdentities = message.signers?.certificateIdentities
+                ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities)
+                : undefined);
+        message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys
+            ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys)
+            : undefined);
+        message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions
+            ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions)
+            : undefined);
+        message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions
+            ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions)
+            : undefined);
+        message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions
+            ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions)
+            : undefined);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_TlogOptions() {
+    return { threshold: 0, performOnlineVerification: false, disable: false };
+}
+exports.ArtifactVerificationOptions_TlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            performOnlineVerification: isSet(object.performOnlineVerification)
+                ? Boolean(object.performOnlineVerification)
+                : false,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.performOnlineVerification !== undefined &&
+            (obj.performOnlineVerification = message.performOnlineVerification);
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_CtlogOptions() {
+    return { threshold: 0, detachedSct: false, disable: false };
+}
+exports.ArtifactVerificationOptions_CtlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            detachedSct: isSet(object.detachedSct) ? Boolean(object.detachedSct) : false,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.detachedSct !== undefined && (obj.detachedSct = message.detachedSct);
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() {
+    return { threshold: 0, disable: false };
+}
+exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifact() {
+    return { data: undefined };
+}
+exports.Artifact = {
+    fromJSON(object) {
+        return {
+            data: isSet(object.artifactUri)
+                ? { $case: "artifactUri", artifactUri: String(object.artifactUri) }
+                : isSet(object.artifact)
+                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri);
+        message.data?.$case === "artifact" &&
+            (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined);
+        return obj;
+    },
+};
+function createBaseInput() {
+    return {
+        artifactTrustRoot: undefined,
+        artifactVerificationOptions: undefined,
+        bundle: undefined,
+        artifact: undefined,
+    };
+}
+exports.Input = {
+    fromJSON(object) {
+        return {
+            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
+            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
+                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
+                : undefined,
+            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
+            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.artifactTrustRoot !== undefined &&
+            (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined);
+        message.artifactVerificationOptions !== undefined &&
+            (obj.artifactVerificationOptions = message.artifactVerificationOptions
+                ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions)
+                : undefined);
+        message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined);
+        message.artifact !== undefined && (obj.artifact = message.artifact ? exports.Artifact.toJSON(message.artifact) : undefined);
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js
new file mode 100644
index 0000000000000..eafb768c48fca
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js
@@ -0,0 +1,37 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+__exportStar(require("./__generated__/envelope"), exports);
+__exportStar(require("./__generated__/sigstore_bundle"), exports);
+__exportStar(require("./__generated__/sigstore_common"), exports);
+__exportStar(require("./__generated__/sigstore_rekor"), exports);
+__exportStar(require("./__generated__/sigstore_trustroot"), exports);
+__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json
new file mode 100644
index 0000000000000..450abb157f31a
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json
@@ -0,0 +1,31 @@
+{
+  "name": "@sigstore/protobuf-specs",
+  "version": "0.2.1",
+  "description": "code-signing for npm packages",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "build": "tsc"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/protobuf-specs.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "bugs": {
+    "url": "https://github.com/sigstore/protobuf-specs/issues"
+  },
+  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
+  "devDependencies": {
+    "@tsconfig/node14": "^1.0.3",
+    "@types/node": "^18.14.0",
+    "typescript": "^4.9.5"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  }
+}
diff --git a/node_modules/@sigstore/sign/package.json b/node_modules/@sigstore/sign/package.json
new file mode 100644
index 0000000000000..732c94f9fcd49
--- /dev/null
+++ b/node_modules/@sigstore/sign/package.json
@@ -0,0 +1,42 @@
+{
+  "name": "@sigstore/sign",
+  "version": "2.0.0",
+  "description": "Sigstore signing library",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "clean": "shx rm -rf dist *.tsbuildinfo",
+    "build": "tsc --build",
+    "test": "jest"
+  },
+  "files": [
+    "dist"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/sigstore-js.git"
+  },
+  "bugs": {
+    "url": "https://github.com/sigstore/sigstore-js/issues"
+  },
+  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/sign#readme",
+  "publishConfig": {
+    "provenance": true
+  },
+  "devDependencies": {
+    "@sigstore/jest": "^0.0.0",
+    "@sigstore/mock": "^0.3.0",
+    "@sigstore/rekor-types": "^2.0.0",
+    "@types/make-fetch-happen": "^10.0.0"
+  },
+  "dependencies": {
+    "@sigstore/bundle": "^2.0.0",
+    "@sigstore/protobuf-specs": "^0.2.1",
+    "make-fetch-happen": "^13.0.0"
+  },
+  "engines": {
+    "node": "^16.14.0 || >=18.0.0"
+  }
+}
diff --git a/node_modules/@sigstore/tuf/dist/client.js b/node_modules/@sigstore/tuf/dist/client.js
index 08d6b61840909..797346d39e620 100644
--- a/node_modules/@sigstore/tuf/dist/client.js
+++ b/node_modules/@sigstore/tuf/dist/client.js
@@ -76,21 +76,9 @@ function initClient(cachePath, remote, options) {
     const baseURL = remote.mirror;
     const config = {
         fetchTimeout: options.timeout,
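+        // tuf-js now accepts npm-style retry options (number, boolean, or object) directly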
+        fetchRetry: options.retry,
     };
-    // tuf-js only supports a number for fetchRetries so we have to
-    // convert the boolean and object options to a number.
-    /* istanbul ignore if */
-    if (typeof options.retry !== 'undefined') {
-        if (typeof options.retry === 'number') {
-            config.fetchRetries = options.retry;
-        }
-        else if (typeof options.retry === 'object') {
-            config.fetchRetries = options.retry.retries;
-        }
-        else if (options.retry === true) {
-            config.fetchRetries = 1;
-        }
-    }
     return new tuf_js_1.Updater({
         metadataBaseUrl: baseURL,
         targetBaseUrl: `${baseURL}/targets`,
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 The Sigstore Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
new file mode 100644
index 0000000000000..0c367a8384454
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
@@ -0,0 +1,89 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Envelope = void 0;
+function createBaseEnvelope() {
+    return { payload: Buffer.alloc(0), payloadType: "", signatures: [] };
+}
+exports.Envelope = {
+    fromJSON(object) {
+        return {
+            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
+            payloadType: isSet(object.payloadType) ? String(object.payloadType) : "",
+            signatures: Array.isArray(object?.signatures) ? object.signatures.map((e) => exports.Signature.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.payload !== undefined &&
+            (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0)));
+        message.payloadType !== undefined && (obj.payloadType = message.payloadType);
+        if (message.signatures) {
+            obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined);
+        }
+        else {
+            obj.signatures = [];
+        }
+        return obj;
+    },
+};
+function createBaseSignature() {
+    return { sig: Buffer.alloc(0), keyid: "" };
+}
+exports.Signature = {
+    fromJSON(object) {
+        return {
+            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
+            keyid: isSet(object.keyid) ? String(object.keyid) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0)));
+        message.keyid !== undefined && (obj.keyid = message.keyid);
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
new file mode 100644
index 0000000000000..073093b8371a8
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
@@ -0,0 +1,185 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
+/* eslint-disable */
+const any_1 = require("./google/protobuf/any");
+const timestamp_1 = require("./google/protobuf/timestamp");
+function createBaseCloudEvent() {
+    return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined };
+}
+exports.CloudEvent = {
+    fromJSON(object) {
+        return {
+            id: isSet(object.id) ? String(object.id) : "",
+            source: isSet(object.source) ? String(object.source) : "",
+            specVersion: isSet(object.specVersion) ? String(object.specVersion) : "",
+            type: isSet(object.type) ? String(object.type) : "",
+            attributes: isObject(object.attributes)
+                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
+                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
+                    return acc;
+                }, {})
+                : {},
+            data: isSet(object.binaryData)
+                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
+                : isSet(object.textData)
+                    ? { $case: "textData", textData: String(object.textData) }
+                    : isSet(object.protoData)
+                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
+                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.id !== undefined && (obj.id = message.id);
+        message.source !== undefined && (obj.source = message.source);
+        message.specVersion !== undefined && (obj.specVersion = message.specVersion);
+        message.type !== undefined && (obj.type = message.type);
+        obj.attributes = {};
+        if (message.attributes) {
+            Object.entries(message.attributes).forEach(([k, v]) => {
+                obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
+            });
+        }
+        message.data?.$case === "binaryData" &&
+            (obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined);
+        message.data?.$case === "textData" && (obj.textData = message.data?.textData);
+        message.data?.$case === "protoData" &&
+            (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined);
+        return obj;
+    },
+};
+function createBaseCloudEvent_AttributesEntry() {
+    return { key: "", value: undefined };
+}
+exports.CloudEvent_AttributesEntry = {
+    fromJSON(object) {
+        return {
+            key: isSet(object.key) ? String(object.key) : "",
+            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.key !== undefined && (obj.key = message.key);
+        message.value !== undefined &&
+            (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined);
+        return obj;
+    },
+};
+function createBaseCloudEvent_CloudEventAttributeValue() {
+    return { attr: undefined };
+}
+exports.CloudEvent_CloudEventAttributeValue = {
+    fromJSON(object) {
+        return {
+            attr: isSet(object.ceBoolean)
+                ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) }
+                : isSet(object.ceInteger)
+                    ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) }
+                    : isSet(object.ceString)
+                        ? { $case: "ceString", ceString: String(object.ceString) }
+                        : isSet(object.ceBytes)
+                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
+                            : isSet(object.ceUri)
+                                ? { $case: "ceUri", ceUri: String(object.ceUri) }
+                                : isSet(object.ceUriRef)
+                                    ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) }
+                                    : isSet(object.ceTimestamp)
+                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
+                                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean);
+        message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger));
+        message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString);
+        message.attr?.$case === "ceBytes" &&
+            (obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined);
+        message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri);
+        message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef);
+        message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString());
+        return obj;
+    },
+};
+function createBaseCloudEventBatch() {
+    return { events: [] };
+}
+exports.CloudEventBatch = {
+    fromJSON(object) {
+        return { events: Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.events) {
+            obj.events = message.events.map((e) => e ? exports.CloudEvent.toJSON(e) : undefined);
+        }
+        else {
+            obj.events = [];
+        }
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
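+// Encode raw bytes as base64: via Buffer when available, otherwise by
+// building a binary string and passing it to btoa.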
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
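+// Convert a protobuf Timestamp ({ seconds, nanos }) into a JavaScript Date;
+// precision finer than one millisecond is discarded.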
+function fromTimestamp(t) {
+    let millis = Number(t.seconds) * 1000;
+    millis += t.nanos / 1000000;
+    return new Date(millis);
+}
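+// Normalize a JSON timestamp to a Date: accepts a Date instance, an ISO-8601
+// string, or protobuf Timestamp JSON (decoded via the imported Timestamp codec).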
+function fromJsonTimestamp(o) {
+    if (o instanceof Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isObject(value) {
+    return typeof value === "object" && value !== null;
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
new file mode 100644
index 0000000000000..da627499ad765
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
@@ -0,0 +1,119 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0;
+/**
+ * An indicator of the behavior of a given field (for example, that a field
+ * is required in requests, or given as output but ignored as input).
+ * This **does not** change the behavior in protocol buffers itself; it only
+ * denotes the behavior and may affect how API tooling handles the field.
+ *
+ * Note: This enum **may** receive new values in the future.
+ */
+var FieldBehavior;
+(function (FieldBehavior) {
+    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
+    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
+    /**
+     * OPTIONAL - Specifically denotes a field as optional.
+     * While all fields in protocol buffers are optional, this may be specified
+     * for emphasis if appropriate.
+     */
+    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
+    /**
+     * REQUIRED - Denotes a field as required.
+     * This indicates that the field **must** be provided as part of the request,
+     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
+     */
+    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
+    /**
+     * OUTPUT_ONLY - Denotes a field as output only.
+     * This indicates that the field is provided in responses, but including the
+     * field in a request does nothing (the server *must* ignore it and
+     * *must not* throw an error as a result of the field's presence).
+     */
+    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
+    /**
+     * INPUT_ONLY - Denotes a field as input only.
+     * This indicates that the field is provided in requests, and the
+     * corresponding field is not included in output.
+     */
+    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
+    /**
+     * IMMUTABLE - Denotes a field as immutable.
+     * This indicates that the field may be set once in a request to create a
+     * resource, but may not be changed thereafter.
+     */
+    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
+    /**
+     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
+     * This indicates that the service may provide the elements of the list
+     * in any arbitrary order, rather than the order the user originally
+     * provided. Additionally, the list's order may or may not be stable.
+     */
+    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
+})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {}));
+function fieldBehaviorFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "FIELD_BEHAVIOR_UNSPECIFIED":
+            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
+        case 1:
+        case "OPTIONAL":
+            return FieldBehavior.OPTIONAL;
+        case 2:
+        case "REQUIRED":
+            return FieldBehavior.REQUIRED;
+        case 3:
+        case "OUTPUT_ONLY":
+            return FieldBehavior.OUTPUT_ONLY;
+        case 4:
+        case "INPUT_ONLY":
+            return FieldBehavior.INPUT_ONLY;
+        case 5:
+        case "IMMUTABLE":
+            return FieldBehavior.IMMUTABLE;
+        case 6:
+        case "UNORDERED_LIST":
+            return FieldBehavior.UNORDERED_LIST;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
+exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
+function fieldBehaviorToJSON(object) {
+    switch (object) {
+        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
+            return "FIELD_BEHAVIOR_UNSPECIFIED";
+        case FieldBehavior.OPTIONAL:
+            return "OPTIONAL";
+        case FieldBehavior.REQUIRED:
+            return "REQUIRED";
+        case FieldBehavior.OUTPUT_ONLY:
+            return "OUTPUT_ONLY";
+        case FieldBehavior.INPUT_ONLY:
+            return "INPUT_ONLY";
+        case FieldBehavior.IMMUTABLE:
+            return "IMMUTABLE";
+        case FieldBehavior.UNORDERED_LIST:
+            return "UNORDERED_LIST";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
+exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
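+// Usage sketch for the enum converters above (inputs are hypothetical):
+//
+//   fieldBehaviorFromJSON("REQUIRED");            // -> FieldBehavior.REQUIRED (2)
+//   fieldBehaviorFromJSON(2);                     // -> FieldBehavior.REQUIRED
+//   fieldBehaviorToJSON(FieldBehavior.REQUIRED);  // -> "REQUIRED"
+//   fieldBehaviorFromJSON("BOGUS");               // throws on unrecognized values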
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
new file mode 100644
index 0000000000000..6b3f3c97a6647
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
@@ -0,0 +1,65 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Any = void 0;
+function createBaseAny() {
+    return { typeUrl: "", value: Buffer.alloc(0) };
+}
+exports.Any = {
+    fromJSON(object) {
+        return {
+            typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "",
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl);
+        message.value !== undefined &&
+            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+        return obj;
+    },
+};
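+// Usage sketch for the Any codec above (the typeUrl and payload are hypothetical):
+//
+//   const any = exports.Any.fromJSON({ typeUrl: "type.googleapis.com/Example", value: "CAE=" });
+//   // any.value is a Buffer holding the decoded bytes [0x08, 0x01]
+//   exports.Any.toJSON(any);
+//   // -> { typeUrl: "type.googleapis.com/Example", value: "CAE=" }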
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
new file mode 100644
index 0000000000000..d429aac846043
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
@@ -0,0 +1,1308 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0;
+var FieldDescriptorProto_Type;
+(function (FieldDescriptorProto_Type) {
+    /**
+     * TYPE_DOUBLE - 0 is reserved for errors.
+     * Order is weird for historical reasons.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
+    /**
+     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
+    /**
+     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
+    /**
+     * TYPE_GROUP - Tag-delimited aggregate.
+     * Group type is deprecated and not supported in proto3. However, Proto3
+     * implementations should still be able to parse the group wire format and
+     * treat group fields as unknown fields.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
+    /** TYPE_MESSAGE - Length-delimited aggregate. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
+    /** TYPE_BYTES - New in version 2. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
+    /** TYPE_SINT32 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
+    /** TYPE_SINT64 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
+})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {}));
+function fieldDescriptorProto_TypeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "TYPE_DOUBLE":
+            return FieldDescriptorProto_Type.TYPE_DOUBLE;
+        case 2:
+        case "TYPE_FLOAT":
+            return FieldDescriptorProto_Type.TYPE_FLOAT;
+        case 3:
+        case "TYPE_INT64":
+            return FieldDescriptorProto_Type.TYPE_INT64;
+        case 4:
+        case "TYPE_UINT64":
+            return FieldDescriptorProto_Type.TYPE_UINT64;
+        case 5:
+        case "TYPE_INT32":
+            return FieldDescriptorProto_Type.TYPE_INT32;
+        case 6:
+        case "TYPE_FIXED64":
+            return FieldDescriptorProto_Type.TYPE_FIXED64;
+        case 7:
+        case "TYPE_FIXED32":
+            return FieldDescriptorProto_Type.TYPE_FIXED32;
+        case 8:
+        case "TYPE_BOOL":
+            return FieldDescriptorProto_Type.TYPE_BOOL;
+        case 9:
+        case "TYPE_STRING":
+            return FieldDescriptorProto_Type.TYPE_STRING;
+        case 10:
+        case "TYPE_GROUP":
+            return FieldDescriptorProto_Type.TYPE_GROUP;
+        case 11:
+        case "TYPE_MESSAGE":
+            return FieldDescriptorProto_Type.TYPE_MESSAGE;
+        case 12:
+        case "TYPE_BYTES":
+            return FieldDescriptorProto_Type.TYPE_BYTES;
+        case 13:
+        case "TYPE_UINT32":
+            return FieldDescriptorProto_Type.TYPE_UINT32;
+        case 14:
+        case "TYPE_ENUM":
+            return FieldDescriptorProto_Type.TYPE_ENUM;
+        case 15:
+        case "TYPE_SFIXED32":
+            return FieldDescriptorProto_Type.TYPE_SFIXED32;
+        case 16:
+        case "TYPE_SFIXED64":
+            return FieldDescriptorProto_Type.TYPE_SFIXED64;
+        case 17:
+        case "TYPE_SINT32":
+            return FieldDescriptorProto_Type.TYPE_SINT32;
+        case 18:
+        case "TYPE_SINT64":
+            return FieldDescriptorProto_Type.TYPE_SINT64;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
+function fieldDescriptorProto_TypeToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Type.TYPE_DOUBLE:
+            return "TYPE_DOUBLE";
+        case FieldDescriptorProto_Type.TYPE_FLOAT:
+            return "TYPE_FLOAT";
+        case FieldDescriptorProto_Type.TYPE_INT64:
+            return "TYPE_INT64";
+        case FieldDescriptorProto_Type.TYPE_UINT64:
+            return "TYPE_UINT64";
+        case FieldDescriptorProto_Type.TYPE_INT32:
+            return "TYPE_INT32";
+        case FieldDescriptorProto_Type.TYPE_FIXED64:
+            return "TYPE_FIXED64";
+        case FieldDescriptorProto_Type.TYPE_FIXED32:
+            return "TYPE_FIXED32";
+        case FieldDescriptorProto_Type.TYPE_BOOL:
+            return "TYPE_BOOL";
+        case FieldDescriptorProto_Type.TYPE_STRING:
+            return "TYPE_STRING";
+        case FieldDescriptorProto_Type.TYPE_GROUP:
+            return "TYPE_GROUP";
+        case FieldDescriptorProto_Type.TYPE_MESSAGE:
+            return "TYPE_MESSAGE";
+        case FieldDescriptorProto_Type.TYPE_BYTES:
+            return "TYPE_BYTES";
+        case FieldDescriptorProto_Type.TYPE_UINT32:
+            return "TYPE_UINT32";
+        case FieldDescriptorProto_Type.TYPE_ENUM:
+            return "TYPE_ENUM";
+        case FieldDescriptorProto_Type.TYPE_SFIXED32:
+            return "TYPE_SFIXED32";
+        case FieldDescriptorProto_Type.TYPE_SFIXED64:
+            return "TYPE_SFIXED64";
+        case FieldDescriptorProto_Type.TYPE_SINT32:
+            return "TYPE_SINT32";
+        case FieldDescriptorProto_Type.TYPE_SINT64:
+            return "TYPE_SINT64";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
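+// Usage sketch; the same round-trip pattern applies to every enum converter
+// in this file (inputs are hypothetical):
+//
+//   fieldDescriptorProto_TypeFromJSON("TYPE_STRING");                      // -> 9
+//   fieldDescriptorProto_TypeToJSON(FieldDescriptorProto_Type.TYPE_BYTES); // -> "TYPE_BYTES"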
+var FieldDescriptorProto_Label;
+(function (FieldDescriptorProto_Label) {
+    /** LABEL_OPTIONAL - 0 is reserved for errors */
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
+})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {}));
+function fieldDescriptorProto_LabelFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "LABEL_OPTIONAL":
+            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
+        case 2:
+        case "LABEL_REQUIRED":
+            return FieldDescriptorProto_Label.LABEL_REQUIRED;
+        case 3:
+        case "LABEL_REPEATED":
+            return FieldDescriptorProto_Label.LABEL_REPEATED;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
+function fieldDescriptorProto_LabelToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
+            return "LABEL_OPTIONAL";
+        case FieldDescriptorProto_Label.LABEL_REQUIRED:
+            return "LABEL_REQUIRED";
+        case FieldDescriptorProto_Label.LABEL_REPEATED:
+            return "LABEL_REPEATED";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
+/** Generated classes can be optimized for speed or code size. */
+var FileOptions_OptimizeMode;
+(function (FileOptions_OptimizeMode) {
+    /** SPEED - Generate complete code for parsing, serialization, etc. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
+    /** CODE_SIZE - Use ReflectionOps to implement these methods. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
+    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
+})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {}));
+function fileOptions_OptimizeModeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "SPEED":
+            return FileOptions_OptimizeMode.SPEED;
+        case 2:
+        case "CODE_SIZE":
+            return FileOptions_OptimizeMode.CODE_SIZE;
+        case 3:
+        case "LITE_RUNTIME":
+            return FileOptions_OptimizeMode.LITE_RUNTIME;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
+function fileOptions_OptimizeModeToJSON(object) {
+    switch (object) {
+        case FileOptions_OptimizeMode.SPEED:
+            return "SPEED";
+        case FileOptions_OptimizeMode.CODE_SIZE:
+            return "CODE_SIZE";
+        case FileOptions_OptimizeMode.LITE_RUNTIME:
+            return "LITE_RUNTIME";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
+var FieldOptions_CType;
+(function (FieldOptions_CType) {
+    /** STRING - Default mode. */
+    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
+    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
+    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
+})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {}));
+function fieldOptions_CTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "STRING":
+            return FieldOptions_CType.STRING;
+        case 1:
+        case "CORD":
+            return FieldOptions_CType.CORD;
+        case 2:
+        case "STRING_PIECE":
+            return FieldOptions_CType.STRING_PIECE;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
+function fieldOptions_CTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_CType.STRING:
+            return "STRING";
+        case FieldOptions_CType.CORD:
+            return "CORD";
+        case FieldOptions_CType.STRING_PIECE:
+            return "STRING_PIECE";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
+var FieldOptions_JSType;
+(function (FieldOptions_JSType) {
+    /** JS_NORMAL - Use the default type. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
+    /** JS_STRING - Use JavaScript strings. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
+    /** JS_NUMBER - Use JavaScript numbers. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
+})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {}));
+function fieldOptions_JSTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "JS_NORMAL":
+            return FieldOptions_JSType.JS_NORMAL;
+        case 1:
+        case "JS_STRING":
+            return FieldOptions_JSType.JS_STRING;
+        case 2:
+        case "JS_NUMBER":
+            return FieldOptions_JSType.JS_NUMBER;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
+function fieldOptions_JSTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_JSType.JS_NORMAL:
+            return "JS_NORMAL";
+        case FieldOptions_JSType.JS_STRING:
+            return "JS_STRING";
+        case FieldOptions_JSType.JS_NUMBER:
+            return "JS_NUMBER";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
+/**
+ * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+ * or neither? HTTP based RPC implementation may choose GET verb for safe
+ * methods, and PUT verb for idempotent methods instead of the default POST.
+ */
+var MethodOptions_IdempotencyLevel;
+(function (MethodOptions_IdempotencyLevel) {
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
+    /** NO_SIDE_EFFECTS - implies idempotent */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
+    /** IDEMPOTENT - idempotent, but may have side effects */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
+})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {}));
+function methodOptions_IdempotencyLevelFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "IDEMPOTENCY_UNKNOWN":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
+        case 1:
+        case "NO_SIDE_EFFECTS":
+            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
+        case 2:
+        case "IDEMPOTENT":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
+function methodOptions_IdempotencyLevelToJSON(object) {
+    switch (object) {
+        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
+            return "IDEMPOTENCY_UNKNOWN";
+        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
+            return "NO_SIDE_EFFECTS";
+        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
+            return "IDEMPOTENT";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
+function createBaseFileDescriptorSet() {
+    return { file: [] };
+}
+exports.FileDescriptorSet = {
+    fromJSON(object) {
+        return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.file) {
+            obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.file = [];
+        }
+        return obj;
+    },
+};
+function createBaseFileDescriptorProto() {
+    return {
+        name: "",
+        package: "",
+        dependency: [],
+        publicDependency: [],
+        weakDependency: [],
+        messageType: [],
+        enumType: [],
+        service: [],
+        extension: [],
+        options: undefined,
+        sourceCodeInfo: undefined,
+        syntax: "",
+    };
+}
+exports.FileDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            package: isSet(object.package) ? String(object.package) : "",
+            dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [],
+            publicDependency: Array.isArray(object?.publicDependency)
+                ? object.publicDependency.map((e) => Number(e))
+                : [],
+            weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [],
+            messageType: Array.isArray(object?.messageType)
+                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
+            service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [],
+            extension: Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
+            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
+            syntax: isSet(object.syntax) ? String(object.syntax) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.package !== undefined && (obj.package = message.package);
+        if (message.dependency) {
+            obj.dependency = message.dependency.map((e) => e);
+        }
+        else {
+            obj.dependency = [];
+        }
+        if (message.publicDependency) {
+            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
+        }
+        else {
+            obj.publicDependency = [];
+        }
+        if (message.weakDependency) {
+            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
+        }
+        else {
+            obj.weakDependency = [];
+        }
+        if (message.messageType) {
+            obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.messageType = [];
+        }
+        if (message.enumType) {
+            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.enumType = [];
+        }
+        if (message.service) {
+            obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.service = [];
+        }
+        if (message.extension) {
+            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.extension = [];
+        }
+        message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined);
+        message.sourceCodeInfo !== undefined &&
+            (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined);
+        message.syntax !== undefined && (obj.syntax = message.syntax);
+        return obj;
+    },
+};
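+// Usage sketch for the message codec above: unset fields fall back to their
+// proto3 defaults (empty string, empty array, or undefined for nested
+// messages). The input is hypothetical:
+//
+//   exports.FileDescriptorProto.fromJSON({ name: "example.proto", syntax: "proto3" });
+//   // -> { name: "example.proto", package: "", dependency: [], ..., syntax: "proto3" }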
+function createBaseDescriptorProto() {
+    return {
+        name: "",
+        field: [],
+        extension: [],
+        nestedType: [],
+        enumType: [],
+        extensionRange: [],
+        oneofDecl: [],
+        options: undefined,
+        reservedRange: [],
+        reservedName: [],
+    };
+}
+exports.DescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [],
+            extension: Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            nestedType: Array.isArray(object?.nestedType)
+                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
+            extensionRange: Array.isArray(object?.extensionRange)
+                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
+                : [],
+            oneofDecl: Array.isArray(object?.oneofDecl)
+                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
+            reservedRange: Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
+                : [],
+            reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.field) {
+            obj.field = message.field.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.field = [];
+        }
+        if (message.extension) {
+            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.extension = [];
+        }
+        if (message.nestedType) {
+            obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.nestedType = [];
+        }
+        if (message.enumType) {
+            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.enumType = [];
+        }
+        if (message.extensionRange) {
+            obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.extensionRange = [];
+        }
+        if (message.oneofDecl) {
+            obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.oneofDecl = [];
+        }
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined);
+        if (message.reservedRange) {
+            obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.reservedRange = [];
+        }
+        if (message.reservedName) {
+            obj.reservedName = message.reservedName.map((e) => e);
+        }
+        else {
+            obj.reservedName = [];
+        }
+        return obj;
+    },
+};
+function createBaseDescriptorProto_ExtensionRange() {
+    return { start: 0, end: 0, options: undefined };
+}
+exports.DescriptorProto_ExtensionRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? Number(object.start) : 0,
+            end: isSet(object.end) ? Number(object.end) : 0,
+            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseDescriptorProto_ReservedRange() {
+    return { start: 0, end: 0 };
+}
+exports.DescriptorProto_ReservedRange = {
+    fromJSON(object) {
+        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
+function createBaseExtensionRangeOptions() {
+    return { uninterpretedOption: [] };
+}
+exports.ExtensionRangeOptions = {
+    fromJSON(object) {
+        return {
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseFieldDescriptorProto() {
+    return {
+        name: "",
+        number: 0,
+        label: 1,
+        type: 1,
+        typeName: "",
+        extendee: "",
+        defaultValue: "",
+        oneofIndex: 0,
+        jsonName: "",
+        options: undefined,
+        proto3Optional: false,
+    };
+}
+exports.FieldDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            number: isSet(object.number) ? Number(object.number) : 0,
+            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
+            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
+            typeName: isSet(object.typeName) ? String(object.typeName) : "",
+            extendee: isSet(object.extendee) ? String(object.extendee) : "",
+            defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "",
+            oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0,
+            jsonName: isSet(object.jsonName) ? String(object.jsonName) : "",
+            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
+            proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.number !== undefined && (obj.number = Math.round(message.number));
+        message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label));
+        message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type));
+        message.typeName !== undefined && (obj.typeName = message.typeName);
+        message.extendee !== undefined && (obj.extendee = message.extendee);
+        message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue);
+        message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex));
+        message.jsonName !== undefined && (obj.jsonName = message.jsonName);
+        message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined);
+        message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional);
+        return obj;
+    },
+};
+function createBaseOneofDescriptorProto() {
+    return { name: "", options: undefined };
+}
+exports.OneofDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseEnumDescriptorProto() {
+    return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] };
+}
+exports.EnumDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [],
+            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
+            reservedRange: Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
+                : [],
+            reservedName: Array.isArray(object?.reservedName)
+                ? object.reservedName.map((e) => String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.value) {
+            obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.value = [];
+        }
+        message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined);
+        if (message.reservedRange) {
+            obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.reservedRange = [];
+        }
+        if (message.reservedName) {
+            obj.reservedName = message.reservedName.map((e) => e);
+        }
+        else {
+            obj.reservedName = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumDescriptorProto_EnumReservedRange() {
+    return { start: 0, end: 0 };
+}
+exports.EnumDescriptorProto_EnumReservedRange = {
+    fromJSON(object) {
+        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
+function createBaseEnumValueDescriptorProto() {
+    return { name: "", number: 0, options: undefined };
+}
+exports.EnumValueDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            number: isSet(object.number) ? Number(object.number) : 0,
+            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.number !== undefined && (obj.number = Math.round(message.number));
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseServiceDescriptorProto() {
+    return { name: "", method: [], options: undefined };
+}
+exports.ServiceDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [],
+            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.method) {
+            obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.method = [];
+        }
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseMethodDescriptorProto() {
+    return {
+        name: "",
+        inputType: "",
+        outputType: "",
+        options: undefined,
+        clientStreaming: false,
+        serverStreaming: false,
+    };
+}
+exports.MethodDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            inputType: isSet(object.inputType) ? String(object.inputType) : "",
+            outputType: isSet(object.outputType) ? String(object.outputType) : "",
+            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
+            clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false,
+            serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.inputType !== undefined && (obj.inputType = message.inputType);
+        message.outputType !== undefined && (obj.outputType = message.outputType);
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined);
+        message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming);
+        message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming);
+        return obj;
+    },
+};
+function createBaseFileOptions() {
+    return {
+        javaPackage: "",
+        javaOuterClassname: "",
+        javaMultipleFiles: false,
+        javaGenerateEqualsAndHash: false,
+        javaStringCheckUtf8: false,
+        optimizeFor: 1,
+        goPackage: "",
+        ccGenericServices: false,
+        javaGenericServices: false,
+        pyGenericServices: false,
+        phpGenericServices: false,
+        deprecated: false,
+        ccEnableArenas: false,
+        objcClassPrefix: "",
+        csharpNamespace: "",
+        swiftPrefix: "",
+        phpClassPrefix: "",
+        phpNamespace: "",
+        phpMetadataNamespace: "",
+        rubyPackage: "",
+        uninterpretedOption: [],
+    };
+}
+exports.FileOptions = {
+    fromJSON(object) {
+        return {
+            javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "",
+            javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "",
+            javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false,
+            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
+                ? Boolean(object.javaGenerateEqualsAndHash)
+                : false,
+            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false,
+            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
+            goPackage: isSet(object.goPackage) ? String(object.goPackage) : "",
+            ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false,
+            javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false,
+            pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false,
+            phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false,
+            objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "",
+            csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "",
+            swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "",
+            phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "",
+            phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "",
+            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "",
+            rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "",
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage);
+        message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname);
+        message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles);
+        message.javaGenerateEqualsAndHash !== undefined &&
+            (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash);
+        message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8);
+        message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor));
+        message.goPackage !== undefined && (obj.goPackage = message.goPackage);
+        message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices);
+        message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices);
+        message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices);
+        message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas);
+        message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix);
+        message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace);
+        message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix);
+        message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix);
+        message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace);
+        message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace);
+        message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseMessageOptions() {
+    return {
+        messageSetWireFormat: false,
+        noStandardDescriptorAccessor: false,
+        deprecated: false,
+        mapEntry: false,
+        uninterpretedOption: [],
+    };
+}
+exports.MessageOptions = {
+    fromJSON(object) {
+        return {
+            messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false,
+            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
+                ? Boolean(object.noStandardDescriptorAccessor)
+                : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat);
+        message.noStandardDescriptorAccessor !== undefined &&
+            (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseFieldOptions() {
+    return {
+        ctype: 0,
+        packed: false,
+        jstype: 0,
+        lazy: false,
+        unverifiedLazy: false,
+        deprecated: false,
+        weak: false,
+        uninterpretedOption: [],
+    };
+}
+exports.FieldOptions = {
+    fromJSON(object) {
+        return {
+            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
+            packed: isSet(object.packed) ? Boolean(object.packed) : false,
+            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
+            lazy: isSet(object.lazy) ? Boolean(object.lazy) : false,
+            unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            weak: isSet(object.weak) ? Boolean(object.weak) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype));
+        message.packed !== undefined && (obj.packed = message.packed);
+        message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype));
+        message.lazy !== undefined && (obj.lazy = message.lazy);
+        message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.weak !== undefined && (obj.weak = message.weak);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseOneofOptions() {
+    return { uninterpretedOption: [] };
+}
+exports.OneofOptions = {
+    fromJSON(object) {
+        return {
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumOptions() {
+    return { allowAlias: false, deprecated: false, uninterpretedOption: [] };
+}
+exports.EnumOptions = {
+    fromJSON(object) {
+        return {
+            allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumValueOptions() {
+    return { deprecated: false, uninterpretedOption: [] };
+}
+exports.EnumValueOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseServiceOptions() {
+    return { deprecated: false, uninterpretedOption: [] };
+}
+exports.ServiceOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseMethodOptions() {
+    return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] };
+}
+exports.MethodOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            idempotencyLevel: isSet(object.idempotencyLevel)
+                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
+                : 0,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.idempotencyLevel !== undefined &&
+            (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel));
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseUninterpretedOption() {
+    return {
+        name: [],
+        identifierValue: "",
+        positiveIntValue: "0",
+        negativeIntValue: "0",
+        doubleValue: 0,
+        stringValue: Buffer.alloc(0),
+        aggregateValue: "",
+    };
+}
+exports.UninterpretedOption = {
+    fromJSON(object) {
+        return {
+            name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [],
+            identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "",
+            positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0",
+            negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0",
+            doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0,
+            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
+            aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name) {
+            obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined);
+        }
+        else {
+            obj.name = [];
+        }
+        message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue);
+        message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue);
+        message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue);
+        message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue);
+        message.stringValue !== undefined &&
+            (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0)));
+        message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue);
+        return obj;
+    },
+};
+function createBaseUninterpretedOption_NamePart() {
+    return { namePart: "", isExtension: false };
+}
+exports.UninterpretedOption_NamePart = {
+    fromJSON(object) {
+        return {
+            namePart: isSet(object.namePart) ? String(object.namePart) : "",
+            isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.namePart !== undefined && (obj.namePart = message.namePart);
+        message.isExtension !== undefined && (obj.isExtension = message.isExtension);
+        return obj;
+    },
+};
+function createBaseSourceCodeInfo() {
+    return { location: [] };
+}
+exports.SourceCodeInfo = {
+    fromJSON(object) {
+        return {
+            location: Array.isArray(object?.location)
+                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.location) {
+            obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined);
+        }
+        else {
+            obj.location = [];
+        }
+        return obj;
+    },
+};
+function createBaseSourceCodeInfo_Location() {
+    return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] };
+}
+exports.SourceCodeInfo_Location = {
+    fromJSON(object) {
+        return {
+            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
+            span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [],
+            leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "",
+            trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "",
+            leadingDetachedComments: Array.isArray(object?.leadingDetachedComments)
+                ? object.leadingDetachedComments.map((e) => String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        else {
+            obj.path = [];
+        }
+        if (message.span) {
+            obj.span = message.span.map((e) => Math.round(e));
+        }
+        else {
+            obj.span = [];
+        }
+        message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments);
+        message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments);
+        if (message.leadingDetachedComments) {
+            obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e);
+        }
+        else {
+            obj.leadingDetachedComments = [];
+        }
+        return obj;
+    },
+};
+function createBaseGeneratedCodeInfo() {
+    return { annotation: [] };
+}
+exports.GeneratedCodeInfo = {
+    fromJSON(object) {
+        return {
+            annotation: Array.isArray(object?.annotation)
+                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.annotation) {
+            obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined);
+        }
+        else {
+            obj.annotation = [];
+        }
+        return obj;
+    },
+};
+function createBaseGeneratedCodeInfo_Annotation() {
+    return { path: [], sourceFile: "", begin: 0, end: 0 };
+}
+exports.GeneratedCodeInfo_Annotation = {
+    fromJSON(object) {
+        return {
+            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
+            sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "",
+            begin: isSet(object.begin) ? Number(object.begin) : 0,
+            end: isSet(object.end) ? Number(object.end) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        else {
+            obj.path = [];
+        }
+        message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile);
+        message.begin !== undefined && (obj.begin = Math.round(message.begin));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
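
Every module in this vendored @sigstore/protobuf-specs drop follows the same ts-proto codec shape: a createBase* helper supplying proto3 defaults, plus an exported object with fromJSON/toJSON. A minimal round-trip sketch; the require path is inferred from the sibling file headers below (this file's own header is not visible here, so the descriptor.js filename is an assumption:

    // Sketch only: path inferred from the sibling files added in this patch.
    const { UninterpretedOption_NamePart } = require(
      './node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs' +
      '/dist/__generated__/google/protobuf/descriptor.js')

    // fromJSON coerces loose input and backfills proto3 defaults.
    const part = UninterpretedOption_NamePart.fromJSON({ namePart: 'foo' })
    console.log(part) // { namePart: 'foo', isExtension: false }

    // toJSON emits every defined field, defaults included.
    console.log(UninterpretedOption_NamePart.toJSON(part))
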
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
new file mode 100644
index 0000000000000..159135fe87172
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
@@ -0,0 +1,24 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+function createBaseTimestamp() {
+    return { seconds: "0", nanos: 0 };
+}
+exports.Timestamp = {
+    fromJSON(object) {
+        return {
+            seconds: isSet(object.seconds) ? String(object.seconds) : "0",
+            nanos: isSet(object.nanos) ? Number(object.nanos) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.seconds !== undefined && (obj.seconds = message.seconds);
+        message.nanos !== undefined && (obj.nanos = Math.round(message.nanos));
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
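
The Timestamp codec above keeps seconds as a string so int64 values survive JSON, while nanos stays a JS number that is rounded on output. A quick sketch against the path from this hunk's header:

    const { Timestamp } = require(
      './node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs' +
      '/dist/__generated__/google/protobuf/timestamp.js')

    const ts = Timestamp.fromJSON({ seconds: '1691796308', nanos: 500000000 })
    console.log(Timestamp.toJSON(ts)) // { seconds: '1691796308', nanos: 500000000 }
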
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
new file mode 100644
index 0000000000000..1ef3e1b3356b7
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
@@ -0,0 +1,106 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
+/* eslint-disable */
+const envelope_1 = require("./envelope");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_rekor_1 = require("./sigstore_rekor");
+function createBaseTimestampVerificationData() {
+    return { rfc3161Timestamps: [] };
+}
+exports.TimestampVerificationData = {
+    fromJSON(object) {
+        return {
+            rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps)
+                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rfc3161Timestamps) {
+            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined);
+        }
+        else {
+            obj.rfc3161Timestamps = [];
+        }
+        return obj;
+    },
+};
+function createBaseVerificationMaterial() {
+    return { content: undefined, tlogEntries: [], timestampVerificationData: undefined };
+}
+exports.VerificationMaterial = {
+    fromJSON(object) {
+        return {
+            content: isSet(object.publicKey)
+                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
+                : isSet(object.x509CertificateChain)
+                    ? {
+                        $case: "x509CertificateChain",
+                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
+                    }
+                    : undefined,
+            tlogEntries: Array.isArray(object?.tlogEntries)
+                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
+                : [],
+            timestampVerificationData: isSet(object.timestampVerificationData)
+                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.content?.$case === "publicKey" &&
+            (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined);
+        message.content?.$case === "x509CertificateChain" &&
+            (obj.x509CertificateChain = message.content?.x509CertificateChain
+                ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain)
+                : undefined);
+        if (message.tlogEntries) {
+            obj.tlogEntries = message.tlogEntries.map((e) => e ? sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
+        }
+        else {
+            obj.tlogEntries = [];
+        }
+        message.timestampVerificationData !== undefined &&
+            (obj.timestampVerificationData = message.timestampVerificationData
+                ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
+                : undefined);
+        return obj;
+    },
+};
+function createBaseBundle() {
+    return { mediaType: "", verificationMaterial: undefined, content: undefined };
+}
+exports.Bundle = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+            verificationMaterial: isSet(object.verificationMaterial)
+                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
+                : undefined,
+            content: isSet(object.messageSignature)
+                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
+                : isSet(object.dsseEnvelope)
+                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+        message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
+            ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
+            : undefined);
+        message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
+            ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
+            : undefined);
+        message.content?.$case === "dsseEnvelope" &&
+            (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
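
sigstore_bundle.js models proto3 oneofs as a discriminated union under a $case key: whichever JSON property is present (messageSignature vs. dsseEnvelope, publicKey vs. x509CertificateChain) selects the branch. A hedged sketch; the mediaType value is illustrative:

    const { Bundle } = require(
      './node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs' +
      '/dist/__generated__/sigstore_bundle.js')

    // Presence of messageSignature (rather than dsseEnvelope) picks the branch.
    const bundle = Bundle.fromJSON({
      mediaType: 'application/vnd.dev.sigstore.bundle+json;version=0.2', // illustrative
      messageSignature: { signature: Buffer.from('sig').toString('base64') },
    })
    console.log(bundle.content.$case)                   // 'messageSignature'
    console.log(Bundle.toJSON(bundle).messageSignature) // { signature: 'c2ln' }
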
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
new file mode 100644
index 0000000000000..bcd654e9154b9
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -0,0 +1,457 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See <https://www.iana.org/assignments/named-information/named-information.xhtml> for more
+ * details.
+ * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
+ * any proto JSON serialization to emit the used hash algorithm, as default
+ * option is to *omit* the default value of an enum (which is the first
+ * value, represented by '0').
+ */
+var HashAlgorithm;
+(function (HashAlgorithm) {
+    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
+    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
+})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {}));
+function hashAlgorithmFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "HASH_ALGORITHM_UNSPECIFIED":
+            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
+        case 1:
+        case "SHA2_256":
+            return HashAlgorithm.SHA2_256;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
+function hashAlgorithmToJSON(object) {
+    switch (object) {
+        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
+            return "HASH_ALGORITHM_UNSPECIFIED";
+        case HashAlgorithm.SHA2_256:
+            return "SHA2_256";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
+/**
+ * Details of a specific public key, capturing the key encoding method,
+ * and signature algorithm.
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519 the valid permutations are listed as a linear set instead of a
+ * cartesian set (i.e. one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+var PublicKeyDetails;
+(function (PublicKeyDetails) {
+    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+    /** PKCS1_RSA_PKCS1V5 - RSA */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
+    /** PKCS1_RSA_PSS - See RFC8017 */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
+    /** PKIX_ECDSA_P256_SHA_256 - ECDSA */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
+    /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
+    /** PKIX_ED25519 - Ed 25519 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
+})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {}));
+function publicKeyDetailsFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
+            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
+        case 1:
+        case "PKCS1_RSA_PKCS1V5":
+            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
+        case 2:
+        case "PKCS1_RSA_PSS":
+            return PublicKeyDetails.PKCS1_RSA_PSS;
+        case 3:
+        case "PKIX_RSA_PKCS1V5":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
+        case 4:
+        case "PKIX_RSA_PSS":
+            return PublicKeyDetails.PKIX_RSA_PSS;
+        case 5:
+        case "PKIX_ECDSA_P256_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
+        case 6:
+        case "PKIX_ECDSA_P256_HMAC_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
+        case 7:
+        case "PKIX_ED25519":
+            return PublicKeyDetails.PKIX_ED25519;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
+function publicKeyDetailsToJSON(object) {
+    switch (object) {
+        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
+            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
+            return "PKCS1_RSA_PKCS1V5";
+        case PublicKeyDetails.PKCS1_RSA_PSS:
+            return "PKCS1_RSA_PSS";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
+            return "PKIX_RSA_PKCS1V5";
+        case PublicKeyDetails.PKIX_RSA_PSS:
+            return "PKIX_RSA_PSS";
+        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
+            return "PKIX_ECDSA_P256_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
+            return "PKIX_ECDSA_P256_HMAC_SHA_256";
+        case PublicKeyDetails.PKIX_ED25519:
+            return "PKIX_ED25519";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
+var SubjectAlternativeNameType;
+(function (SubjectAlternativeNameType) {
+    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
+    /**
+     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
+     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
+     * for more details.
+     */
+    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
+})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {}));
+function subjectAlternativeNameTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
+            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
+        case 1:
+        case "EMAIL":
+            return SubjectAlternativeNameType.EMAIL;
+        case 2:
+        case "URI":
+            return SubjectAlternativeNameType.URI;
+        case 3:
+        case "OTHER_NAME":
+            return SubjectAlternativeNameType.OTHER_NAME;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
+function subjectAlternativeNameTypeToJSON(object) {
+    switch (object) {
+        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
+            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+        case SubjectAlternativeNameType.EMAIL:
+            return "EMAIL";
+        case SubjectAlternativeNameType.URI:
+            return "URI";
+        case SubjectAlternativeNameType.OTHER_NAME:
+            return "OTHER_NAME";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
+function createBaseHashOutput() {
+    return { algorithm: 0, digest: Buffer.alloc(0) };
+}
+exports.HashOutput = {
+    fromJSON(object) {
+        return {
+            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
+            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm));
+        message.digest !== undefined &&
+            (obj.digest = base64FromBytes(message.digest !== undefined ? message.digest : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseMessageSignature() {
+    return { messageDigest: undefined, signature: Buffer.alloc(0) };
+}
+exports.MessageSignature = {
+    fromJSON(object) {
+        return {
+            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
+            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.messageDigest !== undefined &&
+            (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined);
+        message.signature !== undefined &&
+            (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseLogId() {
+    return { keyId: Buffer.alloc(0) };
+}
+exports.LogId = {
+    fromJSON(object) {
+        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.keyId !== undefined &&
+            (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseRFC3161SignedTimestamp() {
+    return { signedTimestamp: Buffer.alloc(0) };
+}
+exports.RFC3161SignedTimestamp = {
+    fromJSON(object) {
+        return {
+            signedTimestamp: isSet(object.signedTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signedTimestamp !== undefined &&
+            (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBasePublicKey() {
+    return { rawBytes: undefined, keyDetails: 0, validFor: undefined };
+}
+exports.PublicKey = {
+    fromJSON(object) {
+        return {
+            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
+            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
+            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.rawBytes !== undefined &&
+            (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined);
+        message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails));
+        message.validFor !== undefined &&
+            (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined);
+        return obj;
+    },
+};
+function createBasePublicKeyIdentifier() {
+    return { hint: "" };
+}
+exports.PublicKeyIdentifier = {
+    fromJSON(object) {
+        return { hint: isSet(object.hint) ? String(object.hint) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.hint !== undefined && (obj.hint = message.hint);
+        return obj;
+    },
+};
+function createBaseObjectIdentifier() {
+    return { id: [] };
+}
+exports.ObjectIdentifier = {
+    fromJSON(object) {
+        return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.id) {
+            obj.id = message.id.map((e) => Math.round(e));
+        }
+        else {
+            obj.id = [];
+        }
+        return obj;
+    },
+};
+function createBaseObjectIdentifierValuePair() {
+    return { oid: undefined, value: Buffer.alloc(0) };
+}
+exports.ObjectIdentifierValuePair = {
+    fromJSON(object) {
+        return {
+            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined);
+        message.value !== undefined &&
+            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseDistinguishedName() {
+    return { organization: "", commonName: "" };
+}
+exports.DistinguishedName = {
+    fromJSON(object) {
+        return {
+            organization: isSet(object.organization) ? String(object.organization) : "",
+            commonName: isSet(object.commonName) ? String(object.commonName) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.organization !== undefined && (obj.organization = message.organization);
+        message.commonName !== undefined && (obj.commonName = message.commonName);
+        return obj;
+    },
+};
+function createBaseX509Certificate() {
+    return { rawBytes: Buffer.alloc(0) };
+}
+exports.X509Certificate = {
+    fromJSON(object) {
+        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.rawBytes !== undefined &&
+            (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseSubjectAlternativeName() {
+    return { type: 0, identity: undefined };
+}
+exports.SubjectAlternativeName = {
+    fromJSON(object) {
+        return {
+            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
+            identity: isSet(object.regexp)
+                ? { $case: "regexp", regexp: String(object.regexp) }
+                : isSet(object.value)
+                    ? { $case: "value", value: String(object.value) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type));
+        message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp);
+        message.identity?.$case === "value" && (obj.value = message.identity?.value);
+        return obj;
+    },
+};
+function createBaseX509CertificateChain() {
+    return { certificates: [] };
+}
+exports.X509CertificateChain = {
+    fromJSON(object) {
+        return {
+            certificates: Array.isArray(object?.certificates)
+                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.certificates) {
+            obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined);
+        }
+        else {
+            obj.certificates = [];
+        }
+        return obj;
+    },
+};
+function createBaseTimeRange() {
+    return { start: undefined, end: undefined };
+}
+exports.TimeRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
+            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = message.start.toISOString());
+        message.end !== undefined && (obj.end = message.end.toISOString());
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function fromTimestamp(t) {
+    let millis = Number(t.seconds) * 1000;
+    millis += t.nanos / 1000000;
+    return new Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
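
Two conventions in sigstore_common.js worth flagging: enum fields round-trip through paired *FromJSON/*ToJSON converters that accept either the numeric or string form (and throw on anything else), and google.protobuf.Timestamp fields surface as JS Date objects, since fromJsonTimestamp accepts a Date, an ISO-8601 string, or a Timestamp-shaped object. A small sketch:

    const common = require(
      './node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs' +
      '/dist/__generated__/sigstore_common.js')

    // Enum converters take the number or the name; anything else throws.
    console.log(common.hashAlgorithmFromJSON('SHA2_256')) // 1

    // TimeRange decodes to Dates and always re-encodes as ISO strings.
    const range = common.TimeRange.fromJSON({
      start: '2023-01-01T00:00:00.000Z',
      end: { seconds: '1700000000', nanos: 0 },
    })
    console.log(common.TimeRange.toJSON(range))
    // { start: '2023-01-01T00:00:00.000Z', end: '2023-11-14T22:13:20.000Z' }
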
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
new file mode 100644
index 0000000000000..398193b2075a7
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
@@ -0,0 +1,167 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+function createBaseKindVersion() {
+    return { kind: "", version: "" };
+}
+exports.KindVersion = {
+    fromJSON(object) {
+        return {
+            kind: isSet(object.kind) ? String(object.kind) : "",
+            version: isSet(object.version) ? String(object.version) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.kind !== undefined && (obj.kind = message.kind);
+        message.version !== undefined && (obj.version = message.version);
+        return obj;
+    },
+};
+function createBaseCheckpoint() {
+    return { envelope: "" };
+}
+exports.Checkpoint = {
+    fromJSON(object) {
+        return { envelope: isSet(object.envelope) ? String(object.envelope) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.envelope !== undefined && (obj.envelope = message.envelope);
+        return obj;
+    },
+};
+function createBaseInclusionProof() {
+    return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined };
+}
+exports.InclusionProof = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
+            treeSize: isSet(object.treeSize) ? String(object.treeSize) : "0",
+            hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [],
+            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
+        message.rootHash !== undefined &&
+            (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0)));
+        message.treeSize !== undefined && (obj.treeSize = message.treeSize);
+        if (message.hashes) {
+            obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0)));
+        }
+        else {
+            obj.hashes = [];
+        }
+        message.checkpoint !== undefined &&
+            (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined);
+        return obj;
+    },
+};
+function createBaseInclusionPromise() {
+    return { signedEntryTimestamp: Buffer.alloc(0) };
+}
+exports.InclusionPromise = {
+    fromJSON(object) {
+        return {
+            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signedEntryTimestamp !== undefined &&
+            (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseTransparencyLogEntry() {
+    return {
+        logIndex: "0",
+        logId: undefined,
+        kindVersion: undefined,
+        integratedTime: "0",
+        inclusionPromise: undefined,
+        inclusionProof: undefined,
+        canonicalizedBody: Buffer.alloc(0),
+    };
+}
+exports.TransparencyLogEntry = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
+            integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0",
+            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
+            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
+            canonicalizedBody: isSet(object.canonicalizedBody)
+                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
+        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
+        message.kindVersion !== undefined &&
+            (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined);
+        message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime);
+        message.inclusionPromise !== undefined &&
+            (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined);
+        message.inclusionProof !== undefined &&
+            (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined);
+        message.canonicalizedBody !== undefined &&
+            (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? message.canonicalizedBody : Buffer.alloc(0)));
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
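
In sigstore_rekor.js the int64 fields (logIndex, treeSize, integratedTime) stay strings end to end, while bytes fields are Buffers in memory and base64 in JSON. Sketch:

    const { InclusionProof } = require(
      './node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs' +
      '/dist/__generated__/sigstore_rekor.js')

    const proof = InclusionProof.fromJSON({
      logIndex: '42',
      treeSize: '100',
      rootHash: Buffer.from('abc').toString('base64'),
      hashes: [Buffer.from('h1').toString('base64')],
    })
    console.log(Buffer.isBuffer(proof.rootHash))       // true
    console.log(InclusionProof.toJSON(proof).rootHash) // 'YWJj'
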
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
new file mode 100644
index 0000000000000..05e566767cdb2
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
@@ -0,0 +1,103 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+function createBaseTransparencyLogInstance() {
+    return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined };
+}
+exports.TransparencyLogInstance = {
+    fromJSON(object) {
+        return {
+            baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "",
+            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
+            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl);
+        message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm));
+        message.publicKey !== undefined &&
+            (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined);
+        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
+        return obj;
+    },
+};
+function createBaseCertificateAuthority() {
+    return { subject: undefined, uri: "", certChain: undefined, validFor: undefined };
+}
+exports.CertificateAuthority = {
+    fromJSON(object) {
+        return {
+            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
+            uri: isSet(object.uri) ? String(object.uri) : "",
+            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
+            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.subject !== undefined &&
+            (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined);
+        message.uri !== undefined && (obj.uri = message.uri);
+        message.certChain !== undefined &&
+            (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined);
+        message.validFor !== undefined &&
+            (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined);
+        return obj;
+    },
+};
+function createBaseTrustedRoot() {
+    return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] };
+}
+exports.TrustedRoot = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+            tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [],
+            certificateAuthorities: Array.isArray(object?.certificateAuthorities)
+                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+            ctlogs: Array.isArray(object?.ctlogs)
+                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+                : [],
+            timestampAuthorities: Array.isArray(object?.timestampAuthorities)
+                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+        if (message.tlogs) {
+            obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+        }
+        else {
+            obj.tlogs = [];
+        }
+        if (message.certificateAuthorities) {
+            obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+        }
+        else {
+            obj.certificateAuthorities = [];
+        }
+        if (message.ctlogs) {
+            obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+        }
+        else {
+            obj.ctlogs = [];
+        }
+        if (message.timestampAuthorities) {
+            obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+        }
+        else {
+            obj.timestampAuthorities = [];
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
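
And in sigstore_trustroot.js every repeated field defaults to an empty array on both decode and encode, so even an empty document yields a fully shaped TrustedRoot:

    const { TrustedRoot } = require(
      './node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs' +
      '/dist/__generated__/sigstore_trustroot.js')

    const root = TrustedRoot.fromJSON({}) // nothing set
    console.log(root.tlogs)                                      // []
    console.log(TrustedRoot.toJSON(root).certificateAuthorities) // []
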
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
new file mode 100644
index 0000000000000..8a72b89761869
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
@@ -0,0 +1,273 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
+/* eslint-disable */
+const sigstore_bundle_1 = require("./sigstore_bundle");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_trustroot_1 = require("./sigstore_trustroot");
+function createBaseCertificateIdentity() {
+    return { issuer: "", san: undefined, oids: [] };
+}
+exports.CertificateIdentity = {
+    fromJSON(object) {
+        return {
+            issuer: isSet(object.issuer) ? String(object.issuer) : "",
+            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
+            oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.issuer !== undefined && (obj.issuer = message.issuer);
+        message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined);
+        if (message.oids) {
+            obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined);
+        }
+        else {
+            obj.oids = [];
+        }
+        return obj;
+    },
+};
+function createBaseCertificateIdentities() {
+    return { identities: [] };
+}
+exports.CertificateIdentities = {
+    fromJSON(object) {
+        return {
+            identities: Array.isArray(object?.identities)
+                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.identities) {
+            obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined);
+        }
+        else {
+            obj.identities = [];
+        }
+        return obj;
+    },
+};
+function createBasePublicKeyIdentities() {
+    return { publicKeys: [] };
+}
+exports.PublicKeyIdentities = {
+    fromJSON(object) {
+        return {
+            publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.publicKeys) {
+            obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined);
+        }
+        else {
+            obj.publicKeys = [];
+        }
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions() {
+    return { signers: undefined, tlogOptions: undefined, ctlogOptions: undefined, tsaOptions: undefined };
+}
+exports.ArtifactVerificationOptions = {
+    fromJSON(object) {
+        return {
+            signers: isSet(object.certificateIdentities)
+                ? {
+                    $case: "certificateIdentities",
+                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
+                }
+                : isSet(object.publicKeys)
+                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
+                    : undefined,
+            tlogOptions: isSet(object.tlogOptions)
+                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
+                : undefined,
+            ctlogOptions: isSet(object.ctlogOptions)
+                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
+                : undefined,
+            tsaOptions: isSet(object.tsaOptions)
+                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signers?.$case === "certificateIdentities" &&
+            (obj.certificateIdentities = message.signers?.certificateIdentities
+                ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities)
+                : undefined);
+        message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys
+            ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys)
+            : undefined);
+        message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions
+            ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions)
+            : undefined);
+        message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions
+            ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions)
+            : undefined);
+        message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions
+            ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions)
+            : undefined);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_TlogOptions() {
+    return { threshold: 0, performOnlineVerification: false, disable: false };
+}
+exports.ArtifactVerificationOptions_TlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            performOnlineVerification: isSet(object.performOnlineVerification)
+                ? Boolean(object.performOnlineVerification)
+                : false,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.performOnlineVerification !== undefined &&
+            (obj.performOnlineVerification = message.performOnlineVerification);
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_CtlogOptions() {
+    return { threshold: 0, detachedSct: false, disable: false };
+}
+exports.ArtifactVerificationOptions_CtlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            detachedSct: isSet(object.detachedSct) ? Boolean(object.detachedSct) : false,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.detachedSct !== undefined && (obj.detachedSct = message.detachedSct);
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() {
+    return { threshold: 0, disable: false };
+}
+exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifact() {
+    return { data: undefined };
+}
+exports.Artifact = {
+    fromJSON(object) {
+        return {
+            data: isSet(object.artifactUri)
+                ? { $case: "artifactUri", artifactUri: String(object.artifactUri) }
+                : isSet(object.artifact)
+                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri);
+        message.data?.$case === "artifact" &&
+            (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined);
+        return obj;
+    },
+};
+function createBaseInput() {
+    return {
+        artifactTrustRoot: undefined,
+        artifactVerificationOptions: undefined,
+        bundle: undefined,
+        artifact: undefined,
+    };
+}
+exports.Input = {
+    fromJSON(object) {
+        return {
+            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
+            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
+                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
+                : undefined,
+            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
+            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.artifactTrustRoot !== undefined &&
+            (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined);
+        message.artifactVerificationOptions !== undefined &&
+            (obj.artifactVerificationOptions = message.artifactVerificationOptions
+                ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions)
+                : undefined);
+        message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined);
+        message.artifact !== undefined && (obj.artifact = message.artifact ? exports.Artifact.toJSON(message.artifact) : undefined);
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
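
The generated codecs above all follow the same ts-proto pattern: fromJSON maps a plain JSON object onto a message whose oneof fields carry a $case discriminant, and toJSON flattens the discriminant back onto a single JSON key. A minimal sketch of the pattern, with hypothetical names, that round-trips the same way:

    function isSet(value) {
      return value !== null && value !== undefined;
    }
    const Shape = {
      fromJSON(object) {
        return {
          kind: isSet(object.circle)
            ? { $case: "circle", circle: Number(object.circle) }
            : isSet(object.square)
              ? { $case: "square", square: Number(object.square) }
              : undefined,
        };
      },
      toJSON(message) {
        const obj = {};
        message.kind?.$case === "circle" && (obj.circle = message.kind.circle);
        message.kind?.$case === "square" && (obj.square = message.kind.square);
        return obj;
      },
    };
    // Round trip: { circle: 2 } -> { $case: "circle", ... } -> { circle: 2 }
    console.log(Shape.toJSON(Shape.fromJSON({ circle: 2 })));
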
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js
new file mode 100644
index 0000000000000..eafb768c48fca
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js
@@ -0,0 +1,37 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+__exportStar(require("./__generated__/envelope"), exports);
+__exportStar(require("./__generated__/sigstore_bundle"), exports);
+__exportStar(require("./__generated__/sigstore_common"), exports);
+__exportStar(require("./__generated__/sigstore_rekor"), exports);
+__exportStar(require("./__generated__/sigstore_trustroot"), exports);
+__exportStar(require("./__generated__/sigstore_verification"), exports);
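
The compiled index.js above is a plain CommonJS barrel: __exportStar copies every named export from each generated module onto the package exports, skipping "default" and anything already defined. A hedged usage sketch (the payload value is illustrative) showing that consumers can pull generated codecs straight from the package root:

    const { Envelope } = require('@sigstore/protobuf-specs');

    const env = Envelope.fromJSON({
      payload: Buffer.from('hello').toString('base64'),
      payloadType: 'text/plain',
      signatures: [],
    });
    console.log(Envelope.toJSON(env).payloadType); // 'text/plain'
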
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json
new file mode 100644
index 0000000000000..450abb157f31a
--- /dev/null
+++ b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json
@@ -0,0 +1,31 @@
+{
+  "name": "@sigstore/protobuf-specs",
+  "version": "0.2.1",
+  "description": "code-signing for npm packages",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "build": "tsc"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/protobuf-specs.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "bugs": {
+    "url": "https://github.com/sigstore/protobuf-specs/issues"
+  },
+  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
+  "devDependencies": {
+    "@tsconfig/node14": "^1.0.3",
+    "@types/node": "^18.14.0",
+    "typescript": "^4.9.5"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  }
+}
diff --git a/node_modules/@sigstore/tuf/package.json b/node_modules/@sigstore/tuf/package.json
index 286d481a4d39f..4c534fa766a5e 100644
--- a/node_modules/@sigstore/tuf/package.json
+++ b/node_modules/@sigstore/tuf/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@sigstore/tuf",
-  "version": "1.0.2",
+  "version": "2.0.0",
   "description": "Client for the Sigstore TUF repository",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -28,14 +28,14 @@
   },
   "devDependencies": {
     "@sigstore/jest": "^0.0.0",
-    "@tufjs/repo-mock": "^1.1.0",
+    "@tufjs/repo-mock": "^2.0.0",
     "@types/make-fetch-happen": "^10.0.0"
   },
   "dependencies": {
-    "@sigstore/protobuf-specs": "^0.1.0",
-    "tuf-js": "^1.1.7"
+    "@sigstore/protobuf-specs": "^0.2.1",
+    "tuf-js": "^2.0.0"
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   }
 }
diff --git a/node_modules/@tufjs/canonical-json/package.json b/node_modules/@tufjs/canonical-json/package.json
index 688c9b93c3a4e..886c0c3969225 100644
--- a/node_modules/@tufjs/canonical-json/package.json
+++ b/node_modules/@tufjs/canonical-json/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@tufjs/canonical-json",
-  "version": "1.0.0",
+  "version": "2.0.0",
   "description": "OLPC JSON canonicalization",
   "main": "lib/index.js",
   "typings": "lib/index.d.ts",
@@ -19,7 +19,7 @@
     "type": "git",
     "url": "git+https://github.com/theupdateframework/tuf-js.git"
   },
-  "homepage": "https://github.com/theupdateframework/tuf-js/packages/canonical-json#readme",
+  "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/canonical-json#readme",
   "bugs": {
     "url": "https://github.com/theupdateframework/tuf-js/issues"
   },
@@ -29,11 +29,7 @@
   "scripts": {
     "test": "jest"
   },
-  "devDependencies": {
-    "@types/node": "^18.14.1",
-    "typescript": "^4.9.5"
-  },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   }
 }
diff --git a/node_modules/@tufjs/models/dist/base.js b/node_modules/@tufjs/models/dist/base.js
index d89a089c33092..259f6799c13a0 100644
--- a/node_modules/@tufjs/models/dist/base.js
+++ b/node_modules/@tufjs/models/dist/base.js
@@ -14,7 +14,7 @@ var MetadataKind;
     MetadataKind["Timestamp"] = "timestamp";
     MetadataKind["Snapshot"] = "snapshot";
     MetadataKind["Targets"] = "targets";
-})(MetadataKind = exports.MetadataKind || (exports.MetadataKind = {}));
+})(MetadataKind || (exports.MetadataKind = MetadataKind = {}));
 function isMetadataKind(value) {
     return (typeof value === 'string' &&
         Object.values(MetadataKind).includes(value));
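
The one-line change to base.js tracks TypeScript 5.0's revised enum emit: the old output read the possibly-undefined export back into the local binding, while the new output keeps the local object authoritative and assigns it to exports exactly once. Behavior is identical for consumers; a sketch with illustrative enums:

    // pre-5.0 emit: read the export back into the local binding
    var Color;
    (function (Color) {
        Color["Red"] = "red";
    })(Color = exports.Color || (exports.Color = {}));

    // 5.0+ emit: assign the local object to exports exactly once
    var Mode;
    (function (Mode) {
        Mode["Fast"] = "fast";
    })(Mode || (exports.Mode = Mode = {}));
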
diff --git a/node_modules/@tufjs/models/package.json b/node_modules/@tufjs/models/package.json
index 6711ee0dababc..60368242ab556 100644
--- a/node_modules/@tufjs/models/package.json
+++ b/node_modules/@tufjs/models/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@tufjs/models",
-  "version": "1.0.4",
+  "version": "2.0.0",
   "description": "TUF metadata models",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -27,15 +27,11 @@
     "url": "https://github.com/theupdateframework/tuf-js/issues"
   },
   "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme",
-  "devDependencies": {
-    "@types/node": "^18.16.3",
-    "typescript": "^5.0.4"
-  },
   "dependencies": {
-    "@tufjs/canonical-json": "1.0.0",
-    "minimatch": "^9.0.0"
+    "@tufjs/canonical-json": "2.0.0",
+    "minimatch": "^9.0.3"
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   }
 }
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE b/node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 The Sigstore Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js
new file mode 100644
index 0000000000000..c9a8ee92b531e
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js
@@ -0,0 +1,44 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.appDataPath = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const os_1 = __importDefault(require("os"));
+const path_1 = __importDefault(require("path"));
+function appDataPath(name) {
+    const homedir = os_1.default.homedir();
+    switch (process.platform) {
+        /* istanbul ignore next */
+        case 'darwin': {
+            const appSupport = path_1.default.join(homedir, 'Library', 'Application Support');
+            return path_1.default.join(appSupport, name);
+        }
+        /* istanbul ignore next */
+        case 'win32': {
+            const localAppData = process.env.LOCALAPPDATA || path_1.default.join(homedir, 'AppData', 'Local');
+            return path_1.default.join(localAppData, name, 'Data');
+        }
+        /* istanbul ignore next */
+        default: {
+            const localData = process.env.XDG_DATA_HOME || path_1.default.join(homedir, '.local', 'share');
+            return path_1.default.join(localData, name);
+        }
+    }
+}
+exports.appDataPath = appDataPath;
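
appDataPath resolves a per-platform data directory: Library/Application Support on macOS, LOCALAPPDATA (with a Data suffix) on Windows, and XDG_DATA_HOME or ~/.local/share elsewhere. A hedged usage sketch, assuming it is required from within the package:

    const { appDataPath } = require('./appdata');

    // e.g. /home/alice/.local/share/sigstore-js on Linux without XDG_DATA_HOME
    console.log(appDataPath('sigstore-js'));
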
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js
new file mode 100644
index 0000000000000..08d6b61840909
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js
@@ -0,0 +1,101 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TUFClient = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fs_1 = __importDefault(require("fs"));
+const path_1 = __importDefault(require("path"));
+const tuf_js_1 = require("tuf-js");
+const target_1 = require("./target");
+class TUFClient {
+    constructor(options) {
+        initTufCache(options.cachePath, options.rootPath);
+        const remote = initRemoteConfig(options.cachePath, options.mirrorURL);
+        this.updater = initClient(options.cachePath, remote, options);
+    }
+    async refresh() {
+        return this.updater.refresh();
+    }
+    getTarget(targetName) {
+        return (0, target_1.readTarget)(this.updater, targetName);
+    }
+}
+exports.TUFClient = TUFClient;
+// Initializes the TUF cache directory structure including the initial
+// root.json file. If the cache directory does not exist, it will be
+// created. If the targets directory does not exist, it will be created.
+// If the root.json file does not exist, it will be copied from the
+// rootPath argument.
+function initTufCache(cachePath, tufRootPath) {
+    const targetsPath = path_1.default.join(cachePath, 'targets');
+    const cachedRootPath = path_1.default.join(cachePath, 'root.json');
+    if (!fs_1.default.existsSync(cachePath)) {
+        fs_1.default.mkdirSync(cachePath, { recursive: true });
+    }
+    if (!fs_1.default.existsSync(targetsPath)) {
+        fs_1.default.mkdirSync(targetsPath);
+    }
+    if (!fs_1.default.existsSync(cachedRootPath)) {
+        fs_1.default.copyFileSync(tufRootPath, cachedRootPath);
+    }
+    return cachePath;
+}
+// Initializes the remote.json file, which contains the URL of the TUF
+// repository. If the file does not exist, it will be created. If the file
+// exists, it will be parsed and returned.
+function initRemoteConfig(rootDir, mirrorURL) {
+    let remoteConfig;
+    const remoteConfigPath = path_1.default.join(rootDir, 'remote.json');
+    if (fs_1.default.existsSync(remoteConfigPath)) {
+        const data = fs_1.default.readFileSync(remoteConfigPath, 'utf-8');
+        remoteConfig = JSON.parse(data);
+    }
+    if (!remoteConfig) {
+        remoteConfig = { mirror: mirrorURL };
+        fs_1.default.writeFileSync(remoteConfigPath, JSON.stringify(remoteConfig));
+    }
+    return remoteConfig;
+}
+function initClient(cachePath, remote, options) {
+    const baseURL = remote.mirror;
+    const config = {
+        fetchTimeout: options.timeout,
+    };
+    // tuf-js only supports a number for fetchRetries so we have to
+    // convert the boolean and object options to a number.
+    /* istanbul ignore if */
+    if (typeof options.retry !== 'undefined') {
+        if (typeof options.retry === 'number') {
+            config.fetchRetries = options.retry;
+        }
+        else if (typeof options.retry === 'object') {
+            config.fetchRetries = options.retry.retries;
+        }
+        else if (options.retry === true) {
+            config.fetchRetries = 1;
+        }
+    }
+    return new tuf_js_1.Updater({
+        metadataBaseUrl: baseURL,
+        targetBaseUrl: `${baseURL}/targets`,
+        metadataDir: cachePath,
+        targetDir: path_1.default.join(cachePath, 'targets'),
+        config,
+    });
+}
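
The fetchRetries conversion in initClient collapses make-fetch-happen-style retry options (number, object, or boolean) into the single number tuf-js understands. The same normalization in isolation, as a sketch with illustrative names:

    function toFetchRetries(retry) {
      if (typeof retry === 'number') return retry;          // e.g. 3
      if (typeof retry === 'object') return retry.retries;  // e.g. { retries: 2 }
      if (retry === true) return 1;                         // a single retry
      return undefined;                                     // keep the tuf-js default
    }

    console.log(toFetchRetries({ retries: 2 })); // 2
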
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js
new file mode 100644
index 0000000000000..e13971b289ff2
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js
@@ -0,0 +1,12 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TUFError = void 0;
+class TUFError extends Error {
+    constructor({ code, message, cause, }) {
+        super(message);
+        this.code = code;
+        this.cause = cause;
+        this.name = this.constructor.name;
+    }
+}
+exports.TUFError = TUFError;
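
TUFError layers a machine-readable code and the underlying cause onto Error, and sets name from the constructor so it reports as 'TUFError'. A two-line sketch, assuming a relative require from within the package:

    const { TUFError } = require('./error');

    const e = new TUFError({ code: 'TUF_READ_TARGET_ERROR', message: 'boom' });
    console.log(e.name, e.code); // 'TUFError' 'TUF_READ_TARGET_ERROR'
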
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js
new file mode 100644
index 0000000000000..0d201c356dffc
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js
@@ -0,0 +1,55 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TUFError = exports.initTUF = exports.getTrustedRoot = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const appdata_1 = require("./appdata");
+const client_1 = require("./client");
+const DEFAULT_CACHE_DIR = 'sigstore-js';
+const DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev';
+const DEFAULT_TUF_ROOT_PATH = '../store/public-good-instance-root.json';
+const DEFAULT_RETRY = { retries: 2 };
+const DEFAULT_TIMEOUT = 5000;
+const TRUSTED_ROOT_TARGET = 'trusted_root.json';
+async function getTrustedRoot(
+/* istanbul ignore next */
+options = {}) {
+    const client = createClient(options);
+    const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET);
+    return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot));
+}
+exports.getTrustedRoot = getTrustedRoot;
+async function initTUF(
+/* istanbul ignore next */
+options = {}) {
+    const client = createClient(options);
+    return client.refresh().then(() => client);
+}
+exports.initTUF = initTUF;
+// Create a TUF client with default options
+function createClient(options) {
+    /* istanbul ignore next */
+    return new client_1.TUFClient({
+        cachePath: options.cachePath || (0, appdata_1.appDataPath)(DEFAULT_CACHE_DIR),
+        rootPath: options.rootPath || require.resolve(DEFAULT_TUF_ROOT_PATH),
+        mirrorURL: options.mirrorURL || DEFAULT_MIRROR_URL,
+        retry: options.retry ?? DEFAULT_RETRY,
+        timeout: options.timeout ?? DEFAULT_TIMEOUT,
+    });
+}
+var error_1 = require("./error");
+Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return error_1.TUFError; } });
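
index.js wires the defaults together (public-good mirror, bundled root, two retries, five-second timeout) and exposes getTrustedRoot and initTUF on top of TUFClient. A hedged usage sketch of the exported surface:

    const { getTrustedRoot, TUFError } = require('@sigstore/tuf');

    async function main() {
      try {
        // Refreshes TUF metadata and fetches trusted_root.json via the
        // default mirror; the decoded TrustedRoot message is returned.
        const root = await getTrustedRoot();
        console.log(root.mediaType);
      } catch (err) {
        if (err instanceof TUFError) {
          console.error(err.code, err.message);
        } else {
          throw err;
        }
      }
    }

    main();
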
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js
new file mode 100644
index 0000000000000..29eaf99a7e721
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js
@@ -0,0 +1,80 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.readTarget = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fs_1 = __importDefault(require("fs"));
+const error_1 = require("./error");
+// Downloads and returns the specified target from the provided TUF Updater.
+async function readTarget(tuf, targetPath) {
+    const path = await getTargetPath(tuf, targetPath);
+    return new Promise((resolve, reject) => {
+        fs_1.default.readFile(path, 'utf-8', (err, data) => {
+            if (err) {
+                reject(new error_1.TUFError({
+                    code: 'TUF_READ_TARGET_ERROR',
+                    message: `error reading target ${path}`,
+                    cause: err,
+                }));
+            }
+            else {
+                resolve(data);
+            }
+        });
+    });
+}
+exports.readTarget = readTarget;
+// Returns the local path to the specified target. If the target is not yet
+// cached locally, the provided TUF Updater will be used to download and
+// cache the target.
+async function getTargetPath(tuf, target) {
+    let targetInfo;
+    try {
+        targetInfo = await tuf.getTargetInfo(target);
+    }
+    catch (err) {
+        throw new error_1.TUFError({
+            code: 'TUF_REFRESH_METADATA_ERROR',
+            message: 'error refreshing TUF metadata',
+            cause: err,
+        });
+    }
+    if (!targetInfo) {
+        throw new error_1.TUFError({
+            code: 'TUF_FIND_TARGET_ERROR',
+            message: `target ${target} not found`,
+        });
+    }
+    let path = await tuf.findCachedTarget(targetInfo);
+    // An empty path here means the target has not been cached locally, or is
+    // out of date. In either case, we need to download it.
+    if (!path) {
+        try {
+            path = await tuf.downloadTarget(targetInfo);
+        }
+        catch (err) {
+            throw new error_1.TUFError({
+                code: 'TUF_DOWNLOAD_TARGET_ERROR',
+                message: `error downloading target ${target}`,
+                cause: err,
+            });
+        }
+    }
+    return path;
+}
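
Each failure domain in target.js maps to its own TUFError code: metadata refresh, target lookup, download, and the final local read. A hedged sketch of handling them at a call site (client construction elided):

    async function readTrustedRoot(client) {
      try {
        return JSON.parse(await client.getTarget('trusted_root.json'));
      } catch (err) {
        switch (err.code) {
          case 'TUF_REFRESH_METADATA_ERROR': // network or metadata problem
          case 'TUF_FIND_TARGET_ERROR':      // target missing upstream
          case 'TUF_DOWNLOAD_TARGET_ERROR':  // fetch of the target failed
          case 'TUF_READ_TARGET_ERROR':      // local read failed
            console.error(err.code, err.cause);
            return undefined;
          default:
            throw err;
        }
      }
    }
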
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 The Sigstore Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
new file mode 100644
index 0000000000000..0c367a8384454
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
@@ -0,0 +1,89 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Envelope = void 0;
+function createBaseEnvelope() {
+    return { payload: Buffer.alloc(0), payloadType: "", signatures: [] };
+}
+exports.Envelope = {
+    fromJSON(object) {
+        return {
+            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
+            payloadType: isSet(object.payloadType) ? String(object.payloadType) : "",
+            signatures: Array.isArray(object?.signatures) ? object.signatures.map((e) => exports.Signature.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.payload !== undefined &&
+            (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0)));
+        message.payloadType !== undefined && (obj.payloadType = message.payloadType);
+        if (message.signatures) {
+            obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined);
+        }
+        else {
+            obj.signatures = [];
+        }
+        return obj;
+    },
+};
+function createBaseSignature() {
+    return { sig: Buffer.alloc(0), keyid: "" };
+}
+exports.Signature = {
+    fromJSON(object) {
+        return {
+            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
+            keyid: isSet(object.keyid) ? String(object.keyid) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0)));
+        message.keyid !== undefined && (obj.keyid = message.keyid);
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
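
Like the other generated modules, envelope.js carries its own environment-agnostic base64 helpers: the Buffer branch runs on Node, the atob/btoa branch in browsers. The Node-side round trip they implement, as a two-line sketch:

    const bytes = Uint8Array.from(Buffer.from('aGVsbG8=', 'base64')); // "hello"
    console.log(Buffer.from(bytes).toString('base64'));               // 'aGVsbG8='
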
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
new file mode 100644
index 0000000000000..073093b8371a8
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
@@ -0,0 +1,185 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
+/* eslint-disable */
+const any_1 = require("./google/protobuf/any");
+const timestamp_1 = require("./google/protobuf/timestamp");
+function createBaseCloudEvent() {
+    return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined };
+}
+exports.CloudEvent = {
+    fromJSON(object) {
+        return {
+            id: isSet(object.id) ? String(object.id) : "",
+            source: isSet(object.source) ? String(object.source) : "",
+            specVersion: isSet(object.specVersion) ? String(object.specVersion) : "",
+            type: isSet(object.type) ? String(object.type) : "",
+            attributes: isObject(object.attributes)
+                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
+                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
+                    return acc;
+                }, {})
+                : {},
+            data: isSet(object.binaryData)
+                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
+                : isSet(object.textData)
+                    ? { $case: "textData", textData: String(object.textData) }
+                    : isSet(object.protoData)
+                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
+                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.id !== undefined && (obj.id = message.id);
+        message.source !== undefined && (obj.source = message.source);
+        message.specVersion !== undefined && (obj.specVersion = message.specVersion);
+        message.type !== undefined && (obj.type = message.type);
+        obj.attributes = {};
+        if (message.attributes) {
+            Object.entries(message.attributes).forEach(([k, v]) => {
+                obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
+            });
+        }
+        message.data?.$case === "binaryData" &&
+            (obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined);
+        message.data?.$case === "textData" && (obj.textData = message.data?.textData);
+        message.data?.$case === "protoData" &&
+            (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined);
+        return obj;
+    },
+};
+function createBaseCloudEvent_AttributesEntry() {
+    return { key: "", value: undefined };
+}
+exports.CloudEvent_AttributesEntry = {
+    fromJSON(object) {
+        return {
+            key: isSet(object.key) ? String(object.key) : "",
+            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.key !== undefined && (obj.key = message.key);
+        message.value !== undefined &&
+            (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined);
+        return obj;
+    },
+};
+function createBaseCloudEvent_CloudEventAttributeValue() {
+    return { attr: undefined };
+}
+exports.CloudEvent_CloudEventAttributeValue = {
+    fromJSON(object) {
+        return {
+            attr: isSet(object.ceBoolean)
+                ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) }
+                : isSet(object.ceInteger)
+                    ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) }
+                    : isSet(object.ceString)
+                        ? { $case: "ceString", ceString: String(object.ceString) }
+                        : isSet(object.ceBytes)
+                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
+                            : isSet(object.ceUri)
+                                ? { $case: "ceUri", ceUri: String(object.ceUri) }
+                                : isSet(object.ceUriRef)
+                                    ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) }
+                                    : isSet(object.ceTimestamp)
+                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
+                                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean);
+        message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger));
+        message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString);
+        message.attr?.$case === "ceBytes" &&
+            (obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined);
+        message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri);
+        message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef);
+        message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString());
+        return obj;
+    },
+};
+function createBaseCloudEventBatch() {
+    return { events: [] };
+}
+exports.CloudEventBatch = {
+    fromJSON(object) {
+        return { events: Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.events) {
+            obj.events = message.events.map((e) => e ? exports.CloudEvent.toJSON(e) : undefined);
+        }
+        else {
+            obj.events = [];
+        }
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function fromTimestamp(t) {
+    let millis = Number(t.seconds) * 1000;
+    millis += t.nanos / 1000000;
+    return new Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isObject(value) {
+    return typeof value === "object" && value !== null;
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
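
The generated codecs above encode protobuf `oneof` fields as a tagged union: a `$case` discriminator names the active variant, and a sibling key of the same name carries the payload. A minimal usage sketch under that convention (the require path is hypothetical; only the `$case` shape comes from the generated code above):

// Round-trip a oneof-style attribute value through the generated codec.
const { CloudEvent_CloudEventAttributeValue } = require("./cloudevent");

// `$case` selects exactly one variant; the key of the same name holds the value.
const attr = { attr: { $case: "ceString", ceString: "hello" } };

const json = CloudEvent_CloudEventAttributeValue.toJSON(attr);
// json is { ceString: "hello" } -- the tag is dropped in the JSON form.

const back = CloudEvent_CloudEventAttributeValue.fromJSON(json);
// back.attr.$case === "ceString" and back.attr.ceString === "hello";
// fromJSON probes each ce* key in turn to rebuild the tag.
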
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
new file mode 100644
index 0000000000000..da627499ad765
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
@@ -0,0 +1,119 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0;
+/**
+ * An indicator of the behavior of a given field (for example, that a field
+ * is required in requests, or given as output but ignored as input).
+ * This **does not** change the behavior in protocol buffers itself; it only
+ * denotes the behavior and may affect how API tooling handles the field.
+ *
+ * Note: This enum **may** receive new values in the future.
+ */
+var FieldBehavior;
+(function (FieldBehavior) {
+    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
+    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
+    /**
+     * OPTIONAL - Specifically denotes a field as optional.
+     * While all fields in protocol buffers are optional, this may be specified
+     * for emphasis if appropriate.
+     */
+    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
+    /**
+     * REQUIRED - Denotes a field as required.
+     * This indicates that the field **must** be provided as part of the request,
+     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
+     */
+    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
+    /**
+     * OUTPUT_ONLY - Denotes a field as output only.
+     * This indicates that the field is provided in responses, but including the
+     * field in a request does nothing (the server *must* ignore it and
+     * *must not* throw an error as a result of the field's presence).
+     */
+    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
+    /**
+     * INPUT_ONLY - Denotes a field as input only.
+     * This indicates that the field is provided in requests, and the
+     * corresponding field is not included in output.
+     */
+    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
+    /**
+     * IMMUTABLE - Denotes a field as immutable.
+     * This indicates that the field may be set once in a request to create a
+     * resource, but may not be changed thereafter.
+     */
+    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
+    /**
+     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
+     * This indicates that the service may provide the elements of the list
+     * in any arbitrary order, rather than the order the user originally
+     * provided. Additionally, the list's order may or may not be stable.
+     */
+    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
+})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {}));
+function fieldBehaviorFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "FIELD_BEHAVIOR_UNSPECIFIED":
+            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
+        case 1:
+        case "OPTIONAL":
+            return FieldBehavior.OPTIONAL;
+        case 2:
+        case "REQUIRED":
+            return FieldBehavior.REQUIRED;
+        case 3:
+        case "OUTPUT_ONLY":
+            return FieldBehavior.OUTPUT_ONLY;
+        case 4:
+        case "INPUT_ONLY":
+            return FieldBehavior.INPUT_ONLY;
+        case 5:
+        case "IMMUTABLE":
+            return FieldBehavior.IMMUTABLE;
+        case 6:
+        case "UNORDERED_LIST":
+            return FieldBehavior.UNORDERED_LIST;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
+exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
+function fieldBehaviorToJSON(object) {
+    switch (object) {
+        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
+            return "FIELD_BEHAVIOR_UNSPECIFIED";
+        case FieldBehavior.OPTIONAL:
+            return "OPTIONAL";
+        case FieldBehavior.REQUIRED:
+            return "REQUIRED";
+        case FieldBehavior.OUTPUT_ONLY:
+            return "OUTPUT_ONLY";
+        case FieldBehavior.INPUT_ONLY:
+            return "INPUT_ONLY";
+        case FieldBehavior.IMMUTABLE:
+            return "IMMUTABLE";
+        case FieldBehavior.UNORDERED_LIST:
+            return "UNORDERED_LIST";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
+exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
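
The enum helpers above implement the usual proto3 JSON mapping: `fromJSON` accepts either the numeric wire value or the string name, `toJSON` always emits the string name, and both throw on unrecognized input. A minimal sketch of that contract (require path hypothetical):

const { FieldBehavior, fieldBehaviorFromJSON, fieldBehaviorToJSON } = require("./field_behavior");

fieldBehaviorFromJSON(2);                     // FieldBehavior.REQUIRED
fieldBehaviorFromJSON("REQUIRED");            // FieldBehavior.REQUIRED
fieldBehaviorToJSON(FieldBehavior.REQUIRED);  // "REQUIRED"
// fieldBehaviorFromJSON(42) throws "Unrecognized enum value 42 for enum FieldBehavior"
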
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
new file mode 100644
index 0000000000000..6b3f3c97a6647
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
@@ -0,0 +1,65 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Any = void 0;
+function createBaseAny() {
+    return { typeUrl: "", value: Buffer.alloc(0) };
+}
+exports.Any = {
+    fromJSON(object) {
+        return {
+            typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "",
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl);
+        message.value !== undefined &&
+            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
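
`Any.fromJSON`/`Any.toJSON` above follow the same bytes convention as the other generated messages: bytes live in memory as a Buffer (or Uint8Array) and cross the JSON boundary as base64, with `bytesFromBase64`/`base64FromBytes` falling back to `atob`/`btoa` where `Buffer` is unavailable. A minimal round-trip sketch (require path and typeUrl are illustrative):

const { Any } = require("./any");

// JSON form: base64 string. In-memory form: Buffer.
const msg = Any.fromJSON({ typeUrl: "type.googleapis.com/google.protobuf.Empty", value: "" });
// msg.value is a zero-length Buffer.

const json = Any.toJSON(msg);
// json.value === "" (base64 of zero bytes) and json.typeUrl is echoed back.
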
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
new file mode 100644
index 0000000000000..d429aac846043
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
@@ -0,0 +1,1308 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0;
+var FieldDescriptorProto_Type;
+(function (FieldDescriptorProto_Type) {
+    /**
+     * TYPE_DOUBLE - 0 is reserved for errors.
+     * Order is weird for historical reasons.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
+    /**
+     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
+    /**
+     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
+    /**
+     * TYPE_GROUP - Tag-delimited aggregate.
+     * Group type is deprecated and not supported in proto3. However, Proto3
+     * implementations should still be able to parse the group wire format and
+     * treat group fields as unknown fields.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
+    /** TYPE_MESSAGE - Length-delimited aggregate. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
+    /** TYPE_BYTES - New in version 2. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
+    /** TYPE_SINT32 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
+    /** TYPE_SINT64 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
+})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {}));
+function fieldDescriptorProto_TypeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "TYPE_DOUBLE":
+            return FieldDescriptorProto_Type.TYPE_DOUBLE;
+        case 2:
+        case "TYPE_FLOAT":
+            return FieldDescriptorProto_Type.TYPE_FLOAT;
+        case 3:
+        case "TYPE_INT64":
+            return FieldDescriptorProto_Type.TYPE_INT64;
+        case 4:
+        case "TYPE_UINT64":
+            return FieldDescriptorProto_Type.TYPE_UINT64;
+        case 5:
+        case "TYPE_INT32":
+            return FieldDescriptorProto_Type.TYPE_INT32;
+        case 6:
+        case "TYPE_FIXED64":
+            return FieldDescriptorProto_Type.TYPE_FIXED64;
+        case 7:
+        case "TYPE_FIXED32":
+            return FieldDescriptorProto_Type.TYPE_FIXED32;
+        case 8:
+        case "TYPE_BOOL":
+            return FieldDescriptorProto_Type.TYPE_BOOL;
+        case 9:
+        case "TYPE_STRING":
+            return FieldDescriptorProto_Type.TYPE_STRING;
+        case 10:
+        case "TYPE_GROUP":
+            return FieldDescriptorProto_Type.TYPE_GROUP;
+        case 11:
+        case "TYPE_MESSAGE":
+            return FieldDescriptorProto_Type.TYPE_MESSAGE;
+        case 12:
+        case "TYPE_BYTES":
+            return FieldDescriptorProto_Type.TYPE_BYTES;
+        case 13:
+        case "TYPE_UINT32":
+            return FieldDescriptorProto_Type.TYPE_UINT32;
+        case 14:
+        case "TYPE_ENUM":
+            return FieldDescriptorProto_Type.TYPE_ENUM;
+        case 15:
+        case "TYPE_SFIXED32":
+            return FieldDescriptorProto_Type.TYPE_SFIXED32;
+        case 16:
+        case "TYPE_SFIXED64":
+            return FieldDescriptorProto_Type.TYPE_SFIXED64;
+        case 17:
+        case "TYPE_SINT32":
+            return FieldDescriptorProto_Type.TYPE_SINT32;
+        case 18:
+        case "TYPE_SINT64":
+            return FieldDescriptorProto_Type.TYPE_SINT64;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
+function fieldDescriptorProto_TypeToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Type.TYPE_DOUBLE:
+            return "TYPE_DOUBLE";
+        case FieldDescriptorProto_Type.TYPE_FLOAT:
+            return "TYPE_FLOAT";
+        case FieldDescriptorProto_Type.TYPE_INT64:
+            return "TYPE_INT64";
+        case FieldDescriptorProto_Type.TYPE_UINT64:
+            return "TYPE_UINT64";
+        case FieldDescriptorProto_Type.TYPE_INT32:
+            return "TYPE_INT32";
+        case FieldDescriptorProto_Type.TYPE_FIXED64:
+            return "TYPE_FIXED64";
+        case FieldDescriptorProto_Type.TYPE_FIXED32:
+            return "TYPE_FIXED32";
+        case FieldDescriptorProto_Type.TYPE_BOOL:
+            return "TYPE_BOOL";
+        case FieldDescriptorProto_Type.TYPE_STRING:
+            return "TYPE_STRING";
+        case FieldDescriptorProto_Type.TYPE_GROUP:
+            return "TYPE_GROUP";
+        case FieldDescriptorProto_Type.TYPE_MESSAGE:
+            return "TYPE_MESSAGE";
+        case FieldDescriptorProto_Type.TYPE_BYTES:
+            return "TYPE_BYTES";
+        case FieldDescriptorProto_Type.TYPE_UINT32:
+            return "TYPE_UINT32";
+        case FieldDescriptorProto_Type.TYPE_ENUM:
+            return "TYPE_ENUM";
+        case FieldDescriptorProto_Type.TYPE_SFIXED32:
+            return "TYPE_SFIXED32";
+        case FieldDescriptorProto_Type.TYPE_SFIXED64:
+            return "TYPE_SFIXED64";
+        case FieldDescriptorProto_Type.TYPE_SINT32:
+            return "TYPE_SINT32";
+        case FieldDescriptorProto_Type.TYPE_SINT64:
+            return "TYPE_SINT64";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
+var FieldDescriptorProto_Label;
+(function (FieldDescriptorProto_Label) {
+    /** LABEL_OPTIONAL - 0 is reserved for errors */
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
+})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {}));
+function fieldDescriptorProto_LabelFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "LABEL_OPTIONAL":
+            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
+        case 2:
+        case "LABEL_REQUIRED":
+            return FieldDescriptorProto_Label.LABEL_REQUIRED;
+        case 3:
+        case "LABEL_REPEATED":
+            return FieldDescriptorProto_Label.LABEL_REPEATED;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
+function fieldDescriptorProto_LabelToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
+            return "LABEL_OPTIONAL";
+        case FieldDescriptorProto_Label.LABEL_REQUIRED:
+            return "LABEL_REQUIRED";
+        case FieldDescriptorProto_Label.LABEL_REPEATED:
+            return "LABEL_REPEATED";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
+/** Generated classes can be optimized for speed or code size. */
+var FileOptions_OptimizeMode;
+(function (FileOptions_OptimizeMode) {
+    /** SPEED - Generate complete code for parsing, serialization, */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
+    /** CODE_SIZE - etc. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
+    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
+})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {}));
+function fileOptions_OptimizeModeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "SPEED":
+            return FileOptions_OptimizeMode.SPEED;
+        case 2:
+        case "CODE_SIZE":
+            return FileOptions_OptimizeMode.CODE_SIZE;
+        case 3:
+        case "LITE_RUNTIME":
+            return FileOptions_OptimizeMode.LITE_RUNTIME;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
+function fileOptions_OptimizeModeToJSON(object) {
+    switch (object) {
+        case FileOptions_OptimizeMode.SPEED:
+            return "SPEED";
+        case FileOptions_OptimizeMode.CODE_SIZE:
+            return "CODE_SIZE";
+        case FileOptions_OptimizeMode.LITE_RUNTIME:
+            return "LITE_RUNTIME";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
+var FieldOptions_CType;
+(function (FieldOptions_CType) {
+    /** STRING - Default mode. */
+    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
+    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
+    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
+})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {}));
+function fieldOptions_CTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "STRING":
+            return FieldOptions_CType.STRING;
+        case 1:
+        case "CORD":
+            return FieldOptions_CType.CORD;
+        case 2:
+        case "STRING_PIECE":
+            return FieldOptions_CType.STRING_PIECE;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
+function fieldOptions_CTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_CType.STRING:
+            return "STRING";
+        case FieldOptions_CType.CORD:
+            return "CORD";
+        case FieldOptions_CType.STRING_PIECE:
+            return "STRING_PIECE";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
+var FieldOptions_JSType;
+(function (FieldOptions_JSType) {
+    /** JS_NORMAL - Use the default type. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
+    /** JS_STRING - Use JavaScript strings. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
+    /** JS_NUMBER - Use JavaScript numbers. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
+})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {}));
+function fieldOptions_JSTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "JS_NORMAL":
+            return FieldOptions_JSType.JS_NORMAL;
+        case 1:
+        case "JS_STRING":
+            return FieldOptions_JSType.JS_STRING;
+        case 2:
+        case "JS_NUMBER":
+            return FieldOptions_JSType.JS_NUMBER;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
+function fieldOptions_JSTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_JSType.JS_NORMAL:
+            return "JS_NORMAL";
+        case FieldOptions_JSType.JS_STRING:
+            return "JS_STRING";
+        case FieldOptions_JSType.JS_NUMBER:
+            return "JS_NUMBER";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
+/**
+ * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+ * or neither? HTTP based RPC implementation may choose GET verb for safe
+ * methods, and PUT verb for idempotent methods instead of the default POST.
+ */
+var MethodOptions_IdempotencyLevel;
+(function (MethodOptions_IdempotencyLevel) {
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
+    /** NO_SIDE_EFFECTS - implies idempotent */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
+    /** IDEMPOTENT - idempotent, but may have side effects */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
+})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {}));
+function methodOptions_IdempotencyLevelFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "IDEMPOTENCY_UNKNOWN":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
+        case 1:
+        case "NO_SIDE_EFFECTS":
+            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
+        case 2:
+        case "IDEMPOTENT":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
+function methodOptions_IdempotencyLevelToJSON(object) {
+    switch (object) {
+        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
+            return "IDEMPOTENCY_UNKNOWN";
+        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
+            return "NO_SIDE_EFFECTS";
+        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
+            return "IDEMPOTENT";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
+function createBaseFileDescriptorSet() {
+    return { file: [] };
+}
+exports.FileDescriptorSet = {
+    fromJSON(object) {
+        return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.file) {
+            obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.file = [];
+        }
+        return obj;
+    },
+};
+function createBaseFileDescriptorProto() {
+    return {
+        name: "",
+        package: "",
+        dependency: [],
+        publicDependency: [],
+        weakDependency: [],
+        messageType: [],
+        enumType: [],
+        service: [],
+        extension: [],
+        options: undefined,
+        sourceCodeInfo: undefined,
+        syntax: "",
+    };
+}
+exports.FileDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            package: isSet(object.package) ? String(object.package) : "",
+            dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [],
+            publicDependency: Array.isArray(object?.publicDependency)
+                ? object.publicDependency.map((e) => Number(e))
+                : [],
+            weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [],
+            messageType: Array.isArray(object?.messageType)
+                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
+            service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [],
+            extension: Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
+            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
+            syntax: isSet(object.syntax) ? String(object.syntax) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.package !== undefined && (obj.package = message.package);
+        if (message.dependency) {
+            obj.dependency = message.dependency.map((e) => e);
+        }
+        else {
+            obj.dependency = [];
+        }
+        if (message.publicDependency) {
+            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
+        }
+        else {
+            obj.publicDependency = [];
+        }
+        if (message.weakDependency) {
+            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
+        }
+        else {
+            obj.weakDependency = [];
+        }
+        if (message.messageType) {
+            obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.messageType = [];
+        }
+        if (message.enumType) {
+            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.enumType = [];
+        }
+        if (message.service) {
+            obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.service = [];
+        }
+        if (message.extension) {
+            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.extension = [];
+        }
+        message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined);
+        message.sourceCodeInfo !== undefined &&
+            (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined);
+        message.syntax !== undefined && (obj.syntax = message.syntax);
+        return obj;
+    },
+};
+function createBaseDescriptorProto() {
+    return {
+        name: "",
+        field: [],
+        extension: [],
+        nestedType: [],
+        enumType: [],
+        extensionRange: [],
+        oneofDecl: [],
+        options: undefined,
+        reservedRange: [],
+        reservedName: [],
+    };
+}
+exports.DescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [],
+            extension: Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            nestedType: Array.isArray(object?.nestedType)
+                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
+            extensionRange: Array.isArray(object?.extensionRange)
+                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
+                : [],
+            oneofDecl: Array.isArray(object?.oneofDecl)
+                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
+            reservedRange: Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
+                : [],
+            reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.field) {
+            obj.field = message.field.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.field = [];
+        }
+        if (message.extension) {
+            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.extension = [];
+        }
+        if (message.nestedType) {
+            obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.nestedType = [];
+        }
+        if (message.enumType) {
+            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.enumType = [];
+        }
+        if (message.extensionRange) {
+            obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.extensionRange = [];
+        }
+        if (message.oneofDecl) {
+            obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.oneofDecl = [];
+        }
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined);
+        if (message.reservedRange) {
+            obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.reservedRange = [];
+        }
+        if (message.reservedName) {
+            obj.reservedName = message.reservedName.map((e) => e);
+        }
+        else {
+            obj.reservedName = [];
+        }
+        return obj;
+    },
+};
+function createBaseDescriptorProto_ExtensionRange() {
+    return { start: 0, end: 0, options: undefined };
+}
+exports.DescriptorProto_ExtensionRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? Number(object.start) : 0,
+            end: isSet(object.end) ? Number(object.end) : 0,
+            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseDescriptorProto_ReservedRange() {
+    return { start: 0, end: 0 };
+}
+exports.DescriptorProto_ReservedRange = {
+    fromJSON(object) {
+        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
+function createBaseExtensionRangeOptions() {
+    return { uninterpretedOption: [] };
+}
+exports.ExtensionRangeOptions = {
+    fromJSON(object) {
+        return {
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseFieldDescriptorProto() {
+    return {
+        name: "",
+        number: 0,
+        label: 1,
+        type: 1,
+        typeName: "",
+        extendee: "",
+        defaultValue: "",
+        oneofIndex: 0,
+        jsonName: "",
+        options: undefined,
+        proto3Optional: false,
+    };
+}
+exports.FieldDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            number: isSet(object.number) ? Number(object.number) : 0,
+            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
+            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
+            typeName: isSet(object.typeName) ? String(object.typeName) : "",
+            extendee: isSet(object.extendee) ? String(object.extendee) : "",
+            defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "",
+            oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0,
+            jsonName: isSet(object.jsonName) ? String(object.jsonName) : "",
+            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
+            proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.number !== undefined && (obj.number = Math.round(message.number));
+        message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label));
+        message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type));
+        message.typeName !== undefined && (obj.typeName = message.typeName);
+        message.extendee !== undefined && (obj.extendee = message.extendee);
+        message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue);
+        message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex));
+        message.jsonName !== undefined && (obj.jsonName = message.jsonName);
+        message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined);
+        message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional);
+        return obj;
+    },
+};
+function createBaseOneofDescriptorProto() {
+    return { name: "", options: undefined };
+}
+exports.OneofDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseEnumDescriptorProto() {
+    return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] };
+}
+exports.EnumDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [],
+            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
+            reservedRange: Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
+                : [],
+            reservedName: Array.isArray(object?.reservedName)
+                ? object.reservedName.map((e) => String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.value) {
+            obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.value = [];
+        }
+        message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined);
+        if (message.reservedRange) {
+            obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined);
+        }
+        else {
+            obj.reservedRange = [];
+        }
+        if (message.reservedName) {
+            obj.reservedName = message.reservedName.map((e) => e);
+        }
+        else {
+            obj.reservedName = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumDescriptorProto_EnumReservedRange() {
+    return { start: 0, end: 0 };
+}
+exports.EnumDescriptorProto_EnumReservedRange = {
+    fromJSON(object) {
+        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = Math.round(message.start));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
+function createBaseEnumValueDescriptorProto() {
+    return { name: "", number: 0, options: undefined };
+}
+exports.EnumValueDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            number: isSet(object.number) ? Number(object.number) : 0,
+            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.number !== undefined && (obj.number = Math.round(message.number));
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseServiceDescriptorProto() {
+    return { name: "", method: [], options: undefined };
+}
+exports.ServiceDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [],
+            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        if (message.method) {
+            obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined);
+        }
+        else {
+            obj.method = [];
+        }
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined);
+        return obj;
+    },
+};
+function createBaseMethodDescriptorProto() {
+    return {
+        name: "",
+        inputType: "",
+        outputType: "",
+        options: undefined,
+        clientStreaming: false,
+        serverStreaming: false,
+    };
+}
+exports.MethodDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? String(object.name) : "",
+            inputType: isSet(object.inputType) ? String(object.inputType) : "",
+            outputType: isSet(object.outputType) ? String(object.outputType) : "",
+            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
+            clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false,
+            serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.name !== undefined && (obj.name = message.name);
+        message.inputType !== undefined && (obj.inputType = message.inputType);
+        message.outputType !== undefined && (obj.outputType = message.outputType);
+        message.options !== undefined &&
+            (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined);
+        message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming);
+        message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming);
+        return obj;
+    },
+};
+function createBaseFileOptions() {
+    return {
+        javaPackage: "",
+        javaOuterClassname: "",
+        javaMultipleFiles: false,
+        javaGenerateEqualsAndHash: false,
+        javaStringCheckUtf8: false,
+        optimizeFor: 1,
+        goPackage: "",
+        ccGenericServices: false,
+        javaGenericServices: false,
+        pyGenericServices: false,
+        phpGenericServices: false,
+        deprecated: false,
+        ccEnableArenas: false,
+        objcClassPrefix: "",
+        csharpNamespace: "",
+        swiftPrefix: "",
+        phpClassPrefix: "",
+        phpNamespace: "",
+        phpMetadataNamespace: "",
+        rubyPackage: "",
+        uninterpretedOption: [],
+    };
+}
+exports.FileOptions = {
+    fromJSON(object) {
+        return {
+            javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "",
+            javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "",
+            javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false,
+            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
+                ? Boolean(object.javaGenerateEqualsAndHash)
+                : false,
+            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false,
+            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
+            goPackage: isSet(object.goPackage) ? String(object.goPackage) : "",
+            ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false,
+            javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false,
+            pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false,
+            phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false,
+            objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "",
+            csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "",
+            swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "",
+            phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "",
+            phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "",
+            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "",
+            rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "",
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage);
+        message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname);
+        message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles);
+        message.javaGenerateEqualsAndHash !== undefined &&
+            (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash);
+        message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8);
+        message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor));
+        message.goPackage !== undefined && (obj.goPackage = message.goPackage);
+        message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices);
+        message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices);
+        message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices);
+        message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas);
+        message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix);
+        message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace);
+        message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix);
+        message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix);
+        message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace);
+        message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace);
+        message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseMessageOptions() {
+    return {
+        messageSetWireFormat: false,
+        noStandardDescriptorAccessor: false,
+        deprecated: false,
+        mapEntry: false,
+        uninterpretedOption: [],
+    };
+}
+exports.MessageOptions = {
+    fromJSON(object) {
+        return {
+            messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false,
+            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
+                ? Boolean(object.noStandardDescriptorAccessor)
+                : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat);
+        message.noStandardDescriptorAccessor !== undefined &&
+            (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseFieldOptions() {
+    return {
+        ctype: 0,
+        packed: false,
+        jstype: 0,
+        lazy: false,
+        unverifiedLazy: false,
+        deprecated: false,
+        weak: false,
+        uninterpretedOption: [],
+    };
+}
+exports.FieldOptions = {
+    fromJSON(object) {
+        return {
+            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
+            packed: isSet(object.packed) ? Boolean(object.packed) : false,
+            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
+            lazy: isSet(object.lazy) ? Boolean(object.lazy) : false,
+            unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            weak: isSet(object.weak) ? Boolean(object.weak) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype));
+        message.packed !== undefined && (obj.packed = message.packed);
+        message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype));
+        message.lazy !== undefined && (obj.lazy = message.lazy);
+        message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.weak !== undefined && (obj.weak = message.weak);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseOneofOptions() {
+    return { uninterpretedOption: [] };
+}
+exports.OneofOptions = {
+    fromJSON(object) {
+        return {
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumOptions() {
+    return { allowAlias: false, deprecated: false, uninterpretedOption: [] };
+}
+exports.EnumOptions = {
+    fromJSON(object) {
+        return {
+            allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false,
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias);
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseEnumValueOptions() {
+    return { deprecated: false, uninterpretedOption: [] };
+}
+exports.EnumValueOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseServiceOptions() {
+    return { deprecated: false, uninterpretedOption: [] };
+}
+exports.ServiceOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseMethodOptions() {
+    return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] };
+}
+exports.MethodOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+            idempotencyLevel: isSet(object.idempotencyLevel)
+                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
+                : 0,
+            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+        message.idempotencyLevel !== undefined &&
+            (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel));
+        if (message.uninterpretedOption) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+        }
+        else {
+            obj.uninterpretedOption = [];
+        }
+        return obj;
+    },
+};
+function createBaseUninterpretedOption() {
+    return {
+        name: [],
+        identifierValue: "",
+        positiveIntValue: "0",
+        negativeIntValue: "0",
+        doubleValue: 0,
+        stringValue: Buffer.alloc(0),
+        aggregateValue: "",
+    };
+}
+exports.UninterpretedOption = {
+    fromJSON(object) {
+        return {
+            name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [],
+            identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "",
+            positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0",
+            negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0",
+            doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0,
+            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
+            aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name) {
+            obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined);
+        }
+        else {
+            obj.name = [];
+        }
+        message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue);
+        message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue);
+        message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue);
+        message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue);
+        message.stringValue !== undefined &&
+            (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0)));
+        message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue);
+        return obj;
+    },
+};
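+// Illustrative int64 handling for the codec above (a sketch, not upstream
+// code): 64-bit integer fields round-trip as decimal strings rather than JS
+// numbers, avoiding precision loss past Number.MAX_SAFE_INTEGER.
+//   exports.UninterpretedOption.fromJSON({ positiveIntValue: 42 }).positiveIntValue
+//   // => "42"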
+function createBaseUninterpretedOption_NamePart() {
+    return { namePart: "", isExtension: false };
+}
+exports.UninterpretedOption_NamePart = {
+    fromJSON(object) {
+        return {
+            namePart: isSet(object.namePart) ? String(object.namePart) : "",
+            isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.namePart !== undefined && (obj.namePart = message.namePart);
+        message.isExtension !== undefined && (obj.isExtension = message.isExtension);
+        return obj;
+    },
+};
+function createBaseSourceCodeInfo() {
+    return { location: [] };
+}
+exports.SourceCodeInfo = {
+    fromJSON(object) {
+        return {
+            location: Array.isArray(object?.location)
+                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.location) {
+            obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined);
+        }
+        else {
+            obj.location = [];
+        }
+        return obj;
+    },
+};
+function createBaseSourceCodeInfo_Location() {
+    return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] };
+}
+exports.SourceCodeInfo_Location = {
+    fromJSON(object) {
+        return {
+            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
+            span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [],
+            leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "",
+            trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "",
+            leadingDetachedComments: Array.isArray(object?.leadingDetachedComments)
+                ? object.leadingDetachedComments.map((e) => String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        else {
+            obj.path = [];
+        }
+        if (message.span) {
+            obj.span = message.span.map((e) => Math.round(e));
+        }
+        else {
+            obj.span = [];
+        }
+        message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments);
+        message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments);
+        if (message.leadingDetachedComments) {
+            obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e);
+        }
+        else {
+            obj.leadingDetachedComments = [];
+        }
+        return obj;
+    },
+};
+function createBaseGeneratedCodeInfo() {
+    return { annotation: [] };
+}
+exports.GeneratedCodeInfo = {
+    fromJSON(object) {
+        return {
+            annotation: Array.isArray(object?.annotation)
+                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.annotation) {
+            obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined);
+        }
+        else {
+            obj.annotation = [];
+        }
+        return obj;
+    },
+};
+function createBaseGeneratedCodeInfo_Annotation() {
+    return { path: [], sourceFile: "", begin: 0, end: 0 };
+}
+exports.GeneratedCodeInfo_Annotation = {
+    fromJSON(object) {
+        return {
+            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
+            sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "",
+            begin: isSet(object.begin) ? Number(object.begin) : 0,
+            end: isSet(object.end) ? Number(object.end) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        else {
+            obj.path = [];
+        }
+        message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile);
+        message.begin !== undefined && (obj.begin = Math.round(message.begin));
+        message.end !== undefined && (obj.end = Math.round(message.end));
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
new file mode 100644
index 0000000000000..159135fe87172
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
@@ -0,0 +1,24 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+function createBaseTimestamp() {
+    return { seconds: "0", nanos: 0 };
+}
+exports.Timestamp = {
+    fromJSON(object) {
+        return {
+            seconds: isSet(object.seconds) ? String(object.seconds) : "0",
+            nanos: isSet(object.nanos) ? Number(object.nanos) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.seconds !== undefined && (obj.seconds = message.seconds);
+        message.nanos !== undefined && (obj.nanos = Math.round(message.nanos));
+        return obj;
+    },
+};
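+// Illustrative use of the codec above (sketch): proto JSON carries int64
+// seconds as a string, and toJSON rounds fractional nanos to an integer.
+//   exports.Timestamp.toJSON({ seconds: "0", nanos: 1.6 })
+//   // => { seconds: "0", nanos: 2 }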
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
new file mode 100644
index 0000000000000..1ef3e1b3356b7
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
@@ -0,0 +1,106 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
+/* eslint-disable */
+const envelope_1 = require("./envelope");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_rekor_1 = require("./sigstore_rekor");
+function createBaseTimestampVerificationData() {
+    return { rfc3161Timestamps: [] };
+}
+exports.TimestampVerificationData = {
+    fromJSON(object) {
+        return {
+            rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps)
+                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rfc3161Timestamps) {
+            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined);
+        }
+        else {
+            obj.rfc3161Timestamps = [];
+        }
+        return obj;
+    },
+};
+function createBaseVerificationMaterial() {
+    return { content: undefined, tlogEntries: [], timestampVerificationData: undefined };
+}
+exports.VerificationMaterial = {
+    fromJSON(object) {
+        return {
+            content: isSet(object.publicKey)
+                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
+                : isSet(object.x509CertificateChain)
+                    ? {
+                        $case: "x509CertificateChain",
+                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
+                    }
+                    : undefined,
+            tlogEntries: Array.isArray(object?.tlogEntries)
+                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
+                : [],
+            timestampVerificationData: isSet(object.timestampVerificationData)
+                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.content?.$case === "publicKey" &&
+            (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined);
+        message.content?.$case === "x509CertificateChain" &&
+            (obj.x509CertificateChain = message.content?.x509CertificateChain
+                ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain)
+                : undefined);
+        if (message.tlogEntries) {
+            obj.tlogEntries = message.tlogEntries.map((e) => e ? sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
+        }
+        else {
+            obj.tlogEntries = [];
+        }
+        message.timestampVerificationData !== undefined &&
+            (obj.timestampVerificationData = message.timestampVerificationData
+                ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
+                : undefined);
+        return obj;
+    },
+};
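+// Illustrative decoding (sketch): with no "publicKey" or "x509CertificateChain"
+// key present, the oneof stays undefined and the repeated field defaults to [].
+//   exports.VerificationMaterial.fromJSON({})
+//   // => { content: undefined, tlogEntries: [], timestampVerificationData: undefined }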
+function createBaseBundle() {
+    return { mediaType: "", verificationMaterial: undefined, content: undefined };
+}
+exports.Bundle = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+            verificationMaterial: isSet(object.verificationMaterial)
+                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
+                : undefined,
+            content: isSet(object.messageSignature)
+                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
+                : isSet(object.dsseEnvelope)
+                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+        message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
+            ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
+            : undefined);
+        message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
+            ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
+            : undefined);
+        message.content?.$case === "dsseEnvelope" &&
+            (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
+        return obj;
+    },
+};
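+// Illustrative oneof decoding (sketch; assumes Envelope.fromJSON tolerates an
+// empty object, as these generated codecs do): the mutually exclusive JSON
+// keys map onto a single discriminated "content" field.
+//   exports.Bundle.fromJSON({ dsseEnvelope: {} }).content.$case  // => "dsseEnvelope"
+//   exports.Bundle.fromJSON({}).content                          // => undefined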
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
new file mode 100644
index 0000000000000..bcd654e9154b9
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -0,0 +1,457 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms is supported.
+ * See  for more
+ * details.
+ * UNSPECIFIED SHOULD NOT be used; the primary reason for its inclusion is to
+ * force any proto JSON serialization to emit the hash algorithm that was
+ * used, as the default behavior is to *omit* the default value of an enum
+ * (which is the first value, represented by '0').
+ */
+var HashAlgorithm;
+(function (HashAlgorithm) {
+    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
+    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
+})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {}));
+function hashAlgorithmFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "HASH_ALGORITHM_UNSPECIFIED":
+            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
+        case 1:
+        case "SHA2_256":
+            return HashAlgorithm.SHA2_256;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
+function hashAlgorithmToJSON(object) {
+    switch (object) {
+        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
+            return "HASH_ALGORITHM_UNSPECIFIED";
+        case HashAlgorithm.SHA2_256:
+            return "SHA2_256";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
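+// Illustrative behavior of the converter pair above (sketch): both the proto
+// JSON string name and the numeric value decode to the same member, and
+// unrecognized input throws.
+//   hashAlgorithmFromJSON("SHA2_256") === hashAlgorithmFromJSON(1)  // => true
+//   hashAlgorithmToJSON(HashAlgorithm.SHA2_256)                     // => "SHA2_256"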
+/**
+ * Details of a specific public key, capturing the key encoding method
+ * and signature algorithm.
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519, the valid permutations are listed as a linear set instead of a
+ * cartesian set (i.e. one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+var PublicKeyDetails;
+(function (PublicKeyDetails) {
+    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+    /** PKCS1_RSA_PKCS1V5 - RSA */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
+    /** PKCS1_RSA_PSS - See RFC8017 */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
+    /** PKIX_ECDSA_P256_SHA_256 - ECDSA */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
+    /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
+    /** PKIX_ED25519 - Ed25519 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
+})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {}));
+function publicKeyDetailsFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
+            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
+        case 1:
+        case "PKCS1_RSA_PKCS1V5":
+            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
+        case 2:
+        case "PKCS1_RSA_PSS":
+            return PublicKeyDetails.PKCS1_RSA_PSS;
+        case 3:
+        case "PKIX_RSA_PKCS1V5":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
+        case 4:
+        case "PKIX_RSA_PSS":
+            return PublicKeyDetails.PKIX_RSA_PSS;
+        case 5:
+        case "PKIX_ECDSA_P256_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
+        case 6:
+        case "PKIX_ECDSA_P256_HMAC_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
+        case 7:
+        case "PKIX_ED25519":
+            return PublicKeyDetails.PKIX_ED25519;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
+function publicKeyDetailsToJSON(object) {
+    switch (object) {
+        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
+            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
+            return "PKCS1_RSA_PKCS1V5";
+        case PublicKeyDetails.PKCS1_RSA_PSS:
+            return "PKCS1_RSA_PSS";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
+            return "PKIX_RSA_PKCS1V5";
+        case PublicKeyDetails.PKIX_RSA_PSS:
+            return "PKIX_RSA_PSS";
+        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
+            return "PKIX_ECDSA_P256_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
+            return "PKIX_ECDSA_P256_HMAC_SHA_256";
+        case PublicKeyDetails.PKIX_ED25519:
+            return "PKIX_ED25519";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
+var SubjectAlternativeNameType;
+(function (SubjectAlternativeNameType) {
+    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
+    /**
+     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
+     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
+     * for more details.
+     */
+    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
+})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {}));
+function subjectAlternativeNameTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
+            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
+        case 1:
+        case "EMAIL":
+            return SubjectAlternativeNameType.EMAIL;
+        case 2:
+        case "URI":
+            return SubjectAlternativeNameType.URI;
+        case 3:
+        case "OTHER_NAME":
+            return SubjectAlternativeNameType.OTHER_NAME;
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
+function subjectAlternativeNameTypeToJSON(object) {
+    switch (object) {
+        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
+            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+        case SubjectAlternativeNameType.EMAIL:
+            return "EMAIL";
+        case SubjectAlternativeNameType.URI:
+            return "URI";
+        case SubjectAlternativeNameType.OTHER_NAME:
+            return "OTHER_NAME";
+        default:
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
+function createBaseHashOutput() {
+    return { algorithm: 0, digest: Buffer.alloc(0) };
+}
+exports.HashOutput = {
+    fromJSON(object) {
+        return {
+            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
+            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm));
+        message.digest !== undefined &&
+            (obj.digest = base64FromBytes(message.digest !== undefined ? message.digest : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseMessageSignature() {
+    return { messageDigest: undefined, signature: Buffer.alloc(0) };
+}
+exports.MessageSignature = {
+    fromJSON(object) {
+        return {
+            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
+            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.messageDigest !== undefined &&
+            (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined);
+        message.signature !== undefined &&
+            (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseLogId() {
+    return { keyId: Buffer.alloc(0) };
+}
+exports.LogId = {
+    fromJSON(object) {
+        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.keyId !== undefined &&
+            (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseRFC3161SignedTimestamp() {
+    return { signedTimestamp: Buffer.alloc(0) };
+}
+exports.RFC3161SignedTimestamp = {
+    fromJSON(object) {
+        return {
+            signedTimestamp: isSet(object.signedTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signedTimestamp !== undefined &&
+            (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBasePublicKey() {
+    return { rawBytes: undefined, keyDetails: 0, validFor: undefined };
+}
+exports.PublicKey = {
+    fromJSON(object) {
+        return {
+            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
+            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
+            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.rawBytes !== undefined &&
+            (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined);
+        message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails));
+        message.validFor !== undefined &&
+            (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined);
+        return obj;
+    },
+};
+function createBasePublicKeyIdentifier() {
+    return { hint: "" };
+}
+exports.PublicKeyIdentifier = {
+    fromJSON(object) {
+        return { hint: isSet(object.hint) ? String(object.hint) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.hint !== undefined && (obj.hint = message.hint);
+        return obj;
+    },
+};
+function createBaseObjectIdentifier() {
+    return { id: [] };
+}
+exports.ObjectIdentifier = {
+    fromJSON(object) {
+        return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.id) {
+            obj.id = message.id.map((e) => Math.round(e));
+        }
+        else {
+            obj.id = [];
+        }
+        return obj;
+    },
+};
+function createBaseObjectIdentifierValuePair() {
+    return { oid: undefined, value: Buffer.alloc(0) };
+}
+exports.ObjectIdentifierValuePair = {
+    fromJSON(object) {
+        return {
+            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined);
+        message.value !== undefined &&
+            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseDistinguishedName() {
+    return { organization: "", commonName: "" };
+}
+exports.DistinguishedName = {
+    fromJSON(object) {
+        return {
+            organization: isSet(object.organization) ? String(object.organization) : "",
+            commonName: isSet(object.commonName) ? String(object.commonName) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.organization !== undefined && (obj.organization = message.organization);
+        message.commonName !== undefined && (obj.commonName = message.commonName);
+        return obj;
+    },
+};
+function createBaseX509Certificate() {
+    return { rawBytes: Buffer.alloc(0) };
+}
+exports.X509Certificate = {
+    fromJSON(object) {
+        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.rawBytes !== undefined &&
+            (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseSubjectAlternativeName() {
+    return { type: 0, identity: undefined };
+}
+exports.SubjectAlternativeName = {
+    fromJSON(object) {
+        return {
+            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
+            identity: isSet(object.regexp)
+                ? { $case: "regexp", regexp: String(object.regexp) }
+                : isSet(object.value)
+                    ? { $case: "value", value: String(object.value) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type));
+        message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp);
+        message.identity?.$case === "value" && (obj.value = message.identity?.value);
+        return obj;
+    },
+};
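+// Illustrative oneof precedence (sketch): "regexp" is checked before "value",
+// mirroring the branch order in fromJSON above.
+//   exports.SubjectAlternativeName.fromJSON({ type: "URI", regexp: "a.*" }).identity
+//   // => { $case: "regexp", regexp: "a.*" }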
+function createBaseX509CertificateChain() {
+    return { certificates: [] };
+}
+exports.X509CertificateChain = {
+    fromJSON(object) {
+        return {
+            certificates: Array.isArray(object?.certificates)
+                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.certificates) {
+            obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined);
+        }
+        else {
+            obj.certificates = [];
+        }
+        return obj;
+    },
+};
+function createBaseTimeRange() {
+    return { start: undefined, end: undefined };
+}
+exports.TimeRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
+            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.start !== undefined && (obj.start = message.start.toISOString());
+        message.end !== undefined && (obj.end = message.end.toISOString());
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
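+// Illustrative round-trip through the helpers above (sketch; assumes a
+// Buffer-capable runtime such as Node.js):
+//   base64FromBytes(bytesFromBase64("aGk="))  // => "aGk=" (the bytes of "hi")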
+function fromTimestamp(t) {
+    let millis = Number(t.seconds) * 1000;
+    millis += t.nanos / 1000000;
+    return new Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
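+// Illustrative conversion (sketch): seconds arrive as a decimal string and
+// nanos contribute fractional milliseconds.
+//   fromTimestamp({ seconds: "1", nanos: 500000000 }).getTime()  // => 1500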
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
new file mode 100644
index 0000000000000..398193b2075a7
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
@@ -0,0 +1,167 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+function createBaseKindVersion() {
+    return { kind: "", version: "" };
+}
+exports.KindVersion = {
+    fromJSON(object) {
+        return {
+            kind: isSet(object.kind) ? String(object.kind) : "",
+            version: isSet(object.version) ? String(object.version) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.kind !== undefined && (obj.kind = message.kind);
+        message.version !== undefined && (obj.version = message.version);
+        return obj;
+    },
+};
+function createBaseCheckpoint() {
+    return { envelope: "" };
+}
+exports.Checkpoint = {
+    fromJSON(object) {
+        return { envelope: isSet(object.envelope) ? String(object.envelope) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.envelope !== undefined && (obj.envelope = message.envelope);
+        return obj;
+    },
+};
+function createBaseInclusionProof() {
+    return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined };
+}
+exports.InclusionProof = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
+            treeSize: isSet(object.treeSize) ? String(object.treeSize) : "0",
+            hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [],
+            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
+        message.rootHash !== undefined &&
+            (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0)));
+        message.treeSize !== undefined && (obj.treeSize = message.treeSize);
+        if (message.hashes) {
+            obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0)));
+        }
+        else {
+            obj.hashes = [];
+        }
+        message.checkpoint !== undefined &&
+            (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined);
+        return obj;
+    },
+};
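+// Illustrative decoding of the Merkle proof fields (sketch): each "hashes"
+// entry is base64 in JSON and a Buffer once decoded.
+//   exports.InclusionProof.fromJSON({ hashes: ["aGk="] }).hashes[0]
+//   // => <Buffer 68 69>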
+function createBaseInclusionPromise() {
+    return { signedEntryTimestamp: Buffer.alloc(0) };
+}
+exports.InclusionPromise = {
+    fromJSON(object) {
+        return {
+            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signedEntryTimestamp !== undefined &&
+            (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0)));
+        return obj;
+    },
+};
+function createBaseTransparencyLogEntry() {
+    return {
+        logIndex: "0",
+        logId: undefined,
+        kindVersion: undefined,
+        integratedTime: "0",
+        inclusionPromise: undefined,
+        inclusionProof: undefined,
+        canonicalizedBody: Buffer.alloc(0),
+    };
+}
+exports.TransparencyLogEntry = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
+            integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0",
+            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
+            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
+            canonicalizedBody: isSet(object.canonicalizedBody)
+                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
+        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
+        message.kindVersion !== undefined &&
+            (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined);
+        message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime);
+        message.inclusionPromise !== undefined &&
+            (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined);
+        message.inclusionProof !== undefined &&
+            (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined);
+        message.canonicalizedBody !== undefined &&
+            (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? message.canonicalizedBody : Buffer.alloc(0)));
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
new file mode 100644
index 0000000000000..05e566767cdb2
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
@@ -0,0 +1,103 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+function createBaseTransparencyLogInstance() {
+    return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined };
+}
+exports.TransparencyLogInstance = {
+    fromJSON(object) {
+        return {
+            baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "",
+            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
+            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl);
+        message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm));
+        message.publicKey !== undefined &&
+            (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined);
+        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
+        return obj;
+    },
+};
+function createBaseCertificateAuthority() {
+    return { subject: undefined, uri: "", certChain: undefined, validFor: undefined };
+}
+exports.CertificateAuthority = {
+    fromJSON(object) {
+        return {
+            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
+            uri: isSet(object.uri) ? String(object.uri) : "",
+            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
+            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.subject !== undefined &&
+            (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined);
+        message.uri !== undefined && (obj.uri = message.uri);
+        message.certChain !== undefined &&
+            (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined);
+        message.validFor !== undefined &&
+            (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined);
+        return obj;
+    },
+};
+function createBaseTrustedRoot() {
+    return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] };
+}
+exports.TrustedRoot = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+            tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [],
+            certificateAuthorities: Array.isArray(object?.certificateAuthorities)
+                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+            ctlogs: Array.isArray(object?.ctlogs)
+                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+                : [],
+            timestampAuthorities: Array.isArray(object?.timestampAuthorities)
+                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+        if (message.tlogs) {
+            obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+        }
+        else {
+            obj.tlogs = [];
+        }
+        if (message.certificateAuthorities) {
+            obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+        }
+        else {
+            obj.certificateAuthorities = [];
+        }
+        if (message.ctlogs) {
+            obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+        }
+        else {
+            obj.ctlogs = [];
+        }
+        if (message.timestampAuthorities) {
+            obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+        }
+        else {
+            obj.timestampAuthorities = [];
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
new file mode 100644
index 0000000000000..8a72b89761869
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
@@ -0,0 +1,273 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
+/* eslint-disable */
+const sigstore_bundle_1 = require("./sigstore_bundle");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_trustroot_1 = require("./sigstore_trustroot");
+function createBaseCertificateIdentity() {
+    return { issuer: "", san: undefined, oids: [] };
+}
+exports.CertificateIdentity = {
+    fromJSON(object) {
+        return {
+            issuer: isSet(object.issuer) ? String(object.issuer) : "",
+            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
+            oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.issuer !== undefined && (obj.issuer = message.issuer);
+        message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined);
+        if (message.oids) {
+            obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined);
+        }
+        else {
+            obj.oids = [];
+        }
+        return obj;
+    },
+};
+function createBaseCertificateIdentities() {
+    return { identities: [] };
+}
+exports.CertificateIdentities = {
+    fromJSON(object) {
+        return {
+            identities: Array.isArray(object?.identities)
+                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.identities) {
+            obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined);
+        }
+        else {
+            obj.identities = [];
+        }
+        return obj;
+    },
+};
+function createBasePublicKeyIdentities() {
+    return { publicKeys: [] };
+}
+exports.PublicKeyIdentities = {
+    fromJSON(object) {
+        return {
+            publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.publicKeys) {
+            obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined);
+        }
+        else {
+            obj.publicKeys = [];
+        }
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions() {
+    return { signers: undefined, tlogOptions: undefined, ctlogOptions: undefined, tsaOptions: undefined };
+}
+exports.ArtifactVerificationOptions = {
+    fromJSON(object) {
+        return {
+            signers: isSet(object.certificateIdentities)
+                ? {
+                    $case: "certificateIdentities",
+                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
+                }
+                : isSet(object.publicKeys)
+                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
+                    : undefined,
+            tlogOptions: isSet(object.tlogOptions)
+                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
+                : undefined,
+            ctlogOptions: isSet(object.ctlogOptions)
+                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
+                : undefined,
+            tsaOptions: isSet(object.tsaOptions)
+                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.signers?.$case === "certificateIdentities" &&
+            (obj.certificateIdentities = message.signers?.certificateIdentities
+                ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities)
+                : undefined);
+        message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys
+            ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys)
+            : undefined);
+        message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions
+            ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions)
+            : undefined);
+        message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions
+            ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions)
+            : undefined);
+        message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions
+            ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions)
+            : undefined);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_TlogOptions() {
+    return { threshold: 0, performOnlineVerification: false, disable: false };
+}
+exports.ArtifactVerificationOptions_TlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            performOnlineVerification: isSet(object.performOnlineVerification)
+                ? Boolean(object.performOnlineVerification)
+                : false,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.performOnlineVerification !== undefined &&
+            (obj.performOnlineVerification = message.performOnlineVerification);
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_CtlogOptions() {
+    return { threshold: 0, detachedSct: false, disable: false };
+}
+exports.ArtifactVerificationOptions_CtlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            detachedSct: isSet(object.detachedSct) ? Boolean(object.detachedSct) : false,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.detachedSct !== undefined && (obj.detachedSct = message.detachedSct);
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() {
+    return { threshold: 0, disable: false };
+}
+exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+        message.disable !== undefined && (obj.disable = message.disable);
+        return obj;
+    },
+};
+function createBaseArtifact() {
+    return { data: undefined };
+}
+exports.Artifact = {
+    fromJSON(object) {
+        return {
+            data: isSet(object.artifactUri)
+                ? { $case: "artifactUri", artifactUri: String(object.artifactUri) }
+                : isSet(object.artifact)
+                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri);
+        message.data?.$case === "artifact" &&
+            (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined);
+        return obj;
+    },
+};
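+// Illustrative sketch, not part of the upstream generated module: the
+// protobuf `oneof` fields above are modeled with a `$case` discriminator,
+// so an artifact URI round-trips like this (the URL is a made-up example):
+//
+//   const msg = exports.Artifact.fromJSON({ artifactUri: 'https://example.com/a.tgz' });
+//   // msg.data => { $case: 'artifactUri', artifactUri: 'https://example.com/a.tgz' }
+//   exports.Artifact.toJSON(msg); // => { artifactUri: 'https://example.com/a.tgz' }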
+function createBaseInput() {
+    return {
+        artifactTrustRoot: undefined,
+        artifactVerificationOptions: undefined,
+        bundle: undefined,
+        artifact: undefined,
+    };
+}
+exports.Input = {
+    fromJSON(object) {
+        return {
+            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
+            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
+                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
+                : undefined,
+            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
+            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        message.artifactTrustRoot !== undefined &&
+            (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined);
+        message.artifactVerificationOptions !== undefined &&
+            (obj.artifactVerificationOptions = message.artifactVerificationOptions
+                ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions)
+                : undefined);
+        message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined);
+        message.artifact !== undefined && (obj.artifact = message.artifact ? exports.Artifact.toJSON(message.artifact) : undefined);
+        return obj;
+    },
+};
+var tsProtoGlobalThis = (() => {
+    if (typeof globalThis !== "undefined") {
+        return globalThis;
+    }
+    if (typeof self !== "undefined") {
+        return self;
+    }
+    if (typeof window !== "undefined") {
+        return window;
+    }
+    if (typeof global !== "undefined") {
+        return global;
+    }
+    throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+    }
+    else {
+        const bin = tsProtoGlobalThis.atob(b64);
+        const arr = new Uint8Array(bin.length);
+        for (let i = 0; i < bin.length; ++i) {
+            arr[i] = bin.charCodeAt(i);
+        }
+        return arr;
+    }
+}
+function base64FromBytes(arr) {
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+    }
+    else {
+        const bin = [];
+        arr.forEach((byte) => {
+            bin.push(String.fromCharCode(byte));
+        });
+        return tsProtoGlobalThis.btoa(bin.join(""));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
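+// Illustrative sketch, not part of the upstream generated module: the two
+// base64 helpers above are inverses, with an atob/btoa fallback when Buffer
+// is unavailable. A round trip under Node (Buffer present):
+//
+//   const bytes = bytesFromBase64('aGVsbG8=');  // Uint8Array for 'hello'
+//   base64FromBytes(bytes) === 'aGVsbG8=';      // => true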
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js
new file mode 100644
index 0000000000000..eafb768c48fca
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js
@@ -0,0 +1,37 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+__exportStar(require("./__generated__/envelope"), exports);
+__exportStar(require("./__generated__/sigstore_bundle"), exports);
+__exportStar(require("./__generated__/sigstore_common"), exports);
+__exportStar(require("./__generated__/sigstore_rekor"), exports);
+__exportStar(require("./__generated__/sigstore_trustroot"), exports);
+__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json
new file mode 100644
index 0000000000000..450abb157f31a
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json
@@ -0,0 +1,31 @@
+{
+  "name": "@sigstore/protobuf-specs",
+  "version": "0.2.1",
+  "description": "code-signing for npm packages",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "build": "tsc"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/protobuf-specs.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "bugs": {
+    "url": "https://github.com/sigstore/protobuf-specs/issues"
+  },
+  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
+  "devDependencies": {
+    "@tsconfig/node14": "^1.0.3",
+    "@types/node": "^18.14.0",
+    "typescript": "^4.9.5"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  }
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/package.json b/node_modules/sigstore/node_modules/@sigstore/tuf/package.json
new file mode 100644
index 0000000000000..3473dfef2cde9
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/package.json
@@ -0,0 +1,41 @@
+{
+  "name": "@sigstore/tuf",
+  "version": "1.0.3",
+  "description": "Client for the Sigstore TUF repository",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "clean": "shx rm -rf dist *.tsbuildinfo",
+    "build": "tsc --build",
+    "test": "jest"
+  },
+  "files": [
+    "dist",
+    "store"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/sigstore-js.git"
+  },
+  "bugs": {
+    "url": "https://github.com/sigstore/sigstore-js/issues"
+  },
+  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/tuf#readme",
+  "publishConfig": {
+    "provenance": true
+  },
+  "devDependencies": {
+    "@sigstore/jest": "^0.0.0",
+    "@tufjs/repo-mock": "^1.1.0",
+    "@types/make-fetch-happen": "^10.0.0"
+  },
+  "dependencies": {
+    "@sigstore/protobuf-specs": "^0.2.0",
+    "tuf-js": "^1.1.7"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  }
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/store/public-good-instance-root.json b/node_modules/sigstore/node_modules/@sigstore/tuf/store/public-good-instance-root.json
new file mode 100644
index 0000000000000..e95c7e88cdf09
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/tuf/store/public-good-instance-root.json
@@ -0,0 +1 @@
+{"signed":{"_type":"root","spec_version":"1.0","version":7,"expires":"2023-10-04T13:08:11Z","keys":{"25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"}},"2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"}},"45b283825eb184cabd582eb17b74fc8ed404f68cf452acabdad2ed6f90ce216b":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAELrWvNt94v4R085ELeeCMxHp7PldF\n0/T1GxukUh2ODuggLGJE0pc1e8CSBf6CS91Fwo9FUOuRsjBUld+VqSyCdQ==\n-----END PUBLIC KEY-----\n"}},"7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"}},"e1863ba02070322ebc626dcecf9d881a3a38c35c3b41a83765b6ad6c37eaec2a":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"}},"f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"}},"ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC 
KEY-----\n"}}},"roles":{"root":{"keyids":["ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c","25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de"],"threshold":3},"snapshot":{"keyids":["45b283825eb184cabd582eb17b74fc8ed404f68cf452acabdad2ed6f90ce216b"],"threshold":1},"targets":{"keyids":["ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c","25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de"],"threshold":3},"timestamp":{"keyids":["e1863ba02070322ebc626dcecf9d881a3a38c35c3b41a83765b6ad6c37eaec2a"],"threshold":1}},"consistent_snapshot":true},"signatures":[{"keyid":"25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","sig":"3046022100c0610c0055ce5c4a52d054d7322e7b514d55baf44423d63aa4daa077cc60fd1f022100a097f2803f090fb66c42ead915a2c46ebe7db53a32bf18f2188275cc936f8bdd"},{"keyid":"f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","sig":"304502203134f0468810299d5493a867c40630b341296b92e59c29821311d353343bb3a4022100e667ae3d304e7e3da0894c7425f6b9ecd917106841280e5cf6f3496ad5f8f68e"},{"keyid":"7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","sig":"3045022037fe5f45426f21eaaf4730d2136f2b1611d6379688f79b9d1e3f61719997135c022100b63b022d7b79d4694b96f416d88aa4d7b1a3bff8a01f4fb51e0f42137c7d2d06"},{"keyid":"2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de","sig":"3044022007cc8fcc4940809f2751ad5b535f4c5f53f5b4952f5b5696b09668e743306ac1022006dfcdf94e94c92163eeb1b47796db62cedaa730aa13aa61b573fe23714730f2"}]}
diff --git a/node_modules/sigstore/node_modules/@tufjs/canonical-json/LICENSE b/node_modules/sigstore/node_modules/@tufjs/canonical-json/LICENSE
new file mode 100644
index 0000000000000..420700f5d3765
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/canonical-json/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 GitHub and the TUF Contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/@tufjs/canonical-json/lib/index.js b/node_modules/sigstore/node_modules/@tufjs/canonical-json/lib/index.js
new file mode 100644
index 0000000000000..d480696de1f6c
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/canonical-json/lib/index.js
@@ -0,0 +1,64 @@
+const COMMA = ',';
+const COLON = ':';
+const LEFT_SQUARE_BRACKET = '[';
+const RIGHT_SQUARE_BRACKET = ']';
+const LEFT_CURLY_BRACKET = '{';
+const RIGHT_CURLY_BRACKET = '}';
+
+// Recursively encodes the supplied object according to the canonical JSON form
+// as specified at http://wiki.laptop.org/go/Canonical_JSON. It's a restricted
+// dialect of JSON in which keys are lexically sorted, floats are not allowed,
+// and only double quotes and backslashes are escaped.
+function canonicalize(object) {
+  const buffer = [];
+  if (typeof object === 'string') {
+    buffer.push(canonicalizeString(object));
+  } else if (typeof object === 'boolean') {
+    buffer.push(JSON.stringify(object));
+  } else if (Number.isInteger(object)) {
+    buffer.push(JSON.stringify(object));
+  } else if (object === null) {
+    buffer.push(JSON.stringify(object));
+  } else if (Array.isArray(object)) {
+    buffer.push(LEFT_SQUARE_BRACKET);
+    let first = true;
+    object.forEach((element) => {
+      if (!first) {
+        buffer.push(COMMA);
+      }
+      first = false;
+      buffer.push(canonicalize(element));
+    });
+    buffer.push(RIGHT_SQUARE_BRACKET);
+  } else if (typeof object === 'object') {
+    buffer.push(LEFT_CURLY_BRACKET);
+    let first = true;
+    Object.keys(object)
+      .sort()
+      .forEach((property) => {
+        if (!first) {
+          buffer.push(COMMA);
+        }
+        first = false;
+        buffer.push(canonicalizeString(property));
+        buffer.push(COLON);
+        buffer.push(canonicalize(object[property]));
+      });
+    buffer.push(RIGHT_CURLY_BRACKET);
+  } else {
+    throw new TypeError('cannot encode ' + object.toString());
+  }
+
+  return buffer.join('');
+}
+
+// String canonicalization consists of escaping backslash (\) and double
+// quote (") characters and wrapping the resulting string in double quotes.
+function canonicalizeString(string) {
+  const escapedString = string.replace(/\\/g, '\\\\').replace(/"/g, '\\"');
+  return '"' + escapedString + '"';
+}
+
+module.exports = {
+  canonicalize,
+};
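+// Illustrative usage sketch, not part of the upstream module: canonicalize
+// sorts object keys lexically and escapes only backslashes and double
+// quotes, so structurally equal objects serialize to identical bytes:
+//
+//   const { canonicalize } = require('@tufjs/canonical-json');
+//   canonicalize({ b: 1, a: [true, null] });  // => '{"a":[true,null],"b":1}'
+//   canonicalize({ a: [true, null], b: 1 });  // => same string; key order is irrelevant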
diff --git a/node_modules/sigstore/node_modules/@tufjs/canonical-json/package.json b/node_modules/sigstore/node_modules/@tufjs/canonical-json/package.json
new file mode 100644
index 0000000000000..688c9b93c3a4e
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/canonical-json/package.json
@@ -0,0 +1,39 @@
+{
+  "name": "@tufjs/canonical-json",
+  "version": "1.0.0",
+  "description": "OLPC JSON canonicalization",
+  "main": "lib/index.js",
+  "typings": "lib/index.d.ts",
+  "license": "MIT",
+  "keywords": [
+    "json",
+    "canonical",
+    "canonicalize",
+    "canonicalization",
+    "crypto",
+    "signature",
+    "olpc"
+  ],
+  "author": "bdehamer@github.com",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/theupdateframework/tuf-js.git"
+  },
+  "homepage": "https://github.com/theupdateframework/tuf-js/packages/canonical-json#readme",
+  "bugs": {
+    "url": "https://github.com/theupdateframework/tuf-js/issues"
+  },
+  "files": [
+    "lib/"
+  ],
+  "scripts": {
+    "test": "jest"
+  },
+  "devDependencies": {
+    "@types/node": "^18.14.1",
+    "typescript": "^4.9.5"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  }
+}
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/LICENSE b/node_modules/sigstore/node_modules/@tufjs/models/LICENSE
new file mode 100644
index 0000000000000..420700f5d3765
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 GitHub and the TUF Contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/base.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/base.js
new file mode 100644
index 0000000000000..d89a089c33092
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/base.js
@@ -0,0 +1,83 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signed = exports.isMetadataKind = exports.MetadataKind = void 0;
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const utils_1 = require("./utils");
+const SPECIFICATION_VERSION = ['1', '0', '31'];
+var MetadataKind;
+(function (MetadataKind) {
+    MetadataKind["Root"] = "root";
+    MetadataKind["Timestamp"] = "timestamp";
+    MetadataKind["Snapshot"] = "snapshot";
+    MetadataKind["Targets"] = "targets";
+})(MetadataKind = exports.MetadataKind || (exports.MetadataKind = {}));
+function isMetadataKind(value) {
+    return (typeof value === 'string' &&
+        Object.values(MetadataKind).includes(value));
+}
+exports.isMetadataKind = isMetadataKind;
+/***
+ * A base class for the signed part of TUF metadata.
+ *
+ * Objects with base class Signed are usually included in a ``Metadata`` object
+ * on the signed attribute. This class provides attributes and methods that
+ * are common for all TUF metadata types (roles).
+ */
+class Signed {
+    constructor(options) {
+        this.specVersion = options.specVersion || SPECIFICATION_VERSION.join('.');
+        const specList = this.specVersion.split('.');
+        if (!(specList.length === 2 || specList.length === 3) ||
+            !specList.every((item) => isNumeric(item))) {
+            throw new error_1.ValueError('Failed to parse specVersion');
+        }
+        // major version must match
+        if (specList[0] != SPECIFICATION_VERSION[0]) {
+            throw new error_1.ValueError('Unsupported specVersion');
+        }
+        this.expires = options.expires || new Date().toISOString();
+        this.version = options.version || 1;
+        this.unrecognizedFields = options.unrecognizedFields || {};
+    }
+    equals(other) {
+        if (!(other instanceof Signed)) {
+            return false;
+        }
+        return (this.specVersion === other.specVersion &&
+            this.expires === other.expires &&
+            this.version === other.version &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    isExpired(referenceTime) {
+        if (!referenceTime) {
+            referenceTime = new Date();
+        }
+        return referenceTime >= new Date(this.expires);
+    }
+    static commonFieldsFromJSON(data) {
+        const { spec_version, expires, version, ...rest } = data;
+        if (utils_1.guard.isDefined(spec_version) && !(typeof spec_version === 'string')) {
+            throw new TypeError('spec_version must be a string');
+        }
+        if (utils_1.guard.isDefined(expires) && !(typeof expires === 'string')) {
+            throw new TypeError('expires must be a string');
+        }
+        if (utils_1.guard.isDefined(version) && !(typeof version === 'number')) {
+            throw new TypeError('version must be a number');
+        }
+        return {
+            specVersion: spec_version,
+            expires,
+            version,
+            unrecognizedFields: rest,
+        };
+    }
+}
+exports.Signed = Signed;
+function isNumeric(str) {
+    return !isNaN(Number(str));
+}
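+// Illustrative sketch, not part of the upstream module: subclasses use
+// Signed.commonFieldsFromJSON to translate the snake_case wire format into
+// constructor options, preserving unknown fields:
+//
+//   Signed.commonFieldsFromJSON({
+//     spec_version: '1.0.31',
+//     expires: '2024-01-01T00:00:00Z',
+//     version: 7,
+//     extra: 'kept',
+//   });
+//   // => { specVersion: '1.0.31', expires: '2024-01-01T00:00:00Z',
+//   //      version: 7, unrecognizedFields: { extra: 'kept' } }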
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js
new file mode 100644
index 0000000000000..7165f1e244393
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js
@@ -0,0 +1,115 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Delegations = void 0;
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const key_1 = require("./key");
+const role_1 = require("./role");
+const utils_1 = require("./utils");
+/**
+ * A container object storing information about all delegations.
+ *
+ * Targets roles that are trusted to provide signed metadata files
+ * describing targets with designated pathnames and/or further delegations.
+ */
+class Delegations {
+    constructor(options) {
+        this.keys = options.keys;
+        this.unrecognizedFields = options.unrecognizedFields || {};
+        if (options.roles) {
+            if (Object.keys(options.roles).some((roleName) => role_1.TOP_LEVEL_ROLE_NAMES.includes(roleName))) {
+                throw new error_1.ValueError('Delegated role name conflicts with top-level role name');
+            }
+        }
+        this.succinctRoles = options.succinctRoles;
+        this.roles = options.roles;
+    }
+    equals(other) {
+        if (!(other instanceof Delegations)) {
+            return false;
+        }
+        return (util_1.default.isDeepStrictEqual(this.keys, other.keys) &&
+            util_1.default.isDeepStrictEqual(this.roles, other.roles) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields) &&
+            util_1.default.isDeepStrictEqual(this.succinctRoles, other.succinctRoles));
+    }
+    *rolesForTarget(targetPath) {
+        if (this.roles) {
+            for (const role of Object.values(this.roles)) {
+                if (role.isDelegatedPath(targetPath)) {
+                    yield { role: role.name, terminating: role.terminating };
+                }
+            }
+        }
+        else if (this.succinctRoles) {
+            yield {
+                role: this.succinctRoles.getRoleForTarget(targetPath),
+                terminating: true,
+            };
+        }
+    }
+    toJSON() {
+        const json = {
+            keys: keysToJSON(this.keys),
+            ...this.unrecognizedFields,
+        };
+        if (this.roles) {
+            json.roles = rolesToJSON(this.roles);
+        }
+        else if (this.succinctRoles) {
+            json.succinct_roles = this.succinctRoles.toJSON();
+        }
+        return json;
+    }
+    static fromJSON(data) {
+        const { keys, roles, succinct_roles, ...unrecognizedFields } = data;
+        let succinctRoles;
+        if (utils_1.guard.isObject(succinct_roles)) {
+            succinctRoles = role_1.SuccinctRoles.fromJSON(succinct_roles);
+        }
+        return new Delegations({
+            keys: keysFromJSON(keys),
+            roles: rolesFromJSON(roles),
+            unrecognizedFields,
+            succinctRoles,
+        });
+    }
+}
+exports.Delegations = Delegations;
+function keysToJSON(keys) {
+    return Object.entries(keys).reduce((acc, [keyId, key]) => ({
+        ...acc,
+        [keyId]: key.toJSON(),
+    }), {});
+}
+function rolesToJSON(roles) {
+    return Object.values(roles).map((role) => role.toJSON());
+}
+function keysFromJSON(data) {
+    if (!utils_1.guard.isObjectRecord(data)) {
+        throw new TypeError('keys is malformed');
+    }
+    return Object.entries(data).reduce((acc, [keyID, keyData]) => ({
+        ...acc,
+        [keyID]: key_1.Key.fromJSON(keyID, keyData),
+    }), {});
+}
+function rolesFromJSON(data) {
+    let roleMap;
+    if (utils_1.guard.isDefined(data)) {
+        if (!utils_1.guard.isObjectArray(data)) {
+            throw new TypeError('roles is malformed');
+        }
+        roleMap = data.reduce((acc, role) => {
+            const delegatedRole = role_1.DelegatedRole.fromJSON(role);
+            return {
+                ...acc,
+                [delegatedRole.name]: delegatedRole,
+            };
+        }, {});
+    }
+    return roleMap;
+}
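+// Illustrative sketch, not part of the upstream module: rolesForTarget is a
+// generator, so callers iterate it to walk the delegation graph. Assuming
+// `delegations` holds a standard (non-succinct) role map:
+//
+//   for (const { role, terminating } of delegations.rolesForTarget('files/a.txt')) {
+//     // `role` is the name of a delegated role whose patterns matched;
+//     // a terminating role ends the search for further delegations.
+//   }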
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/error.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/error.js
new file mode 100644
index 0000000000000..ba80698747ba0
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/error.js
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.UnsignedMetadataError = exports.RepositoryError = exports.ValueError = void 0;
+// An error about insufficient values
+class ValueError extends Error {
+}
+exports.ValueError = ValueError;
+// An error with a repository's state, such as a missing file.
+// It covers all exceptions that come from the repository side when
+// looking from the perspective of users of metadata API or ngclient.
+class RepositoryError extends Error {
+}
+exports.RepositoryError = RepositoryError;
+// An error about metadata object with insufficient threshold of signatures.
+class UnsignedMetadataError extends RepositoryError {
+}
+exports.UnsignedMetadataError = UnsignedMetadataError;
+// An error while checking the length and hash values of an object.
+class LengthOrHashMismatchError extends RepositoryError {
+}
+exports.LengthOrHashMismatchError = LengthOrHashMismatchError;
+class CryptoError extends Error {
+}
+exports.CryptoError = CryptoError;
+class UnsupportedAlgorithmError extends CryptoError {
+}
+exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError;
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/file.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/file.js
new file mode 100644
index 0000000000000..b35fe5950bbb7
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/file.js
@@ -0,0 +1,183 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TargetFile = exports.MetaFile = void 0;
+const crypto_1 = __importDefault(require("crypto"));
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const utils_1 = require("./utils");
+// A container with information about a particular metadata file.
+//
+// This class is used for Timestamp and Snapshot metadata.
+class MetaFile {
+    constructor(opts) {
+        if (opts.version <= 0) {
+            throw new error_1.ValueError('Metafile version must be at least 1');
+        }
+        if (opts.length !== undefined) {
+            validateLength(opts.length);
+        }
+        this.version = opts.version;
+        this.length = opts.length;
+        this.hashes = opts.hashes;
+        this.unrecognizedFields = opts.unrecognizedFields || {};
+    }
+    equals(other) {
+        if (!(other instanceof MetaFile)) {
+            return false;
+        }
+        return (this.version === other.version &&
+            this.length === other.length &&
+            util_1.default.isDeepStrictEqual(this.hashes, other.hashes) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    verify(data) {
+        // Verifies that the given data matches the expected length.
+        if (this.length !== undefined) {
+            if (data.length !== this.length) {
+                throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${data.length}`);
+            }
+        }
+        // Verifies that the given data matches the supplied hashes.
+        if (this.hashes) {
+            Object.entries(this.hashes).forEach(([key, value]) => {
+                let hash;
+                try {
+                    hash = crypto_1.default.createHash(key);
+                }
+                catch (e) {
+                    throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`);
+                }
+                const observedHash = hash.update(data).digest('hex');
+                if (observedHash !== value) {
+                    throw new error_1.LengthOrHashMismatchError(`Expected hash ${value} but got ${observedHash}`);
+                }
+            });
+        }
+    }
+    toJSON() {
+        const json = {
+            version: this.version,
+            ...this.unrecognizedFields,
+        };
+        if (this.length !== undefined) {
+            json.length = this.length;
+        }
+        if (this.hashes) {
+            json.hashes = this.hashes;
+        }
+        return json;
+    }
+    static fromJSON(data) {
+        const { version, length, hashes, ...rest } = data;
+        if (typeof version !== 'number') {
+            throw new TypeError('version must be a number');
+        }
+        if (utils_1.guard.isDefined(length) && typeof length !== 'number') {
+            throw new TypeError('length must be a number');
+        }
+        if (utils_1.guard.isDefined(hashes) && !utils_1.guard.isStringRecord(hashes)) {
+            throw new TypeError('hashes must be string keys and values');
+        }
+        return new MetaFile({
+            version,
+            length,
+            hashes,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.MetaFile = MetaFile;
+// Container for info about a particular target file.
+//
+// This class is used for Target metadata.
+class TargetFile {
+    constructor(opts) {
+        validateLength(opts.length);
+        this.length = opts.length;
+        this.path = opts.path;
+        this.hashes = opts.hashes;
+        this.unrecognizedFields = opts.unrecognizedFields || {};
+    }
+    get custom() {
+        const custom = this.unrecognizedFields['custom'];
+        if (!custom || Array.isArray(custom) || !(typeof custom === 'object')) {
+            return {};
+        }
+        return custom;
+    }
+    equals(other) {
+        if (!(other instanceof TargetFile)) {
+            return false;
+        }
+        return (this.length === other.length &&
+            this.path === other.path &&
+            util_1.default.isDeepStrictEqual(this.hashes, other.hashes) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    async verify(stream) {
+        let observedLength = 0;
+        // Create a digest for each hash algorithm
+        const digests = Object.keys(this.hashes).reduce((acc, key) => {
+            try {
+                acc[key] = crypto_1.default.createHash(key);
+            }
+            catch (e) {
+                throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`);
+            }
+            return acc;
+        }, {});
+        // Read stream chunk by chunk
+        for await (const chunk of stream) {
+            // Keep running tally of stream length
+            observedLength += chunk.length;
+            // Append chunk to each digest
+            Object.values(digests).forEach((digest) => {
+                digest.update(chunk);
+            });
+        }
+        // Verify length matches expected value
+        if (observedLength !== this.length) {
+            throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${observedLength}`);
+        }
+        // Verify each digest matches expected value
+        Object.entries(digests).forEach(([key, value]) => {
+            const expected = this.hashes[key];
+            const actual = value.digest('hex');
+            if (actual !== expected) {
+                throw new error_1.LengthOrHashMismatchError(`Expected hash ${expected} but got ${actual}`);
+            }
+        });
+    }
+    toJSON() {
+        return {
+            length: this.length,
+            hashes: this.hashes,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(path, data) {
+        const { length, hashes, ...rest } = data;
+        if (typeof length !== 'number') {
+            throw new TypeError('length must be a number');
+        }
+        if (!utils_1.guard.isStringRecord(hashes)) {
+            throw new TypeError('hashes must have string keys and values');
+        }
+        return new TargetFile({
+            length,
+            path,
+            hashes,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.TargetFile = TargetFile;
+// Check that the supplied length is valid
+function validateLength(length) {
+    if (length < 0) {
+        throw new error_1.ValueError('Length must be at least 0');
+    }
+}
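+// Illustrative sketch, not part of the upstream module: MetaFile.verify
+// checks raw bytes against the declared length and hashes, throwing
+// LengthOrHashMismatchError on any mismatch:
+//
+//   const crypto = require('crypto');
+//   const data = Buffer.from('hello');
+//   const meta = MetaFile.fromJSON({
+//     version: 1,
+//     length: data.length,
+//     hashes: { sha256: crypto.createHash('sha256').update(data).digest('hex') },
+//   });
+//   meta.verify(data);                     // ok, silent
+//   meta.verify(Buffer.from('tampered'));  // throws LengthOrHashMismatchError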
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/index.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/index.js
new file mode 100644
index 0000000000000..a4dc783659f04
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/index.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = exports.Targets = exports.Snapshot = exports.Signature = exports.Root = exports.Metadata = exports.Key = exports.TargetFile = exports.MetaFile = exports.ValueError = exports.MetadataKind = void 0;
+var base_1 = require("./base");
+Object.defineProperty(exports, "MetadataKind", { enumerable: true, get: function () { return base_1.MetadataKind; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "ValueError", { enumerable: true, get: function () { return error_1.ValueError; } });
+var file_1 = require("./file");
+Object.defineProperty(exports, "MetaFile", { enumerable: true, get: function () { return file_1.MetaFile; } });
+Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } });
+var key_1 = require("./key");
+Object.defineProperty(exports, "Key", { enumerable: true, get: function () { return key_1.Key; } });
+var metadata_1 = require("./metadata");
+Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } });
+var root_1 = require("./root");
+Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } });
+var signature_1 = require("./signature");
+Object.defineProperty(exports, "Signature", { enumerable: true, get: function () { return signature_1.Signature; } });
+var snapshot_1 = require("./snapshot");
+Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } });
+var targets_1 = require("./targets");
+Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } });
+var timestamp_1 = require("./timestamp");
+Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } });
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/key.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/key.js
new file mode 100644
index 0000000000000..5e55b09d7c6dd
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/key.js
@@ -0,0 +1,85 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Key = void 0;
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const utils_1 = require("./utils");
+const key_1 = require("./utils/key");
+// A container class representing the public portion of a Key.
+class Key {
+    constructor(options) {
+        const { keyID, keyType, scheme, keyVal, unrecognizedFields } = options;
+        this.keyID = keyID;
+        this.keyType = keyType;
+        this.scheme = scheme;
+        this.keyVal = keyVal;
+        this.unrecognizedFields = unrecognizedFields || {};
+    }
+    // Verifies that metadata.signatures contains a signature made with this
+    // key and that it is correctly signed.
+    verifySignature(metadata) {
+        const signature = metadata.signatures[this.keyID];
+        if (!signature)
+            throw new error_1.UnsignedMetadataError('no signature for key found in metadata');
+        if (!this.keyVal.public)
+            throw new error_1.UnsignedMetadataError('no public key found');
+        const publicKey = (0, key_1.getPublicKey)({
+            keyType: this.keyType,
+            scheme: this.scheme,
+            keyVal: this.keyVal.public,
+        });
+        const signedData = metadata.signed.toJSON();
+        try {
+            if (!utils_1.crypto.verifySignature(signedData, publicKey, signature.sig)) {
+                throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`);
+            }
+        }
+        catch (error) {
+            if (error instanceof error_1.UnsignedMetadataError) {
+                throw error;
+            }
+            throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`);
+        }
+    }
+    equals(other) {
+        if (!(other instanceof Key)) {
+            return false;
+        }
+        return (this.keyID === other.keyID &&
+            this.keyType === other.keyType &&
+            this.scheme === other.scheme &&
+            util_1.default.isDeepStrictEqual(this.keyVal, other.keyVal) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    toJSON() {
+        return {
+            keytype: this.keyType,
+            scheme: this.scheme,
+            keyval: this.keyVal,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(keyID, data) {
+        const { keytype, scheme, keyval, ...rest } = data;
+        if (typeof keytype !== 'string') {
+            throw new TypeError('keytype must be a string');
+        }
+        if (typeof scheme !== 'string') {
+            throw new TypeError('scheme must be a string');
+        }
+        if (!utils_1.guard.isStringRecord(keyval)) {
+            throw new TypeError('keyval must be a string record');
+        }
+        return new Key({
+            keyID,
+            keyType: keytype,
+            scheme,
+            keyVal: keyval,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Key = Key;
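+// Illustrative sketch, not part of the upstream module: keys arrive on the
+// wire keyed by key ID with lowercase field names (the ID below is a
+// made-up placeholder):
+//
+//   const key = Key.fromJSON('abc123', {
+//     keytype: 'ecdsa-sha2-nistp256',
+//     scheme: 'ecdsa-sha2-nistp256',
+//     keyval: { public: '-----BEGIN PUBLIC KEY-----\n...' },
+//   });
+//   // key.keyID === 'abc123'; key.keyVal.public holds the PEM text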
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js
new file mode 100644
index 0000000000000..9668b6f14fa70
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js
@@ -0,0 +1,158 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Metadata = void 0;
+const canonical_json_1 = require("@tufjs/canonical-json");
+const util_1 = __importDefault(require("util"));
+const base_1 = require("./base");
+const error_1 = require("./error");
+const root_1 = require("./root");
+const signature_1 = require("./signature");
+const snapshot_1 = require("./snapshot");
+const targets_1 = require("./targets");
+const timestamp_1 = require("./timestamp");
+const utils_1 = require("./utils");
+/***
+ * A container for signed TUF metadata.
+ *
+ * Provides methods to convert to and from JSON, and to create and verify
+ * metadata signatures.
+ *
+ * ``Metadata[T]`` is a generic container type where T can be any one type of
+ * [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``]. The purpose of this
+ * is to allow static type checking of the signed attribute in code using
+ * Metadata::
+ *
+ * root_md = Metadata[Root].fromJSON("root.json")
+ * # root_md type is now Metadata[Root]. This means signed and its
+ * # attributes like consistent_snapshot are now statically typed and the
+ * # types can be verified by static type checkers and shown by IDEs
+ *
+ * Using a type constraint is not required, but without one T is not a
+ * specific type, so static type checking cannot happen. Note that the type
+ * constraint ``[Root]`` is not validated at runtime (as pure annotations are
+ * not available then).
+ *
+ * Apart from ``expires`` all of the arguments to the inner constructors have
+ * reasonable default values for new metadata.
+ */
+class Metadata {
+    constructor(signed, signatures, unrecognizedFields) {
+        this.signed = signed;
+        this.signatures = signatures || {};
+        this.unrecognizedFields = unrecognizedFields || {};
+    }
+    sign(signer, append = true) {
+        const bytes = Buffer.from((0, canonical_json_1.canonicalize)(this.signed.toJSON()));
+        const signature = signer(bytes);
+        if (!append) {
+            this.signatures = {};
+        }
+        this.signatures[signature.keyID] = signature;
+    }
+    verifyDelegate(delegatedRole, delegatedMetadata) {
+        let role;
+        let keys = {};
+        switch (this.signed.type) {
+            case base_1.MetadataKind.Root:
+                keys = this.signed.keys;
+                role = this.signed.roles[delegatedRole];
+                break;
+            case base_1.MetadataKind.Targets:
+                if (!this.signed.delegations) {
+                    throw new error_1.ValueError(`No delegations found for ${delegatedRole}`);
+                }
+                keys = this.signed.delegations.keys;
+                if (this.signed.delegations.roles) {
+                    role = this.signed.delegations.roles[delegatedRole];
+                }
+                else if (this.signed.delegations.succinctRoles) {
+                    if (this.signed.delegations.succinctRoles.isDelegatedRole(delegatedRole)) {
+                        role = this.signed.delegations.succinctRoles;
+                    }
+                }
+                break;
+            default:
+                throw new TypeError('invalid metadata type');
+        }
+        if (!role) {
+            throw new error_1.ValueError(`no delegation found for ${delegatedRole}`);
+        }
+        const signingKeys = new Set();
+        role.keyIDs.forEach((keyID) => {
+            const key = keys[keyID];
+            // If we don't have the key, continue checking other keys
+            if (!key) {
+                return;
+            }
+            try {
+                key.verifySignature(delegatedMetadata);
+                signingKeys.add(key.keyID);
+            }
+            catch (error) {
+                // continue
+            }
+        });
+        if (signingKeys.size < role.threshold) {
+            throw new error_1.UnsignedMetadataError(`${delegatedRole} was signed by ${signingKeys.size}/${role.threshold} keys`);
+        }
+    }
+    equals(other) {
+        if (!(other instanceof Metadata)) {
+            return false;
+        }
+        return (this.signed.equals(other.signed) &&
+            util_1.default.isDeepStrictEqual(this.signatures, other.signatures) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    toJSON() {
+        const signatures = Object.values(this.signatures).map((signature) => {
+            return signature.toJSON();
+        });
+        return {
+            signatures,
+            signed: this.signed.toJSON(),
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(type, data) {
+        const { signed, signatures, ...rest } = data;
+        if (!utils_1.guard.isDefined(signed) || !utils_1.guard.isObject(signed)) {
+            throw new TypeError('signed is not defined');
+        }
+        if (type !== signed._type) {
+            throw new error_1.ValueError(`expected '${type}', got ${signed['_type']}`);
+        }
+        let signedObj;
+        switch (type) {
+            case base_1.MetadataKind.Root:
+                signedObj = root_1.Root.fromJSON(signed);
+                break;
+            case base_1.MetadataKind.Timestamp:
+                signedObj = timestamp_1.Timestamp.fromJSON(signed);
+                break;
+            case base_1.MetadataKind.Snapshot:
+                signedObj = snapshot_1.Snapshot.fromJSON(signed);
+                break;
+            case base_1.MetadataKind.Targets:
+                signedObj = targets_1.Targets.fromJSON(signed);
+                break;
+            default:
+                throw new TypeError('invalid metadata type');
+        }
+        const sigMap = signaturesFromJSON(signatures);
+        return new Metadata(signedObj, sigMap, rest);
+    }
+}
+exports.Metadata = Metadata;
+function signaturesFromJSON(data) {
+    if (!utils_1.guard.isObjectArray(data)) {
+        throw new TypeError('signatures is not an array');
+    }
+    return data.reduce((acc, sigData) => {
+        const signature = signature_1.Signature.fromJSON(sigData);
+        return { ...acc, [signature.keyID]: signature };
+    }, {});
+}
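+// Illustrative sketch, not part of the upstream module: callers pair the
+// expected MetadataKind with a parsed document so the signed payload is
+// type-checked and dispatched to the right model. Assuming `doc` is a
+// parsed, well-formed root.json object:
+//
+//   const md = Metadata.fromJSON(base_1.MetadataKind.Root, doc);
+//   md.verifyDelegate('root', md);  // root metadata is self-signed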
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/role.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/role.js
new file mode 100644
index 0000000000000..f7ddbc6fe3f38
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/role.js
@@ -0,0 +1,299 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SuccinctRoles = exports.DelegatedRole = exports.Role = exports.TOP_LEVEL_ROLE_NAMES = void 0;
+const crypto_1 = __importDefault(require("crypto"));
+const minimatch_1 = require("minimatch");
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const utils_1 = require("./utils");
+exports.TOP_LEVEL_ROLE_NAMES = [
+    'root',
+    'targets',
+    'snapshot',
+    'timestamp',
+];
+/**
+ * Container that defines which keys are required to sign roles metadata.
+ *
+ * Role defines how many keys are required to successfully sign the roles
+ * metadata, and which keys are accepted.
+ */
+class Role {
+    constructor(options) {
+        const { keyIDs, threshold, unrecognizedFields } = options;
+        if (hasDuplicates(keyIDs)) {
+            throw new error_1.ValueError('duplicate key IDs found');
+        }
+        if (threshold < 1) {
+            throw new error_1.ValueError('threshold must be at least 1');
+        }
+        this.keyIDs = keyIDs;
+        this.threshold = threshold;
+        this.unrecognizedFields = unrecognizedFields || {};
+    }
+    equals(other) {
+        if (!(other instanceof Role)) {
+            return false;
+        }
+        return (this.threshold === other.threshold &&
+            util_1.default.isDeepStrictEqual(this.keyIDs, other.keyIDs) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    toJSON() {
+        return {
+            keyids: this.keyIDs,
+            threshold: this.threshold,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(data) {
+        const { keyids, threshold, ...rest } = data;
+        if (!utils_1.guard.isStringArray(keyids)) {
+            throw new TypeError('keyids must be an array');
+        }
+        if (typeof threshold !== 'number') {
+            throw new TypeError('threshold must be a number');
+        }
+        return new Role({
+            keyIDs: keyids,
+            threshold,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Role = Role;
+function hasDuplicates(array) {
+    return new Set(array).size !== array.length;
+}
+/**
+ * A container with information about a delegated role.
+ *
+ * A delegation can happen in two ways:
+ *   - ``paths`` is set: delegates targets matching any path pattern in ``paths``
+ *   - ``pathHashPrefixes`` is set: delegates targets whose target path hash
+ *      starts with any of the prefixes in ``pathHashPrefixes``
+ *
+ *   ``paths`` and ``pathHashPrefixes`` are mutually exclusive: they cannot
+ *   both be set, but at least one of them must be set.
+ */
+class DelegatedRole extends Role {
+    constructor(opts) {
+        super(opts);
+        const { name, terminating, paths, pathHashPrefixes } = opts;
+        this.name = name;
+        this.terminating = terminating;
+        if (opts.paths && opts.pathHashPrefixes) {
+            throw new error_1.ValueError('paths and pathHashPrefixes are mutually exclusive');
+        }
+        this.paths = paths;
+        this.pathHashPrefixes = pathHashPrefixes;
+    }
+    equals(other) {
+        if (!(other instanceof DelegatedRole)) {
+            return false;
+        }
+        return (super.equals(other) &&
+            this.name === other.name &&
+            this.terminating === other.terminating &&
+            util_1.default.isDeepStrictEqual(this.paths, other.paths) &&
+            util_1.default.isDeepStrictEqual(this.pathHashPrefixes, other.pathHashPrefixes));
+    }
+    isDelegatedPath(targetFilepath) {
+        if (this.paths) {
+            return this.paths.some((pathPattern) => isTargetInPathPattern(targetFilepath, pathPattern));
+        }
+        if (this.pathHashPrefixes) {
+            const hasher = crypto_1.default.createHash('sha256');
+            const pathHash = hasher.update(targetFilepath).digest('hex');
+            return this.pathHashPrefixes.some((pathHashPrefix) => pathHash.startsWith(pathHashPrefix));
+        }
+        return false;
+    }
+    toJSON() {
+        const json = {
+            ...super.toJSON(),
+            name: this.name,
+            terminating: this.terminating,
+        };
+        if (this.paths) {
+            json.paths = this.paths;
+        }
+        if (this.pathHashPrefixes) {
+            json.path_hash_prefixes = this.pathHashPrefixes;
+        }
+        return json;
+    }
+    static fromJSON(data) {
+        const { keyids, threshold, name, terminating, paths, path_hash_prefixes, ...rest } = data;
+        if (!utils_1.guard.isStringArray(keyids)) {
+            throw new TypeError('keyids must be an array of strings');
+        }
+        if (typeof threshold !== 'number') {
+            throw new TypeError('threshold must be a number');
+        }
+        if (typeof name !== 'string') {
+            throw new TypeError('name must be a string');
+        }
+        if (typeof terminating !== 'boolean') {
+            throw new TypeError('terminating must be a boolean');
+        }
+        if (utils_1.guard.isDefined(paths) && !utils_1.guard.isStringArray(paths)) {
+            throw new TypeError('paths must be an array of strings');
+        }
+        if (utils_1.guard.isDefined(path_hash_prefixes) &&
+            !utils_1.guard.isStringArray(path_hash_prefixes)) {
+            throw new TypeError('path_hash_prefixes must be an array of strings');
+        }
+        return new DelegatedRole({
+            keyIDs: keyids,
+            threshold,
+            name,
+            terminating,
+            paths,
+            pathHashPrefixes: path_hash_prefixes,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.DelegatedRole = DelegatedRole;
+// JS version of Ruby's Array#zip
+const zip = (a, b) => a.map((k, i) => [k, b[i]]);
+function isTargetInPathPattern(target, pattern) {
+    const targetParts = target.split('/');
+    const patternParts = pattern.split('/');
+    if (patternParts.length != targetParts.length) {
+        return false;
+    }
+    return zip(targetParts, patternParts).every(([targetPart, patternPart]) => (0, minimatch_1.minimatch)(targetPart, patternPart));
+}
+/**
+ * Succinctly defines a hash bin delegation graph.
+ *
+ * A ``SuccinctRoles`` object describes a delegation graph that covers all
+ * targets, distributing them uniformly over the delegated roles (i.e. bins)
+ * in the graph.
+ *
+ * The total number of bins is 2 to the power of the passed ``bit_length``.
+ *
+ * Bin names are the concatenation of the passed ``name_prefix`` and a
+ * zero-padded hex representation of the bin index separated by a hyphen.
+ *
+ * The passed ``keyids`` and ``threshold`` are used for each bin, and each bin
+ * is 'terminating'.
+ *
+ * For details: https://github.com/theupdateframework/taps/blob/master/tap15.md
+ */
+class SuccinctRoles extends Role {
+    constructor(opts) {
+        super(opts);
+        const { bitLength, namePrefix } = opts;
+        if (bitLength <= 0 || bitLength > 32) {
+            throw new error_1.ValueError('bitLength must be between 1 and 32');
+        }
+        this.bitLength = bitLength;
+        this.namePrefix = namePrefix;
+        // Calculate the suffix_len value based on the total number of bins in
+        // hex. If bit_length = 10 then number_of_bins = 1024, bin names will
+        // have a suffix between "000" and "3ff" in hex, and suffix_len will be
+        // 3, meaning bin number 3 will have the suffix "003".
+        this.numberOfBins = Math.pow(2, bitLength);
+        // suffix_len is calculated based on "number_of_bins - 1" as the name
+        // of the last bin contains the number "number_of_bins -1" as a suffix.
+        this.suffixLen = (this.numberOfBins - 1).toString(16).length;
+    }
+    equals(other) {
+        if (!(other instanceof SuccinctRoles)) {
+            return false;
+        }
+        return (super.equals(other) &&
+            this.bitLength === other.bitLength &&
+            this.namePrefix === other.namePrefix);
+    }
+    /**
+     * Calculates the name of the delegated role responsible for 'target_filepath'.
+     *
+     * The target at path 'target_filepath' is assigned to a bin by casting
+     * the left-most 'bit_length' bits of the file path hash digest to an
+     * int, using it as a bin index between 0 and '2**bit_length - 1'.
+     *
+     * Args:
+     *  target_filepath: URL path to a target file, relative to a base
+     *  targets URL.
+     */
+    getRoleForTarget(targetFilepath) {
+        const hasher = crypto_1.default.createHash('sha256');
+        const hasherBuffer = hasher.update(targetFilepath).digest();
+        // can't ever need more than 4 bytes (32 bits).
+        const hashBytes = hasherBuffer.subarray(0, 4);
+        // Right shift hash bytes, so that we only have the leftmost
+        // bit_length bits that we care about.
+        const shiftValue = 32 - this.bitLength;
+        const binNumber = hashBytes.readUInt32BE() >>> shiftValue;
+        // Add zero padding if necessary and cast to hex the suffix.
+        const suffix = binNumber.toString(16).padStart(this.suffixLen, '0');
+        return `${this.namePrefix}-${suffix}`;
+    }
+    *getRoles() {
+        for (let i = 0; i < this.numberOfBins; i++) {
+            const suffix = i.toString(16).padStart(this.suffixLen, '0');
+            yield `${this.namePrefix}-${suffix}`;
+        }
+    }
+    /**
+     * Determines whether the given ``role_name`` is in one of
+     * the delegated roles that ``SuccinctRoles`` represents.
+     *
+     * Args:
+     *  role_name: The name of the role to check against.
+     */
+    isDelegatedRole(roleName) {
+        const desiredPrefix = this.namePrefix + '-';
+        if (!roleName.startsWith(desiredPrefix)) {
+            return false;
+        }
+        const suffix = roleName.slice(desiredPrefix.length, roleName.length);
+        if (suffix.length != this.suffixLen) {
+            return false;
+        }
+        // make sure the suffix is a hex string
+        if (!suffix.match(/^[0-9a-fA-F]+$/)) {
+            return false;
+        }
+        const num = parseInt(suffix, 16);
+        return 0 <= num && num < this.numberOfBins;
+    }
+    toJSON() {
+        const json = {
+            ...super.toJSON(),
+            bit_length: this.bitLength,
+            name_prefix: this.namePrefix,
+        };
+        return json;
+    }
+    static fromJSON(data) {
+        const { keyids, threshold, bit_length, name_prefix, ...rest } = data;
+        if (!utils_1.guard.isStringArray(keyids)) {
+            throw new TypeError('keyids must be an array of strings');
+        }
+        if (typeof threshold !== 'number') {
+            throw new TypeError('threshold must be a number');
+        }
+        if (typeof bit_length !== 'number') {
+            throw new TypeError('bit_length must be a number');
+        }
+        if (typeof name_prefix !== 'string') {
+            throw new TypeError('name_prefix must be a string');
+        }
+        return new SuccinctRoles({
+            keyIDs: keyids,
+            threshold,
+            bitLength: bit_length,
+            namePrefix: name_prefix,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.SuccinctRoles = SuccinctRoles;
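// Illustrative usage sketch (not part of the vendored file): mapping a
// target path to a succinct-roles bin. The key ID is a hypothetical
// placeholder; everything else uses only the class above.
const bins = new SuccinctRoles({
    keyIDs: ['0123abcd'], // hypothetical key ID
    threshold: 1,
    bitLength: 8, // 2**8 = 256 bins, so suffixes are two hex digits
    namePrefix: 'bins',
});
console.log(bins.getRoleForTarget('foo/bar.txt')); // e.g. 'bins-4e' (sha256-derived)
console.log(bins.isDelegatedRole('bins-ff')); // true
console.log(bins.isDelegatedRole('bins-100')); // false: 3-char suffix for 8 bits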
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/root.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/root.js
new file mode 100644
index 0000000000000..36d0ef0f186d1
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/root.js
@@ -0,0 +1,116 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Root = void 0;
+const util_1 = __importDefault(require("util"));
+const base_1 = require("./base");
+const error_1 = require("./error");
+const key_1 = require("./key");
+const role_1 = require("./role");
+const utils_1 = require("./utils");
+/**
+ * A container for the signed part of root metadata.
+ *
+ * The top-level role and metadata file signed by the root keys.
+ * This role specifies trusted keys for all other top-level roles, which may further delegate trust.
+ */
+class Root extends base_1.Signed {
+    constructor(options) {
+        super(options);
+        this.type = base_1.MetadataKind.Root;
+        this.keys = options.keys || {};
+        this.consistentSnapshot = options.consistentSnapshot ?? true;
+        if (!options.roles) {
+            this.roles = role_1.TOP_LEVEL_ROLE_NAMES.reduce((acc, role) => ({
+                ...acc,
+                [role]: new role_1.Role({ keyIDs: [], threshold: 1 }),
+            }), {});
+        }
+        else {
+            const roleNames = new Set(Object.keys(options.roles));
+            if (!role_1.TOP_LEVEL_ROLE_NAMES.every((role) => roleNames.has(role))) {
+                throw new error_1.ValueError('missing top-level role');
+            }
+            this.roles = options.roles;
+        }
+    }
+    addKey(key, role) {
+        if (!this.roles[role]) {
+            throw new error_1.ValueError(`role ${role} does not exist`);
+        }
+        if (!this.roles[role].keyIDs.includes(key.keyID)) {
+            this.roles[role].keyIDs.push(key.keyID);
+        }
+        this.keys[key.keyID] = key;
+    }
+    equals(other) {
+        if (!(other instanceof Root)) {
+            return false;
+        }
+        return (super.equals(other) &&
+            this.consistentSnapshot === other.consistentSnapshot &&
+            util_1.default.isDeepStrictEqual(this.keys, other.keys) &&
+            util_1.default.isDeepStrictEqual(this.roles, other.roles));
+    }
+    toJSON() {
+        return {
+            _type: this.type,
+            spec_version: this.specVersion,
+            version: this.version,
+            expires: this.expires,
+            keys: keysToJSON(this.keys),
+            roles: rolesToJSON(this.roles),
+            consistent_snapshot: this.consistentSnapshot,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(data) {
+        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+        const { keys, roles, consistent_snapshot, ...rest } = unrecognizedFields;
+        if (typeof consistent_snapshot !== 'boolean') {
+            throw new TypeError('consistent_snapshot must be a boolean');
+        }
+        return new Root({
+            ...commonFields,
+            keys: keysFromJSON(keys),
+            roles: rolesFromJSON(roles),
+            consistentSnapshot: consistent_snapshot,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Root = Root;
+function keysToJSON(keys) {
+    return Object.entries(keys).reduce((acc, [keyID, key]) => ({ ...acc, [keyID]: key.toJSON() }), {});
+}
+function rolesToJSON(roles) {
+    return Object.entries(roles).reduce((acc, [roleName, role]) => ({ ...acc, [roleName]: role.toJSON() }), {});
+}
+function keysFromJSON(data) {
+    let keys;
+    if (utils_1.guard.isDefined(data)) {
+        if (!utils_1.guard.isObjectRecord(data)) {
+            throw new TypeError('keys must be an object');
+        }
+        keys = Object.entries(data).reduce((acc, [keyID, keyData]) => ({
+            ...acc,
+            [keyID]: key_1.Key.fromJSON(keyID, keyData),
+        }), {});
+    }
+    return keys;
+}
+function rolesFromJSON(data) {
+    let roles;
+    if (utils_1.guard.isDefined(data)) {
+        if (!utils_1.guard.isObjectRecord(data)) {
+            throw new TypeError('roles must be an object');
+        }
+        roles = Object.entries(data).reduce((acc, [roleName, roleData]) => ({
+            ...acc,
+            [roleName]: role_1.Role.fromJSON(roleData),
+        }), {});
+    }
+    return roles;
+}
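// Illustrative sketch (not part of the vendored file): a minimal Root
// round-trip. The common fields (_type, spec_version, version, expires) are
// parsed by Signed.commonFieldsFromJSON in ./base, which is not shown in
// this hunk, so their exact accepted formats are assumed here.
const emptyRole = { keyids: [], threshold: 1 };
const exampleRoot = Root.fromJSON({
    _type: 'root',
    spec_version: '1.0.0',
    version: 1,
    expires: '2030-01-01T00:00:00Z',
    consistent_snapshot: true,
    keys: {},
    // all four top-level roles must be present or the constructor throws
    roles: {
        root: emptyRole,
        timestamp: emptyRole,
        snapshot: emptyRole,
        targets: emptyRole,
    },
});
console.log(exampleRoot.toJSON().consistent_snapshot); // true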
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js
new file mode 100644
index 0000000000000..33eb204eb0835
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js
@@ -0,0 +1,38 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = void 0;
+/**
+ * A container class for information about a signature.
+ *
+ * Contains a signature and the keyid uniquely identifying the key used
+ * to generate the signature.
+ *
+ * Provides a `fromJSON` method to create a Signature from a JSON object.
+ */
+class Signature {
+    constructor(options) {
+        const { keyID, sig } = options;
+        this.keyID = keyID;
+        this.sig = sig;
+    }
+    toJSON() {
+        return {
+            keyid: this.keyID,
+            sig: this.sig,
+        };
+    }
+    static fromJSON(data) {
+        const { keyid, sig } = data;
+        if (typeof keyid !== 'string') {
+            throw new TypeError('keyid must be a string');
+        }
+        if (typeof sig !== 'string') {
+            throw new TypeError('sig must be a string');
+        }
+        return new Signature({
+            keyID: keyid,
+            sig: sig,
+        });
+    }
+}
+exports.Signature = Signature;
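// Illustrative sketch (not part of the vendored file): parsing one entry of
// the `signatures` array from a TUF metadata document. Both values are
// hypothetical placeholders.
const exampleSig = Signature.fromJSON({ keyid: 'deadbeef', sig: 'abcdef01' });
console.log(exampleSig.keyID); // 'deadbeef'
console.log(Signature.fromJSON(exampleSig.toJSON()).sig === exampleSig.sig); // true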
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js
new file mode 100644
index 0000000000000..e90ea8e729e4e
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js
@@ -0,0 +1,71 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Snapshot = void 0;
+const util_1 = __importDefault(require("util"));
+const base_1 = require("./base");
+const file_1 = require("./file");
+const utils_1 = require("./utils");
+/**
+ * A container for the signed part of snapshot metadata.
+ *
+ * Snapshot contains information about all target Metadata files.
+ * A top-level role that specifies the latest versions of all targets metadata files,
+ * and hence the latest versions of all targets (including any dependencies between them) on the repository.
+ */
+class Snapshot extends base_1.Signed {
+    constructor(opts) {
+        super(opts);
+        this.type = base_1.MetadataKind.Snapshot;
+        this.meta = opts.meta || { 'targets.json': new file_1.MetaFile({ version: 1 }) };
+    }
+    equals(other) {
+        if (!(other instanceof Snapshot)) {
+            return false;
+        }
+        return super.equals(other) && util_1.default.isDeepStrictEqual(this.meta, other.meta);
+    }
+    toJSON() {
+        return {
+            _type: this.type,
+            meta: metaToJSON(this.meta),
+            spec_version: this.specVersion,
+            version: this.version,
+            expires: this.expires,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(data) {
+        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+        const { meta, ...rest } = unrecognizedFields;
+        return new Snapshot({
+            ...commonFields,
+            meta: metaFromJSON(meta),
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Snapshot = Snapshot;
+function metaToJSON(meta) {
+    return Object.entries(meta).reduce((acc, [path, metadata]) => ({
+        ...acc,
+        [path]: metadata.toJSON(),
+    }), {});
+}
+function metaFromJSON(data) {
+    let meta;
+    if (utils_1.guard.isDefined(data)) {
+        if (!utils_1.guard.isObjectRecord(data)) {
+            throw new TypeError('meta field is malformed');
+        }
+        else {
+            meta = Object.entries(data).reduce((acc, [path, metadata]) => ({
+                ...acc,
+                [path]: file_1.MetaFile.fromJSON(metadata),
+            }), {});
+        }
+    }
+    return meta;
+}
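// Illustrative sketch (not part of the vendored file): parsing snapshot
// metadata. MetaFile.fromJSON (./file) and Signed.commonFieldsFromJSON
// (./base) are not shown in this hunk, so the accepted field shapes are
// assumed from their usage above.
const exampleSnapshot = Snapshot.fromJSON({
    _type: 'snapshot',
    spec_version: '1.0.0',
    version: 2,
    expires: '2030-01-01T00:00:00Z',
    meta: { 'targets.json': { version: 7 } },
});
console.log(exampleSnapshot.meta['targets.json'].version); // 7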
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js
new file mode 100644
index 0000000000000..54bd8f8c554af
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js
@@ -0,0 +1,92 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Targets = void 0;
+const util_1 = __importDefault(require("util"));
+const base_1 = require("./base");
+const delegations_1 = require("./delegations");
+const file_1 = require("./file");
+const utils_1 = require("./utils");
+// Container for the signed part of targets metadata.
+//
+// Targets contains verifying information about target files and also delegates
+// responsibility to other Targets roles.
+class Targets extends base_1.Signed {
+    constructor(options) {
+        super(options);
+        this.type = base_1.MetadataKind.Targets;
+        this.targets = options.targets || {};
+        this.delegations = options.delegations;
+    }
+    addTarget(target) {
+        this.targets[target.path] = target;
+    }
+    equals(other) {
+        if (!(other instanceof Targets)) {
+            return false;
+        }
+        return (super.equals(other) &&
+            util_1.default.isDeepStrictEqual(this.targets, other.targets) &&
+            util_1.default.isDeepStrictEqual(this.delegations, other.delegations));
+    }
+    toJSON() {
+        const json = {
+            _type: this.type,
+            spec_version: this.specVersion,
+            version: this.version,
+            expires: this.expires,
+            targets: targetsToJSON(this.targets),
+            ...this.unrecognizedFields,
+        };
+        if (this.delegations) {
+            json.delegations = this.delegations.toJSON();
+        }
+        return json;
+    }
+    static fromJSON(data) {
+        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+        const { targets, delegations, ...rest } = unrecognizedFields;
+        return new Targets({
+            ...commonFields,
+            targets: targetsFromJSON(targets),
+            delegations: delegationsFromJSON(delegations),
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Targets = Targets;
+function targetsToJSON(targets) {
+    return Object.entries(targets).reduce((acc, [path, target]) => ({
+        ...acc,
+        [path]: target.toJSON(),
+    }), {});
+}
+function targetsFromJSON(data) {
+    let targets;
+    if (utils_1.guard.isDefined(data)) {
+        if (!utils_1.guard.isObjectRecord(data)) {
+            throw new TypeError('targets must be an object');
+        }
+        else {
+            targets = Object.entries(data).reduce((acc, [path, target]) => ({
+                ...acc,
+                [path]: file_1.TargetFile.fromJSON(path, target),
+            }), {});
+        }
+    }
+    return targets;
+}
+function delegationsFromJSON(data) {
+    let delegations;
+    if (utils_1.guard.isDefined(data)) {
+        if (!utils_1.guard.isObject(data)) {
+            throw new TypeError('delegations must be an object');
+        }
+        else {
+            delegations = delegations_1.Delegations.fromJSON(data);
+        }
+    }
+    return delegations;
+}
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js
new file mode 100644
index 0000000000000..9880c4c9fc254
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js
@@ -0,0 +1,58 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+const base_1 = require("./base");
+const file_1 = require("./file");
+const utils_1 = require("./utils");
+/**
+ * A container for the signed part of timestamp metadata.
+ *
+ * A top-level role that specifies the latest version of the snapshot role metadata file,
+ * and hence the latest versions of all metadata and targets on the repository.
+ */
+class Timestamp extends base_1.Signed {
+    constructor(options) {
+        super(options);
+        this.type = base_1.MetadataKind.Timestamp;
+        this.snapshotMeta = options.snapshotMeta || new file_1.MetaFile({ version: 1 });
+    }
+    equals(other) {
+        if (!(other instanceof Timestamp)) {
+            return false;
+        }
+        return super.equals(other) && this.snapshotMeta.equals(other.snapshotMeta);
+    }
+    toJSON() {
+        return {
+            _type: this.type,
+            spec_version: this.specVersion,
+            version: this.version,
+            expires: this.expires,
+            meta: { 'snapshot.json': this.snapshotMeta.toJSON() },
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(data) {
+        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+        const { meta, ...rest } = unrecognizedFields;
+        return new Timestamp({
+            ...commonFields,
+            snapshotMeta: snapshotMetaFromJSON(meta),
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Timestamp = Timestamp;
+function snapshotMetaFromJSON(data) {
+    let snapshotMeta;
+    if (utils_1.guard.isDefined(data)) {
+        const snapshotData = data['snapshot.json'];
+        if (!utils_1.guard.isDefined(snapshotData) || !utils_1.guard.isObject(snapshotData)) {
+            throw new TypeError('missing snapshot.json in meta');
+        }
+        else {
+            snapshotMeta = file_1.MetaFile.fromJSON(snapshotData);
+        }
+    }
+    return snapshotMeta;
+}
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js
new file mode 100644
index 0000000000000..efe558852303c
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js
@@ -0,0 +1,33 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isObjectRecord = exports.isStringRecord = exports.isObjectArray = exports.isStringArray = exports.isObject = exports.isDefined = void 0;
+function isDefined(val) {
+    return val !== undefined;
+}
+exports.isDefined = isDefined;
+function isObject(value) {
+    return typeof value === 'object' && value !== null;
+}
+exports.isObject = isObject;
+function isStringArray(value) {
+    return Array.isArray(value) && value.every((v) => typeof v === 'string');
+}
+exports.isStringArray = isStringArray;
+function isObjectArray(value) {
+    return Array.isArray(value) && value.every(isObject);
+}
+exports.isObjectArray = isObjectArray;
+function isStringRecord(value) {
+    return (typeof value === 'object' &&
+        value !== null &&
+        Object.keys(value).every((k) => typeof k === 'string') &&
+        Object.values(value).every((v) => typeof v === 'string'));
+}
+exports.isStringRecord = isStringRecord;
+function isObjectRecord(value) {
+    return (typeof value === 'object' &&
+        value !== null &&
+        Object.keys(value).every((k) => typeof k === 'string') &&
+        Object.values(value).every((v) => typeof v === 'object' && v !== null));
+}
+exports.isObjectRecord = isObjectRecord;
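// Illustrative sketch (not part of the vendored file): how these guards
// classify untrusted JSON shapes during metadata parsing.
console.log(isStringArray(['keyid1', 'keyid2'])); // true
console.log(isStringRecord({ a: 'x', b: 'y' })); // true
console.log(isStringRecord({ a: 'x', b: 1 })); // false: non-string value
console.log(isObjectRecord({ a: {}, b: { c: 1 } })); // true
console.log(isObjectRecord({ a: null })); // false: null values are rejected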
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js
new file mode 100644
index 0000000000000..872aae28049c9
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js
@@ -0,0 +1,28 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.crypto = exports.guard = void 0;
+exports.guard = __importStar(require("./guard"));
+exports.crypto = __importStar(require("./verify"));
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js
new file mode 100644
index 0000000000000..1f795ba1a2733
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js
@@ -0,0 +1,143 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getPublicKey = void 0;
+const crypto_1 = __importDefault(require("crypto"));
+const error_1 = require("../error");
+const oid_1 = require("./oid");
+const ASN1_TAG_SEQUENCE = 0x30;
+const ASN1_TAG_BIT_STRING = 0x03;
+const NULL_BYTE = 0x00;
+const OID_EDDSA = '1.3.101.112';
+const OID_EC_PUBLIC_KEY = '1.2.840.10045.2.1';
+const OID_EC_CURVE_P256V1 = '1.2.840.10045.3.1.7';
+const PEM_HEADER = '-----BEGIN PUBLIC KEY-----';
+function getPublicKey(keyInfo) {
+    switch (keyInfo.keyType) {
+        case 'rsa':
+            return getRSAPublicKey(keyInfo);
+        case 'ed25519':
+            return getED25519PublicKey(keyInfo);
+        case 'ecdsa':
+        case 'ecdsa-sha2-nistp256':
+        case 'ecdsa-sha2-nistp384':
+            return getECDSAPublicKey(keyInfo);
+        default:
+            throw new error_1.UnsupportedAlgorithmError(`Unsupported key type: ${keyInfo.keyType}`);
+    }
+}
+exports.getPublicKey = getPublicKey;
+function getRSAPublicKey(keyInfo) {
+    // Only support PEM-encoded RSA keys
+    if (!keyInfo.keyVal.startsWith(PEM_HEADER)) {
+        throw new error_1.CryptoError('Invalid key format');
+    }
+    const key = crypto_1.default.createPublicKey(keyInfo.keyVal);
+    switch (keyInfo.scheme) {
+        case 'rsassa-pss-sha256':
+            return {
+                key: key,
+                padding: crypto_1.default.constants.RSA_PKCS1_PSS_PADDING,
+            };
+        default:
+            throw new error_1.UnsupportedAlgorithmError(`Unsupported RSA scheme: ${keyInfo.scheme}`);
+    }
+}
+function getED25519PublicKey(keyInfo) {
+    let key;
+    // If key is already PEM-encoded we can just parse it
+    if (keyInfo.keyVal.startsWith(PEM_HEADER)) {
+        key = crypto_1.default.createPublicKey(keyInfo.keyVal);
+    }
+    else {
+        // If key is not PEM-encoded it had better be hex
+        if (!isHex(keyInfo.keyVal)) {
+            throw new error_1.CryptoError('Invalid key format');
+        }
+        key = crypto_1.default.createPublicKey({
+            key: ed25519.hexToDER(keyInfo.keyVal),
+            format: 'der',
+            type: 'spki',
+        });
+    }
+    return { key };
+}
+function getECDSAPublicKey(keyInfo) {
+    let key;
+    // If key is already PEM-encoded we can just parse it
+    if (keyInfo.keyVal.startsWith(PEM_HEADER)) {
+        key = crypto_1.default.createPublicKey(keyInfo.keyVal);
+    }
+    else {
+        // If key is not PEM-encoded it had better be hex
+        if (!isHex(keyInfo.keyVal)) {
+            throw new error_1.CryptoError('Invalid key format');
+        }
+        key = crypto_1.default.createPublicKey({
+            key: ecdsa.hexToDER(keyInfo.keyVal),
+            format: 'der',
+            type: 'spki',
+        });
+    }
+    return { key };
+}
+const ed25519 = {
+    // Translates a hex key into its DER (SPKI) encoding for crypto.createPublicKey
+    // https://keygen.sh/blog/how-to-use-hexadecimal-ed25519-keys-in-node/
+    hexToDER: (hex) => {
+        const key = Buffer.from(hex, 'hex');
+        const oid = (0, oid_1.encodeOIDString)(OID_EDDSA);
+        // Create a byte sequence containing the OID and key
+        const elements = Buffer.concat([
+            Buffer.concat([
+                Buffer.from([ASN1_TAG_SEQUENCE]),
+                Buffer.from([oid.length]),
+                oid,
+            ]),
+            Buffer.concat([
+                Buffer.from([ASN1_TAG_BIT_STRING]),
+                Buffer.from([key.length + 1]),
+                Buffer.from([NULL_BYTE]),
+                key,
+            ]),
+        ]);
+        // Wrap up by creating a sequence of elements
+        const der = Buffer.concat([
+            Buffer.from([ASN1_TAG_SEQUENCE]),
+            Buffer.from([elements.length]),
+            elements,
+        ]);
+        return der;
+    },
+};
+const ecdsa = {
+    hexToDER: (hex) => {
+        const key = Buffer.from(hex, 'hex');
+        const bitString = Buffer.concat([
+            Buffer.from([ASN1_TAG_BIT_STRING]),
+            Buffer.from([key.length + 1]),
+            Buffer.from([NULL_BYTE]),
+            key,
+        ]);
+        const oids = Buffer.concat([
+            (0, oid_1.encodeOIDString)(OID_EC_PUBLIC_KEY),
+            (0, oid_1.encodeOIDString)(OID_EC_CURVE_P256V1),
+        ]);
+        const oidSequence = Buffer.concat([
+            Buffer.from([ASN1_TAG_SEQUENCE]),
+            Buffer.from([oids.length]),
+            oids,
+        ]);
+        // Wrap up by creating a sequence of elements
+        const der = Buffer.concat([
+            Buffer.from([ASN1_TAG_SEQUENCE]),
+            Buffer.from([oidSequence.length + bitString.length]),
+            oidSequence,
+            bitString,
+        ]);
+        return der;
+    },
+};
+const isHex = (key) => /^[0-9a-fA-F]+$/.test(key);
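// Illustrative sketch (not part of the vendored file): the hand-rolled DER
// wrapping above reproduces Node's own SPKI encoding for an ed25519 key.
const { publicKey } = crypto_1.default.generateKeyPairSync('ed25519');
const spki = publicKey.export({ format: 'der', type: 'spki' });
// the raw 32-byte key is the tail of the 44-byte SPKI structure
const rawHex = spki.subarray(spki.length - 32).toString('hex');
console.log(ed25519.hexToDER(rawHex).equals(spki)); // true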
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js
new file mode 100644
index 0000000000000..e1bb7af5e54fb
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.encodeOIDString = void 0;
+const ASN1_TAG_OID = 0x06;
+function encodeOIDString(oid) {
+    const parts = oid.split('.');
+    // The first two subidentifiers are encoded into the first byte
+    const first = parseInt(parts[0], 10) * 40 + parseInt(parts[1], 10);
+    const rest = [];
+    parts.slice(2).forEach((part) => {
+        const bytes = encodeVariableLengthInteger(parseInt(part, 10));
+        rest.push(...bytes);
+    });
+    const der = Buffer.from([first, ...rest]);
+    return Buffer.from([ASN1_TAG_OID, der.length, ...der]);
+}
+exports.encodeOIDString = encodeOIDString;
+function encodeVariableLengthInteger(value) {
+    const bytes = [];
+    let mask = 0x00;
+    while (value > 0) {
+        bytes.unshift((value & 0x7f) | mask);
+        value >>= 7;
+        mask = 0x80;
+    }
+    return bytes;
+}
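// Illustrative sketch (not part of the vendored file): two worked encodings.
// The first two arcs share one byte (1*40 + 3 = 0x2b), and arcs above 127,
// like 840, use the base-128 form with high bits set (0x86 0x48).
console.log(encodeOIDString('1.3.101.112').toString('hex'));
// '06032b6570' (the ed25519 OID)
console.log(encodeOIDString('1.2.840.10045.2.1').toString('hex'));
// '06072a8648ce3d0201' (the EC public key OID)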
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js
new file mode 100644
index 0000000000000..8232b6f6a97ab
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js
@@ -0,0 +1,13 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifySignature = void 0;
+const canonical_json_1 = require("@tufjs/canonical-json");
+const crypto_1 = __importDefault(require("crypto"));
+const verifySignature = (metaDataSignedData, key, signature) => {
+    const canonicalData = Buffer.from((0, canonical_json_1.canonicalize)(metaDataSignedData));
+    return crypto_1.default.verify(undefined, canonicalData, key, Buffer.from(signature, 'hex'));
+};
+exports.verifySignature = verifySignature;
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/package.json b/node_modules/sigstore/node_modules/@tufjs/models/package.json
new file mode 100644
index 0000000000000..6711ee0dababc
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@tufjs/models/package.json
@@ -0,0 +1,41 @@
+{
+  "name": "@tufjs/models",
+  "version": "1.0.4",
+  "description": "TUF metadata models",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "build": "tsc --build",
+    "clean": "rm -rf dist && rm tsconfig.tsbuildinfo",
+    "test": "jest"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/theupdateframework/tuf-js.git"
+  },
+  "keywords": [
+    "tuf",
+    "security",
+    "update"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/theupdateframework/tuf-js/issues"
+  },
+  "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme",
+  "devDependencies": {
+    "@types/node": "^18.16.3",
+    "typescript": "^5.0.4"
+  },
+  "dependencies": {
+    "@tufjs/canonical-json": "1.0.0",
+    "minimatch": "^9.0.0"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  }
+}
diff --git a/node_modules/sigstore/node_modules/tuf-js/LICENSE b/node_modules/sigstore/node_modules/tuf-js/LICENSE
new file mode 100644
index 0000000000000..420700f5d3765
--- /dev/null
+++ b/node_modules/sigstore/node_modules/tuf-js/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 GitHub and the TUF Contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/config.js b/node_modules/sigstore/node_modules/tuf-js/dist/config.js
new file mode 100644
index 0000000000000..c2d970e256244
--- /dev/null
+++ b/node_modules/sigstore/node_modules/tuf-js/dist/config.js
@@ -0,0 +1,14 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.defaultConfig = void 0;
+exports.defaultConfig = {
+    maxRootRotations: 32,
+    maxDelegations: 32,
+    rootMaxLength: 512000,
+    timestampMaxLength: 16384,
+    snapshotMaxLength: 2000000,
+    targetsMaxLength: 5000000,
+    prefixTargetsWithHash: true,
+    fetchTimeout: 100000,
+    fetchRetries: 2,
+};
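// Illustrative sketch (not part of the vendored file): consumers layer their
// overrides on top of these defaults, as the Updater constructor does with
// `{ ...defaultConfig, ...config }`.
const exampleConfig = { ...exports.defaultConfig, fetchRetries: 5 };
console.log(exampleConfig.fetchRetries, exampleConfig.maxRootRotations); // 5 32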
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/error.js b/node_modules/sigstore/node_modules/tuf-js/dist/error.js
new file mode 100644
index 0000000000000..f4b10fa202895
--- /dev/null
+++ b/node_modules/sigstore/node_modules/tuf-js/dist/error.js
@@ -0,0 +1,48 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DownloadHTTPError = exports.DownloadLengthMismatchError = exports.DownloadError = exports.ExpiredMetadataError = exports.EqualVersionError = exports.BadVersionError = exports.RepositoryError = exports.PersistError = exports.RuntimeError = exports.ValueError = void 0;
+// An error for an invalid or missing value
+class ValueError extends Error {
+}
+exports.ValueError = ValueError;
+class RuntimeError extends Error {
+}
+exports.RuntimeError = RuntimeError;
+class PersistError extends Error {
+}
+exports.PersistError = PersistError;
+// An error with a repository's state, such as a missing file.
+// It covers all exceptions that come from the repository side when
+// looking from the perspective of users of the metadata API or ngclient.
+class RepositoryError extends Error {
+}
+exports.RepositoryError = RepositoryError;
+// An error for metadata that contains an invalid version number.
+class BadVersionError extends RepositoryError {
+}
+exports.BadVersionError = BadVersionError;
+// An error for metadata containing a previously verified version number.
+class EqualVersionError extends BadVersionError {
+}
+exports.EqualVersionError = EqualVersionError;
+// Indicate that a TUF Metadata file has expired.
+class ExpiredMetadataError extends RepositoryError {
+}
+exports.ExpiredMetadataError = ExpiredMetadataError;
+//----- Download Errors -------------------------------------------------------
+// An error occurred while attempting to download a file.
+class DownloadError extends Error {
+}
+exports.DownloadError = DownloadError;
+// Indicates that a length mismatch was seen while downloading a file.
+class DownloadLengthMismatchError extends DownloadError {
+}
+exports.DownloadLengthMismatchError = DownloadLengthMismatchError;
+// Returned by FetcherInterface implementations for HTTP errors.
+class DownloadHTTPError extends DownloadError {
+    constructor(message, statusCode) {
+        super(message);
+        this.statusCode = statusCode;
+    }
+}
+exports.DownloadHTTPError = DownloadHTTPError;
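// Illustrative sketch (not part of the vendored file): the hierarchy lets
// callers catch the broad RepositoryError while still distinguishing version
// problems with instanceof.
try {
    throw new BadVersionError('expected version 2, got 1');
}
catch (err) {
    console.log(err instanceof RepositoryError); // true
    console.log(err instanceof EqualVersionError); // false: narrower subclass
}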
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js b/node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js
new file mode 100644
index 0000000000000..d3dcf53eeb869
--- /dev/null
+++ b/node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js
@@ -0,0 +1,84 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DefaultFetcher = exports.BaseFetcher = void 0;
+const debug_1 = __importDefault(require("debug"));
+const fs_1 = __importDefault(require("fs"));
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const tmpfile_1 = require("./utils/tmpfile");
+const log = (0, debug_1.default)('tuf:fetch');
+class BaseFetcher {
+    // Download file from given URL. The file is downloaded to a temporary
+    // location and then passed to the given handler. The handler is responsible
+    // for moving the file to its final location. The temporary file is deleted
+    // after the handler returns.
+    async downloadFile(url, maxLength, handler) {
+        return (0, tmpfile_1.withTempFile)(async (tmpFile) => {
+            const reader = await this.fetch(url);
+            let numberOfBytesReceived = 0;
+            const fileStream = fs_1.default.createWriteStream(tmpFile);
+            // Read the stream a chunk at a time so that we can check
+            // the length of the file as we go
+            try {
+                for await (const chunk of reader) {
+                    const bufferChunk = Buffer.from(chunk);
+                    numberOfBytesReceived += bufferChunk.length;
+                    if (numberOfBytesReceived > maxLength) {
+                        throw new error_1.DownloadLengthMismatchError('Max length reached');
+                    }
+                    await writeBufferToStream(fileStream, bufferChunk);
+                }
+            }
+            finally {
+                // Make sure we always close the stream
+                await util_1.default.promisify(fileStream.close).bind(fileStream)();
+            }
+            return handler(tmpFile);
+        });
+    }
+    // Download bytes from given URL.
+    async downloadBytes(url, maxLength) {
+        return this.downloadFile(url, maxLength, async (file) => {
+            const stream = fs_1.default.createReadStream(file);
+            const chunks = [];
+            for await (const chunk of stream) {
+                chunks.push(chunk);
+            }
+            return Buffer.concat(chunks);
+        });
+    }
+}
+exports.BaseFetcher = BaseFetcher;
+class DefaultFetcher extends BaseFetcher {
+    constructor(options = {}) {
+        super();
+        this.timeout = options.timeout;
+        this.retries = options.retries;
+    }
+    async fetch(url) {
+        log('GET %s', url);
+        const response = await (0, make_fetch_happen_1.default)(url, {
+            timeout: this.timeout,
+            retry: this.retries,
+        });
+        if (!response.ok || !response?.body) {
+            throw new error_1.DownloadHTTPError('Failed to download', response.status);
+        }
+        return response.body;
+    }
+}
+exports.DefaultFetcher = DefaultFetcher;
+const writeBufferToStream = async (stream, buffer) => {
+    return new Promise((resolve, reject) => {
+        stream.write(buffer, (err) => {
+            if (err) {
+                reject(err);
+            }
+            resolve(true);
+        });
+    });
+};
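// Illustrative sketch (not part of the vendored file): downloading with a
// length cap. The URL is a hypothetical placeholder.
async function fetchTimestampBytes() {
    const fetcher = new DefaultFetcher({ timeout: 100000, retries: 2 });
    // rejects with DownloadLengthMismatchError if the body exceeds 16384
    // bytes, and with DownloadHTTPError on a non-2xx response
    return fetcher.downloadBytes('https://example.com/metadata/timestamp.json', 16384);
}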
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/index.js b/node_modules/sigstore/node_modules/tuf-js/dist/index.js
new file mode 100644
index 0000000000000..5a83b91f355d8
--- /dev/null
+++ b/node_modules/sigstore/node_modules/tuf-js/dist/index.js
@@ -0,0 +1,9 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Updater = exports.BaseFetcher = exports.TargetFile = void 0;
+var models_1 = require("@tufjs/models");
+Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return models_1.TargetFile; } });
+var fetcher_1 = require("./fetcher");
+Object.defineProperty(exports, "BaseFetcher", { enumerable: true, get: function () { return fetcher_1.BaseFetcher; } });
+var updater_1 = require("./updater");
+Object.defineProperty(exports, "Updater", { enumerable: true, get: function () { return updater_1.Updater; } });
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/store.js b/node_modules/sigstore/node_modules/tuf-js/dist/store.js
new file mode 100644
index 0000000000000..8567336108709
--- /dev/null
+++ b/node_modules/sigstore/node_modules/tuf-js/dist/store.js
@@ -0,0 +1,208 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TrustedMetadataStore = void 0;
+const models_1 = require("@tufjs/models");
+const error_1 = require("./error");
+class TrustedMetadataStore {
+    constructor(rootData) {
+        this.trustedSet = {};
+        // Client workflow 5.1: record fixed update start time
+        this.referenceTime = new Date();
+        // Client workflow 5.2: load trusted root metadata
+        this.loadTrustedRoot(rootData);
+    }
+    get root() {
+        if (!this.trustedSet.root) {
+            throw new ReferenceError('No trusted root metadata');
+        }
+        return this.trustedSet.root;
+    }
+    get timestamp() {
+        return this.trustedSet.timestamp;
+    }
+    get snapshot() {
+        return this.trustedSet.snapshot;
+    }
+    get targets() {
+        return this.trustedSet.targets;
+    }
+    getRole(name) {
+        return this.trustedSet[name];
+    }
+    updateRoot(bytesBuffer) {
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        const newRoot = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data);
+        if (newRoot.signed.type != models_1.MetadataKind.Root) {
+            throw new error_1.RepositoryError(`Expected 'root', got ${newRoot.signed.type}`);
+        }
+        // Client workflow 5.3.4: check for arbitrary software attack
+        this.root.verifyDelegate(models_1.MetadataKind.Root, newRoot);
+        // Client workflow 5.3.5: check for rollback attack
+        if (newRoot.signed.version != this.root.signed.version + 1) {
+            throw new error_1.BadVersionError(`Expected version ${this.root.signed.version + 1}, got ${newRoot.signed.version}`);
+        }
+        // Check that new root is signed by self
+        newRoot.verifyDelegate(models_1.MetadataKind.Root, newRoot);
+        // Client workflow 5.3.7: set new root as trusted root
+        this.trustedSet.root = newRoot;
+        return newRoot;
+    }
+    updateTimestamp(bytesBuffer) {
+        if (this.snapshot) {
+            throw new error_1.RuntimeError('Cannot update timestamp after snapshot');
+        }
+        if (this.root.signed.isExpired(this.referenceTime)) {
+            throw new error_1.ExpiredMetadataError('Final root.json is expired');
+        }
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        const newTimestamp = models_1.Metadata.fromJSON(models_1.MetadataKind.Timestamp, data);
+        if (newTimestamp.signed.type != models_1.MetadataKind.Timestamp) {
+            throw new error_1.RepositoryError(`Expected 'timestamp', got ${newTimestamp.signed.type}`);
+        }
+        // Client workflow 5.4.2: check for arbitrary software attack
+        this.root.verifyDelegate(models_1.MetadataKind.Timestamp, newTimestamp);
+        if (this.timestamp) {
+            // Prevent rolling back timestamp version
+            // Client workflow 5.4.3.1: check for rollback attack
+            if (newTimestamp.signed.version < this.timestamp.signed.version) {
+                throw new error_1.BadVersionError(`New timestamp version ${newTimestamp.signed.version} is less than current version ${this.timestamp.signed.version}`);
+            }
+            //  Keep using old timestamp if versions are equal.
+            if (newTimestamp.signed.version === this.timestamp.signed.version) {
+                throw new error_1.EqualVersionError(`New timestamp version ${newTimestamp.signed.version} is equal to current version ${this.timestamp.signed.version}`);
+            }
+            // Prevent rolling back snapshot version
+            // Client workflow 5.4.3.2: check for rollback attack
+            const snapshotMeta = this.timestamp.signed.snapshotMeta;
+            const newSnapshotMeta = newTimestamp.signed.snapshotMeta;
+            if (newSnapshotMeta.version < snapshotMeta.version) {
+                throw new error_1.BadVersionError(`New snapshot version ${newSnapshotMeta.version} is less than current version ${snapshotMeta.version}`);
+            }
+        }
+        // expiry not checked to allow the old timestamp to be used for rollback
+        // protection of the new timestamp: expiry is checked in updateSnapshot
+        this.trustedSet.timestamp = newTimestamp;
+        // Client workflow 5.4.4: check for freeze attack
+        this.checkFinalTimestamp();
+        return newTimestamp;
+    }
+    updateSnapshot(bytesBuffer, trusted = false) {
+        if (!this.timestamp) {
+            throw new error_1.RuntimeError('Cannot update snapshot before timestamp');
+        }
+        if (this.targets) {
+            throw new error_1.RuntimeError('Cannot update snapshot after targets');
+        }
+        // Snapshot cannot be loaded if final timestamp is expired
+        this.checkFinalTimestamp();
+        const snapshotMeta = this.timestamp.signed.snapshotMeta;
+        // Verify non-trusted data against the hashes in timestamp, if any.
+        // Trusted snapshot data has already been verified once.
+        // Client workflow 5.5.2: check against timestamp role's snapshot hash
+        if (!trusted) {
+            snapshotMeta.verify(bytesBuffer);
+        }
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        const newSnapshot = models_1.Metadata.fromJSON(models_1.MetadataKind.Snapshot, data);
+        if (newSnapshot.signed.type != models_1.MetadataKind.Snapshot) {
+            throw new error_1.RepositoryError(`Expected 'snapshot', got ${newSnapshot.signed.type}`);
+        }
+        // Client workflow 5.5.3: check for arbitrary software attack
+        this.root.verifyDelegate(models_1.MetadataKind.Snapshot, newSnapshot);
+        // version check against meta version (5.5.4) is deferred to allow old
+        // snapshot to be used in rollback protection
+        // Client workflow 5.5.5: check for rollback attack
+        if (this.snapshot) {
+            Object.entries(this.snapshot.signed.meta).forEach(([fileName, fileInfo]) => {
+                const newFileInfo = newSnapshot.signed.meta[fileName];
+                if (!newFileInfo) {
+                    throw new error_1.RepositoryError(`Missing file ${fileName} in new snapshot`);
+                }
+                if (newFileInfo.version < fileInfo.version) {
+                    throw new error_1.BadVersionError(`New version ${newFileInfo.version} of ${fileName} is less than current version ${fileInfo.version}`);
+                }
+            });
+        }
+        this.trustedSet.snapshot = newSnapshot;
+        // snapshot is loaded, but we raise if it's not a valid _final_ snapshot
+        // Client workflow 5.5.4 & 5.5.6
+        this.checkFinalSnapshot();
+        return newSnapshot;
+    }
+    updateDelegatedTargets(bytesBuffer, roleName, delegatorName) {
+        if (!this.snapshot) {
+            throw new error_1.RuntimeError('Cannot update delegated targets before snapshot');
+        }
+        // Targets cannot be loaded if the final snapshot is expired or its version
+        // does not match the meta version in timestamp.
+        this.checkFinalSnapshot();
+        const delegator = this.trustedSet[delegatorName];
+        if (!delegator) {
+            throw new error_1.RuntimeError(`No trusted ${delegatorName} metadata`);
+        }
+        // Extract metadata for the delegated role from snapshot
+        const meta = this.snapshot.signed.meta?.[`${roleName}.json`];
+        if (!meta) {
+            throw new error_1.RepositoryError(`Missing ${roleName}.json in snapshot`);
+        }
+        // Client workflow 5.6.2: check against snapshot role's targets hash
+        meta.verify(bytesBuffer);
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        const newDelegate = models_1.Metadata.fromJSON(models_1.MetadataKind.Targets, data);
+        if (newDelegate.signed.type != models_1.MetadataKind.Targets) {
+            throw new error_1.RepositoryError(`Expected 'targets', got ${newDelegate.signed.type}`);
+        }
+        // Client workflow 5.6.3: check for arbitrary software attack
+        delegator.verifyDelegate(roleName, newDelegate);
+        // Client workflow 5.6.4: Check against snapshot role’s targets version
+        const version = newDelegate.signed.version;
+        if (version != meta.version) {
+            throw new error_1.BadVersionError(`Version ${version} of ${roleName} does not match snapshot version ${meta.version}`);
+        }
+        // Client workflow 5.6.5: check for a freeze attack
+        if (newDelegate.signed.isExpired(this.referenceTime)) {
+            throw new error_1.ExpiredMetadataError(`${roleName}.json is expired`);
+        }
+        this.trustedSet[roleName] = newDelegate;
+    }
+    // Verifies and loads data as trusted root metadata.
+    // Note that an expired initial root is still considered valid.
+    loadTrustedRoot(bytesBuffer) {
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        const root = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data);
+        if (root.signed.type != models_1.MetadataKind.Root) {
+            throw new error_1.RepositoryError(`Expected 'root', got ${root.signed.type}`);
+        }
+        root.verifyDelegate(models_1.MetadataKind.Root, root);
+        this.trustedSet['root'] = root;
+    }
+    checkFinalTimestamp() {
+        // Timestamp MUST be loaded
+        if (!this.timestamp) {
+            throw new ReferenceError('No trusted timestamp metadata');
+        }
+        // Client workflow 5.4.4: check for freeze attack
+        if (this.timestamp.signed.isExpired(this.referenceTime)) {
+            throw new error_1.ExpiredMetadataError('Final timestamp.json is expired');
+        }
+    }
+    checkFinalSnapshot() {
+        // Snapshot and timestamp MUST be loaded
+        if (!this.snapshot) {
+            throw new ReferenceError('No trusted snapshot metadata');
+        }
+        if (!this.timestamp) {
+            throw new ReferenceError('No trusted timestamp metadata');
+        }
+        // Client workflow 5.5.6: check for freeze attack
+        if (this.snapshot.signed.isExpired(this.referenceTime)) {
+            throw new error_1.ExpiredMetadataError('snapshot.json is expired');
+        }
+        // Client workflow 5.5.4: check against timestamp role’s snapshot version
+        const snapshotMeta = this.timestamp.signed.snapshotMeta;
+        if (this.snapshot.signed.version !== snapshotMeta.version) {
+            throw new error_1.BadVersionError("Snapshot version doesn't match timestamp");
+        }
+    }
+}
+exports.TrustedMetadataStore = TrustedMetadataStore;
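// Illustrative sketch (not part of the vendored file): the enforced update
// order. Each buffer holds raw metadata bytes fetched elsewhere; calling
// updateSnapshot before updateTimestamp throws a RuntimeError.
function applyRefresh(rootBytes, timestampBytes, snapshotBytes, targetsBytes) {
    const store = new TrustedMetadataStore(rootBytes); // 5.1 + 5.2
    store.updateTimestamp(timestampBytes); // 5.4
    store.updateSnapshot(snapshotBytes); // 5.5
    store.updateDelegatedTargets(targetsBytes, 'targets', 'root'); // 5.6
    return store;
}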
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/updater.js b/node_modules/sigstore/node_modules/tuf-js/dist/updater.js
new file mode 100644
index 0000000000000..2aba48d24affd
--- /dev/null
+++ b/node_modules/sigstore/node_modules/tuf-js/dist/updater.js
@@ -0,0 +1,320 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Updater = void 0;
+const models_1 = require("@tufjs/models");
+const debug_1 = __importDefault(require("debug"));
+const fs = __importStar(require("fs"));
+const path = __importStar(require("path"));
+const config_1 = require("./config");
+const error_1 = require("./error");
+const fetcher_1 = require("./fetcher");
+const store_1 = require("./store");
+const url = __importStar(require("./utils/url"));
+const log = (0, debug_1.default)('tuf:cache');
+class Updater {
+    constructor(options) {
+        const { metadataDir, metadataBaseUrl, targetDir, targetBaseUrl, fetcher, config, } = options;
+        this.dir = metadataDir;
+        this.metadataBaseUrl = metadataBaseUrl;
+        this.targetDir = targetDir;
+        this.targetBaseUrl = targetBaseUrl;
+        const data = this.loadLocalMetadata(models_1.MetadataKind.Root);
+        this.trustedSet = new store_1.TrustedMetadataStore(data);
+        this.config = { ...config_1.defaultConfig, ...config };
+        this.fetcher =
+            fetcher ||
+                new fetcher_1.DefaultFetcher({
+                    timeout: this.config.fetchTimeout,
+                    retries: this.config.fetchRetries,
+                });
+    }
+    // Refresh and load the metadata before downloading the target.
+    // refresh() should be called once after the client is initialized.
+    async refresh() {
+        await this.loadRoot();
+        await this.loadTimestamp();
+        await this.loadSnapshot();
+        await this.loadTargets(models_1.MetadataKind.Targets, models_1.MetadataKind.Root);
+    }
+    // Returns the TargetFile instance with information for the given target path.
+    //
+    // Implicitly calls refresh if it hasn't already been called.
+    async getTargetInfo(targetPath) {
+        if (!this.trustedSet.targets) {
+            await this.refresh();
+        }
+        return this.preorderDepthFirstWalk(targetPath);
+    }
+    async downloadTarget(targetInfo, filePath, targetBaseUrl) {
+        const targetPath = filePath || this.generateTargetPath(targetInfo);
+        if (!targetBaseUrl) {
+            if (!this.targetBaseUrl) {
+                throw new error_1.ValueError('Target base URL not set');
+            }
+            targetBaseUrl = this.targetBaseUrl;
+        }
+        let targetFilePath = targetInfo.path;
+        const consistentSnapshot = this.trustedSet.root.signed.consistentSnapshot;
+        if (consistentSnapshot && this.config.prefixTargetsWithHash) {
+            const hashes = Object.values(targetInfo.hashes);
+            const { dir, base } = path.parse(targetFilePath);
+            const filename = `${hashes[0]}.${base}`;
+            targetFilePath = dir ? `${dir}/${filename}` : filename;
+        }
+        const targetUrl = url.join(targetBaseUrl, targetFilePath);
+        // Client workflow 5.7.3: download target file
+        await this.fetcher.downloadFile(targetUrl, targetInfo.length, async (fileName) => {
+            // Verify hashes and length of downloaded file
+            await targetInfo.verify(fs.createReadStream(fileName));
+            // Copy file to target path
+            log('WRITE %s', targetPath);
+            fs.copyFileSync(fileName, targetPath);
+        });
+        return targetPath;
+    }
+    async findCachedTarget(targetInfo, filePath) {
+        if (!filePath) {
+            filePath = this.generateTargetPath(targetInfo);
+        }
+        try {
+            if (fs.existsSync(filePath)) {
+                await targetInfo.verify(fs.createReadStream(filePath));
+                return filePath;
+            }
+        }
+        catch (error) {
+            return; // Cached file failed verification or could not be read
+        }
+        return; // File not found
+    }
+    loadLocalMetadata(fileName) {
+        const filePath = path.join(this.dir, `${fileName}.json`);
+        log('READ %s', filePath);
+        return fs.readFileSync(filePath);
+    }
+    // Sequentially load and persist on local disk every newer root metadata
+    // version available on the remote.
+    // Client workflow 5.3: update root role
+    async loadRoot() {
+        // Client workflow 5.3.2: version of trusted root metadata file
+        const rootVersion = this.trustedSet.root.signed.version;
+        const lowerBound = rootVersion + 1;
+        const upperBound = lowerBound + this.config.maxRootRotations;
+        for (let version = lowerBound; version <= upperBound; version++) {
+            const rootUrl = url.join(this.metadataBaseUrl, `${version}.root.json`);
+            try {
+                // Client workflow 5.3.3: download new root metadata file
+                const bytesData = await this.fetcher.downloadBytes(rootUrl, this.config.rootMaxLength);
+                // Client workflow 5.3.4 - 5.3.7
+                this.trustedSet.updateRoot(bytesData);
+                // Client workflow 5.3.8: persist root metadata file
+                this.persistMetadata(models_1.MetadataKind.Root, bytesData);
+            }
+            catch (error) {
+                break;
+            }
+        }
+    }
+    // Load local and remote timestamp metadata.
+    // Client workflow 5.4: update timestamp role
+    async loadTimestamp() {
+        // Load local and remote timestamp metadata
+        try {
+            const data = this.loadLocalMetadata(models_1.MetadataKind.Timestamp);
+            this.trustedSet.updateTimestamp(data);
+        }
+        catch (error) {
+            // continue
+        }
+        // Load from remote (whether local load succeeded or not)
+        const timestampUrl = url.join(this.metadataBaseUrl, 'timestamp.json');
+        // Client workflow 5.4.1: download timestamp metadata file
+        const bytesData = await this.fetcher.downloadBytes(timestampUrl, this.config.timestampMaxLength);
+        try {
+            // Client workflow 5.4.2 - 5.4.4
+            this.trustedSet.updateTimestamp(bytesData);
+        }
+        catch (error) {
+            // If the new timestamp version is the same as the current one,
+            // discard the new one. This is normal and should NOT raise an error.
+            if (error instanceof error_1.EqualVersionError) {
+                return;
+            }
+            // Re-raise any other error
+            throw error;
+        }
+        // Client workflow 5.4.5: persist timestamp metadata
+        this.persistMetadata(models_1.MetadataKind.Timestamp, bytesData);
+    }
+    // Load local and remote snapshot metadata.
+    // Client workflow 5.5: update snapshot role
+    async loadSnapshot() {
+        // Load local (and if needed remote) snapshot metadata
+        try {
+            const data = this.loadLocalMetadata(models_1.MetadataKind.Snapshot);
+            this.trustedSet.updateSnapshot(data, true);
+        }
+        catch (error) {
+            if (!this.trustedSet.timestamp) {
+                throw new ReferenceError('No timestamp metadata');
+            }
+            const snapshotMeta = this.trustedSet.timestamp.signed.snapshotMeta;
+            const maxLength = snapshotMeta.length || this.config.snapshotMaxLength;
+            const version = this.trustedSet.root.signed.consistentSnapshot
+                ? snapshotMeta.version
+                : undefined;
+            const snapshotUrl = url.join(this.metadataBaseUrl, version ? `${version}.snapshot.json` : 'snapshot.json');
+            try {
+                // Client workflow 5.5.1: download snapshot metadata file
+                const bytesData = await this.fetcher.downloadBytes(snapshotUrl, maxLength);
+                // Client workflow 5.5.2 - 5.5.6
+                this.trustedSet.updateSnapshot(bytesData);
+                // Client workflow 5.5.7: persist snapshot metadata file
+                this.persistMetadata(models_1.MetadataKind.Snapshot, bytesData);
+            }
+            catch (error) {
+                throw new error_1.RuntimeError(`Unable to load snapshot metadata: ${error}`);
+            }
+        }
+    }
+    // Load local and remote targets metadata.
+    // Client workflow 5.6: update targets role
+    async loadTargets(role, parentRole) {
+        if (this.trustedSet.getRole(role)) {
+            return this.trustedSet.getRole(role);
+        }
+        try {
+            const buffer = this.loadLocalMetadata(role);
+            this.trustedSet.updateDelegatedTargets(buffer, role, parentRole);
+        }
+        catch (error) {
+            // Local 'role' does not exist or is invalid: update from remote
+            if (!this.trustedSet.snapshot) {
+                throw new ReferenceError('No snapshot metadata');
+            }
+            const metaInfo = this.trustedSet.snapshot.signed.meta[`${role}.json`];
+            // TODO: use length for fetching
+            const maxLength = metaInfo.length || this.config.targetsMaxLength;
+            const version = this.trustedSet.root.signed.consistentSnapshot
+                ? metaInfo.version
+                : undefined;
+            const metadataUrl = url.join(this.metadataBaseUrl, version ? `${version}.${role}.json` : `${role}.json`);
+            try {
+                // Client workflow 5.6.1: download targets metadata file
+                const bytesData = await this.fetcher.downloadBytes(metadataUrl, maxLength);
+                // Client workflow 5.6.2 - 5.6.6
+                this.trustedSet.updateDelegatedTargets(bytesData, role, parentRole);
+                // Client workflow 5.6.7: persist targets metadata file
+                this.persistMetadata(role, bytesData);
+            }
+            catch (error) {
+                throw new error_1.RuntimeError(`Unable to load targets: ${error}`);
+            }
+        }
+        return this.trustedSet.getRole(role);
+    }
+    async preorderDepthFirstWalk(targetPath) {
+        // Interrogates the tree of target delegations in order of appearance
+        // (which implicitly orders trustworthiness), and returns the matching
+        // target found in the most trusted role.
+        // List of delegations to be interrogated. A (role, parent role) pair
+        // is needed to load and verify the delegated targets metadata.
+        const delegationsToVisit = [
+            {
+                roleName: models_1.MetadataKind.Targets,
+                parentRoleName: models_1.MetadataKind.Root,
+            },
+        ];
+        const visitedRoleNames = new Set();
+        // Client workflow 5.6.7: preorder depth-first traversal of the graph of
+        // target delegations
+        while (visitedRoleNames.size <= this.config.maxDelegations &&
+            delegationsToVisit.length > 0) {
+            // Pop the (role, parent role) pair from the top of the stack.
+            // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+            const { roleName, parentRoleName } = delegationsToVisit.pop();
+            // Skip any visited current role to prevent cycles.
+            // Client workflow 5.6.7.1: skip already-visited roles
+            if (visitedRoleNames.has(roleName)) {
+                continue;
+            }
+            // The metadata for 'role_name' must be downloaded/updated before
+            // its targets, delegations, and child roles can be inspected.
+            const targets = (await this.loadTargets(roleName, parentRoleName))
+                ?.signed;
+            if (!targets) {
+                continue;
+            }
+            const target = targets.targets?.[targetPath];
+            if (target) {
+                return target;
+            }
+            // After preorder check, add current role to set of visited roles.
+            visitedRoleNames.add(roleName);
+            if (targets.delegations) {
+                const childRolesToVisit = [];
+                // NOTE: This may be a slow operation if there are many delegated roles.
+                const rolesForTarget = targets.delegations.rolesForTarget(targetPath);
+                for (const { role: childName, terminating } of rolesForTarget) {
+                    childRolesToVisit.push({
+                        roleName: childName,
+                        parentRoleName: roleName,
+                    });
+                    // Client workflow 5.6.7.2.1
+                    if (terminating) {
+                        delegationsToVisit.splice(0); // empty the array
+                        break;
+                    }
+                }
+                childRolesToVisit.reverse();
+                delegationsToVisit.push(...childRolesToVisit);
+            }
+        }
+        return; // no matching target found
+    }
+    generateTargetPath(targetInfo) {
+        if (!this.targetDir) {
+            throw new error_1.ValueError('Target directory not set');
+        }
+        // URL encode target path
+        const filePath = encodeURIComponent(targetInfo.path);
+        return path.join(this.targetDir, filePath);
+    }
+    async persistMetadata(metaDataName, bytesData) {
+        try {
+            const filePath = path.join(this.dir, `${metaDataName}.json`);
+            log('WRITE %s', filePath);
+            fs.writeFileSync(filePath, bytesData.toString('utf8'));
+        }
+        catch (error) {
+            throw new error_1.PersistError(`Failed to persist metadata ${metaDataName} error: ${error}`);
+        }
+    }
+}
+exports.Updater = Updater;
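For orientation, this is roughly how the vendored Updater above is driven end to end. A minimal sketch, assuming a ./metadata directory that already contains a trusted root.json (the constructor reads it synchronously) and illustrative repository URLs; none of these paths or URLs come from the patch itself:

    const { Updater } = require('tuf-js')

    async function main () {
      const updater = new Updater({
        metadataDir: './metadata',      // must already contain a trusted root.json
        metadataBaseUrl: 'https://tuf.example.com/metadata',
        targetDir: './targets',
        targetBaseUrl: 'https://tuf.example.com/targets',
      })

      // Walks root -> timestamp -> snapshot -> targets, persisting as it goes
      await updater.refresh()

      // Resolve the target through the delegation graph, then verify-or-download
      const targetInfo = await updater.getTargetInfo('foo.txt')
      if (!targetInfo) throw new Error('target not delegated by any trusted role')
      const cached = await updater.findCachedTarget(targetInfo)
      const filePath = cached || await updater.downloadTarget(targetInfo)
      console.log('verified target at', filePath)
    }

    main().catch(console.error)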
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js b/node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js
new file mode 100644
index 0000000000000..923eef6044bcc
--- /dev/null
+++ b/node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js
@@ -0,0 +1,25 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.withTempFile = void 0;
+const promises_1 = __importDefault(require("fs/promises"));
+const os_1 = __importDefault(require("os"));
+const path_1 = __importDefault(require("path"));
+// Invokes the given handler with the path to a temporary file. The file
+// is deleted after the handler returns.
+const withTempFile = async (handler) => withTempDir(async (dir) => handler(path_1.default.join(dir, 'tempfile')));
+exports.withTempFile = withTempFile;
+// Invokes the given handler with a temporary directory. The directory is
+// deleted after the handler returns.
+const withTempDir = async (handler) => {
+    const tmpDir = await promises_1.default.realpath(os_1.default.tmpdir());
+    const dir = await promises_1.default.mkdtemp(tmpDir + path_1.default.sep);
+    try {
+        return await handler(dir);
+    }
+    finally {
+        await promises_1.default.rm(dir, { force: true, recursive: true, maxRetries: 3 });
+    }
+};
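A quick note on the helper's contract, with a hypothetical handler (the payload and the deep require path, which assumes the compiled dist layout shown above, are illustrative): the temporary directory, and with it the file, is removed in the finally block even when the handler throws, so callers never clean up themselves:

    const { withTempFile } = require('tuf-js/dist/utils/tmpfile')
    const fs = require('fs/promises')

    withTempFile(async (tmpPath) => {
      await fs.writeFile(tmpPath, 'scratch data')
      return fs.readFile(tmpPath, 'utf8')
    }).then(console.log) // logs "scratch data"; tmpPath is already gone here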
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js b/node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js
new file mode 100644
index 0000000000000..ce67fe2c23053
--- /dev/null
+++ b/node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js
@@ -0,0 +1,14 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.join = void 0;
+const url_1 = require("url");
+function join(base, path) {
+    return new url_1.URL(ensureTrailingSlash(base) + removeLeadingSlash(path)).toString();
+}
+exports.join = join;
+function ensureTrailingSlash(path) {
+    return path.endsWith('/') ? path : path + '/';
+}
+function removeLeadingSlash(path) {
+    return path.startsWith('/') ? path.slice(1) : path;
+}
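Because of the slash normalization above, callers don't need to care whether either side carries its joining slash; a few illustrative cases (the deep require path again assumes the compiled dist layout):

    const { join } = require('tuf-js/dist/utils/url')

    join('https://example.com/repo', 'timestamp.json')   // 'https://example.com/repo/timestamp.json'
    join('https://example.com/repo/', '/timestamp.json') // 'https://example.com/repo/timestamp.json'
    join('https://example.com', '2.root.json')           // 'https://example.com/2.root.json'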
diff --git a/node_modules/sigstore/node_modules/tuf-js/package.json b/node_modules/sigstore/node_modules/tuf-js/package.json
new file mode 100644
index 0000000000000..9187d88083272
--- /dev/null
+++ b/node_modules/sigstore/node_modules/tuf-js/package.json
@@ -0,0 +1,46 @@
+{
+  "name": "tuf-js",
+  "version": "1.1.7",
+  "description": "JavaScript implementation of The Update Framework (TUF)",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "build": "tsc --build",
+    "clean": "rm -rf dist",
+    "test": "jest"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/theupdateframework/tuf-js.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "keywords": [
+    "tuf",
+    "security",
+    "update"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/theupdateframework/tuf-js/issues"
+  },
+  "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme",
+  "devDependencies": {
+    "@tufjs/repo-mock": "1.3.1",
+    "@types/debug": "^4.1.8",
+    "@types/make-fetch-happen": "^10.0.1",
+    "@types/node": "^20.2.5",
+    "nock": "^13.3.1",
+    "typescript": "^5.1.3"
+  },
+  "dependencies": {
+    "@tufjs/models": "1.0.4",
+    "debug": "^4.3.4",
+    "make-fetch-happen": "^11.1.1"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  }
+}
diff --git a/node_modules/tuf-js/dist/config.js b/node_modules/tuf-js/dist/config.js
index c2d970e256244..bafb33a8a1bf7 100644
--- a/node_modules/tuf-js/dist/config.js
+++ b/node_modules/tuf-js/dist/config.js
@@ -10,5 +10,6 @@ exports.defaultConfig = {
     targetsMaxLength: 5000000,
     prefixTargetsWithHash: true,
     fetchTimeout: 100000,
-    fetchRetries: 2,
+    fetchRetries: undefined,
+    fetchRetry: 2,
 };
diff --git a/node_modules/tuf-js/dist/fetcher.js b/node_modules/tuf-js/dist/fetcher.js
index d3dcf53eeb869..f966ce1bb0cdc 100644
--- a/node_modules/tuf-js/dist/fetcher.js
+++ b/node_modules/tuf-js/dist/fetcher.js
@@ -57,13 +57,13 @@ class DefaultFetcher extends BaseFetcher {
     constructor(options = {}) {
         super();
         this.timeout = options.timeout;
-        this.retries = options.retries;
+        this.retry = options.retry;
     }
     async fetch(url) {
         log('GET %s', url);
         const response = await (0, make_fetch_happen_1.default)(url, {
             timeout: this.timeout,
-            retry: this.retries,
+            retry: this.retry,
         });
         if (!response.ok || !response?.body) {
             throw new error_1.DownloadHTTPError('Failed to download', response.status);
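The renamed retry option is passed straight through to make-fetch-happen, whose documented retry option accepts either a bare retry count or a retry-config object; a hedged sketch of both forms (the url parameter is a placeholder):

    const fetch = require('make-fetch-happen')

    async function get (url) {
      // numeric form: just a retry count, as the fetcher above uses it
      await fetch(url, { timeout: 100000, retry: 2 })
      // object form: fine-grained backoff control
      await fetch(url, { retry: { retries: 2, factor: 10, maxTimeout: 60000 } })
    }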
diff --git a/node_modules/tuf-js/dist/updater.js b/node_modules/tuf-js/dist/updater.js
index 2aba48d24affd..2d0c769c7af64 100644
--- a/node_modules/tuf-js/dist/updater.js
+++ b/node_modules/tuf-js/dist/updater.js
@@ -51,7 +51,7 @@ class Updater {
             fetcher ||
                 new fetcher_1.DefaultFetcher({
                     timeout: this.config.fetchTimeout,
-                    retries: this.config.fetchRetries,
+                    retry: this.config.fetchRetries ?? this.config.fetchRetry,
                 });
     }
     // refresh and load the metadata before downloading the target
@@ -306,7 +306,7 @@ class Updater {
         const filePath = encodeURIComponent(targetInfo.path);
         return path.join(this.targetDir, filePath);
     }
-    async persistMetadata(metaDataName, bytesData) {
+    persistMetadata(metaDataName, bytesData) {
         try {
             const filePath = path.join(this.dir, `${metaDataName}.json`);
             log('WRITE %s', filePath);
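The ?? fallback keeps the legacy key working: an explicitly set fetchRetries (even 0) still wins, and only an unset value falls through to the new fetchRetry default. A small sketch of the resolution, mirroring the merged-config logic above:

    const defaultConfig = { fetchRetries: undefined, fetchRetry: 2 }

    const resolveRetry = (userConfig) => {
      const config = { ...defaultConfig, ...userConfig }
      return config.fetchRetries ?? config.fetchRetry
    }

    resolveRetry({})                  // 2 -> new default
    resolveRetry({ fetchRetries: 0 }) // 0 -> legacy key respected; ?? keeps falsy-but-defined values
    resolveRetry({ fetchRetry: 5 })   // 5 -> new key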
diff --git a/node_modules/tuf-js/node_modules/cacache/LICENSE.md b/node_modules/tuf-js/node_modules/cacache/LICENSE.md
deleted file mode 100644
index 8d28acf866d93..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/path.js b/node_modules/tuf-js/node_modules/cacache/lib/content/path.js
deleted file mode 100644
index ad5a76a4f73f2..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/lib/content/path.js
+++ /dev/null
@@ -1,29 +0,0 @@
-'use strict'
-
-const contentVer = require('../../package.json')['cache-version'].content
-const hashToSegments = require('../util/hash-to-segments')
-const path = require('path')
-const ssri = require('ssri')
-
-// Current format of content file path:
-//
-// sha512-BaSE64Hex= ->
-// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
-//
-module.exports = contentPath
-
-function contentPath (cache, integrity) {
-  const sri = ssri.parse(integrity, { single: true })
-  // contentPath is the *strongest* algo given
-  return path.join(
-    contentDir(cache),
-    sri.algorithm,
-    ...hashToSegments(sri.hexDigest())
-  )
-}
-
-module.exports.contentDir = contentDir
-
-function contentDir (cache) {
-  return path.join(cache, `content-v${contentVer}`)
-}
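For reference, the on-disk scheme the removed module implemented: the strongest hash algorithm becomes a directory, and the hex digest is split 2 + 2 + rest to bound directory fan-out. A sketch with a shortened, made-up digest:

    // e.g. a sha512 entry with hex digest 'badc0ffee' landed at:
    //   <cache>/content-v2/sha512/ba/dc/0ffee
    const hashToSegments = (hash) => [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
    console.log(hashToSegments('badc0ffee')) // [ 'ba', 'dc', '0ffee' ]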
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/read.js b/node_modules/tuf-js/node_modules/cacache/lib/content/read.js
deleted file mode 100644
index f41b539df65dc..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/lib/content/read.js
+++ /dev/null
@@ -1,166 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const fsm = require('fs-minipass')
-const ssri = require('ssri')
-const contentPath = require('./path')
-const Pipeline = require('minipass-pipeline')
-
-module.exports = read
-
-const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
-async function read (cache, integrity, opts = {}) {
-  const { size } = opts
-  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
-    // get size
-    const stat = await fs.stat(cpath)
-    return { stat, cpath, sri }
-  })
-  if (typeof size === 'number' && stat.size !== size) {
-    throw sizeError(size, stat.size)
-  }
-
-  if (stat.size > MAX_SINGLE_READ_SIZE) {
-    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
-  }
-
-  const data = await fs.readFile(cpath, { encoding: null })
-  if (!ssri.checkData(data, sri)) {
-    throw integrityError(sri, cpath)
-  }
-
-  return data
-}
-
-const readPipeline = (cpath, size, sri, stream) => {
-  stream.push(
-    new fsm.ReadStream(cpath, {
-      size,
-      readSize: MAX_SINGLE_READ_SIZE,
-    }),
-    ssri.integrityStream({
-      integrity: sri,
-      size,
-    })
-  )
-  return stream
-}
-
-module.exports.stream = readStream
-module.exports.readStream = readStream
-
-function readStream (cache, integrity, opts = {}) {
-  const { size } = opts
-  const stream = new Pipeline()
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
-      // just stat to ensure it exists
-      const stat = await fs.stat(cpath)
-      return { stat, cpath, sri }
-    })
-    if (typeof size === 'number' && size !== stat.size) {
-      return stream.emit('error', sizeError(size, stat.size))
-    }
-
-    return readPipeline(cpath, stat.size, sri, stream)
-  }).catch(err => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.copy = copy
-
-function copy (cache, integrity, dest) {
-  return withContentSri(cache, integrity, (cpath, sri) => {
-    return fs.copyFile(cpath, dest)
-  })
-}
-
-module.exports.hasContent = hasContent
-
-async function hasContent (cache, integrity) {
-  if (!integrity) {
-    return false
-  }
-
-  try {
-    return await withContentSri(cache, integrity, async (cpath, sri) => {
-      const stat = await fs.stat(cpath)
-      return { size: stat.size, sri, stat }
-    })
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return false
-    }
-
-    if (err.code === 'EPERM') {
-      /* istanbul ignore else */
-      if (process.platform !== 'win32') {
-        throw err
-      } else {
-        return false
-      }
-    }
-  }
-}
-
-async function withContentSri (cache, integrity, fn) {
-  const sri = ssri.parse(integrity)
-  // If `integrity` has multiple entries, pick the first digest
-  // with available local data.
-  const algo = sri.pickAlgorithm()
-  const digests = sri[algo]
-
-  if (digests.length <= 1) {
-    const cpath = contentPath(cache, digests[0])
-    return fn(cpath, digests[0])
-  } else {
-    // Can't use race here because a generic error can happen before
-    // a ENOENT error, and can happen before a valid result
-    const results = await Promise.all(digests.map(async (meta) => {
-      try {
-        return await withContentSri(cache, meta, fn)
-      } catch (err) {
-        if (err.code === 'ENOENT') {
-          return Object.assign(
-            new Error('No matching content found for ' + sri.toString()),
-            { code: 'ENOENT' }
-          )
-        }
-        return err
-      }
-    }))
-    // Return the first non error if it is found
-    const result = results.find((r) => !(r instanceof Error))
-    if (result) {
-      return result
-    }
-
-    // Throw the No matching content found error
-    const enoentError = results.find((r) => r.code === 'ENOENT')
-    if (enoentError) {
-      throw enoentError
-    }
-
-    // Throw generic error
-    throw results.find((r) => r instanceof Error)
-  }
-}
-
-function sizeError (expected, found) {
-  /* eslint-disable-next-line max-len */
-  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
-  err.expected = expected
-  err.found = found
-  err.code = 'EBADSIZE'
-  return err
-}
-
-function integrityError (sri, path) {
-  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
-  err.code = 'EINTEGRITY'
-  err.sri = sri
-  err.path = path
-  return err
-}
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js b/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js
deleted file mode 100644
index ce58d679e4cb2..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js
+++ /dev/null
@@ -1,18 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const contentPath = require('./path')
-const { hasContent } = require('./read')
-
-module.exports = rm
-
-async function rm (cache, integrity) {
-  const content = await hasContent(cache, integrity)
-  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
-  if (content && content.sri) {
-    await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
-    return true
-  } else {
-    return false
-  }
-}
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/write.js b/node_modules/tuf-js/node_modules/cacache/lib/content/write.js
deleted file mode 100644
index 7146146581287..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/lib/content/write.js
+++ /dev/null
@@ -1,205 +0,0 @@
-'use strict'
-
-const events = require('events')
-
-const contentPath = require('./path')
-const fs = require('fs/promises')
-const { moveFile } = require('@npmcli/fs')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-const Flush = require('minipass-flush')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-const fsm = require('fs-minipass')
-
-module.exports = write
-
-// Cache of move operations in process so we don't duplicate
-const moveOperations = new Map()
-
-async function write (cache, data, opts = {}) {
-  const { algorithms, size, integrity } = opts
-
-  if (typeof size === 'number' && data.length !== size) {
-    throw sizeError(size, data.length)
-  }
-
-  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
-  if (integrity && !ssri.checkData(data, integrity, opts)) {
-    throw checksumError(integrity, sri)
-  }
-
-  for (const algo in sri) {
-    const tmp = await makeTmp(cache, opts)
-    const hash = sri[algo].toString()
-    try {
-      await fs.writeFile(tmp.target, data, { flag: 'wx' })
-      await moveToDestination(tmp, cache, hash, opts)
-    } finally {
-      if (!tmp.moved) {
-        await fs.rm(tmp.target, { recursive: true, force: true })
-      }
-    }
-  }
-  return { integrity: sri, size: data.length }
-}
-
-module.exports.stream = writeStream
-
-// writes proxied to the 'inputStream' that is passed to the Promise
-// 'end' is deferred until content is handled.
-class CacacheWriteStream extends Flush {
-  constructor (cache, opts) {
-    super()
-    this.opts = opts
-    this.cache = cache
-    this.inputStream = new Minipass()
-    this.inputStream.on('error', er => this.emit('error', er))
-    this.inputStream.on('drain', () => this.emit('drain'))
-    this.handleContentP = null
-  }
-
-  write (chunk, encoding, cb) {
-    if (!this.handleContentP) {
-      this.handleContentP = handleContent(
-        this.inputStream,
-        this.cache,
-        this.opts
-      )
-    }
-    return this.inputStream.write(chunk, encoding, cb)
-  }
-
-  flush (cb) {
-    this.inputStream.end(() => {
-      if (!this.handleContentP) {
-        const e = new Error('Cache input stream was empty')
-        e.code = 'ENODATA'
-        // empty streams are probably emitting end right away.
-        // defer this one tick by rejecting a promise on it.
-        return Promise.reject(e).catch(cb)
-      }
-      // eslint-disable-next-line promise/catch-or-return
-      this.handleContentP.then(
-        (res) => {
-          res.integrity && this.emit('integrity', res.integrity)
-          // eslint-disable-next-line promise/always-return
-          res.size !== null && this.emit('size', res.size)
-          cb()
-        },
-        (er) => cb(er)
-      )
-    })
-  }
-}
-
-function writeStream (cache, opts = {}) {
-  return new CacacheWriteStream(cache, opts)
-}
-
-async function handleContent (inputStream, cache, opts) {
-  const tmp = await makeTmp(cache, opts)
-  try {
-    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
-    await moveToDestination(
-      tmp,
-      cache,
-      res.integrity,
-      opts
-    )
-    return res
-  } finally {
-    if (!tmp.moved) {
-      await fs.rm(tmp.target, { recursive: true, force: true })
-    }
-  }
-}
-
-async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
-  const outStream = new fsm.WriteStream(tmpTarget, {
-    flags: 'wx',
-  })
-
-  if (opts.integrityEmitter) {
-    // we need to create these all simultaneously since they can fire in any order
-    const [integrity, size] = await Promise.all([
-      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
-      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
-      new Pipeline(inputStream, outStream).promise(),
-    ])
-    return { integrity, size }
-  }
-
-  let integrity
-  let size
-  const hashStream = ssri.integrityStream({
-    integrity: opts.integrity,
-    algorithms: opts.algorithms,
-    size: opts.size,
-  })
-  hashStream.on('integrity', i => {
-    integrity = i
-  })
-  hashStream.on('size', s => {
-    size = s
-  })
-
-  const pipeline = new Pipeline(inputStream, hashStream, outStream)
-  await pipeline.promise()
-  return { integrity, size }
-}
-
-async function makeTmp (cache, opts) {
-  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
-  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
-  return {
-    target: tmpTarget,
-    moved: false,
-  }
-}
-
-async function moveToDestination (tmp, cache, sri, opts) {
-  const destination = contentPath(cache, sri)
-  const destDir = path.dirname(destination)
-  if (moveOperations.has(destination)) {
-    return moveOperations.get(destination)
-  }
-  moveOperations.set(
-    destination,
-    fs.mkdir(destDir, { recursive: true })
-      .then(async () => {
-        await moveFile(tmp.target, destination, { overwrite: false })
-        tmp.moved = true
-        return tmp.moved
-      })
-      .catch(err => {
-        if (!err.message.startsWith('The destination file exists')) {
-          throw Object.assign(err, { code: 'EEXIST' })
-        }
-      }).finally(() => {
-        moveOperations.delete(destination)
-      })
-
-  )
-  return moveOperations.get(destination)
-}
-
-function sizeError (expected, found) {
-  /* eslint-disable-next-line max-len */
-  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
-  err.expected = expected
-  err.found = found
-  err.code = 'EBADSIZE'
-  return err
-}
-
-function checksumError (expected, found) {
-  const err = new Error(`Integrity check failed:
-  Wanted: ${expected}
-   Found: ${found}`)
-  err.code = 'EINTEGRITY'
-  err.expected = expected
-  err.found = found
-  return err
-}
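The moveOperations map in the removed write.js is a standard in-flight-promise dedup: concurrent writers of identical content share one move instead of racing on the destination. A stripped-down sketch of the same pattern (the names are mine, not cacache's):

    const inFlight = new Map()

    function dedupe (key, start) {
      if (!inFlight.has(key)) {
        inFlight.set(key, start().finally(() => inFlight.delete(key)))
      }
      return inFlight.get(key)
    }

    // Two concurrent callers for the same key share a single underlying promise:
    //   await Promise.all([dedupe('dest', doMove), dedupe('dest', doMove)])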
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js b/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js
deleted file mode 100644
index 722a37af5ce15..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js
+++ /dev/null
@@ -1,330 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const {
-  appendFile,
-  mkdir,
-  readFile,
-  readdir,
-  rm,
-  writeFile,
-} = require('fs/promises')
-const { Minipass } = require('minipass')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-
-const contentPath = require('./content/path')
-const hashToSegments = require('./util/hash-to-segments')
-const indexV = require('../package.json')['cache-version'].index
-const { moveFile } = require('@npmcli/fs')
-
-module.exports.NotFoundError = class NotFoundError extends Error {
-  constructor (cache, key) {
-    super(`No cache entry for ${key} found in ${cache}`)
-    this.code = 'ENOENT'
-    this.cache = cache
-    this.key = key
-  }
-}
-
-module.exports.compact = compact
-
-async function compact (cache, key, matchFn, opts = {}) {
-  const bucket = bucketPath(cache, key)
-  const entries = await bucketEntries(bucket)
-  const newEntries = []
-  // we loop backwards because the bottom-most result is the newest
-  // since we add new entries with appendFile
-  for (let i = entries.length - 1; i >= 0; --i) {
-    const entry = entries[i]
-    // a null integrity could mean either a delete was appended
-    // or the user has simply stored an index that does not map
-    // to any content. we determine if the user wants to keep the
-    // null integrity based on the validateEntry function passed in options.
-    // if the integrity is null and no validateEntry is provided, we break
-    // as we consider the null integrity to be a deletion of everything
-    // that came before it.
-    if (entry.integrity === null && !opts.validateEntry) {
-      break
-    }
-
-    // if this entry is valid, and it is either the first entry or
-    // the newEntries array doesn't already include an entry that
-    // matches this one based on the provided matchFn, then we add
-    // it to the beginning of our list
-    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
-      (newEntries.length === 0 ||
-        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
-      newEntries.unshift(entry)
-    }
-  }
-
-  const newIndex = '\n' + newEntries.map((entry) => {
-    const stringified = JSON.stringify(entry)
-    const hash = hashEntry(stringified)
-    return `${hash}\t${stringified}`
-  }).join('\n')
-
-  const setup = async () => {
-    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
-    await mkdir(path.dirname(target), { recursive: true })
-    return {
-      target,
-      moved: false,
-    }
-  }
-
-  const teardown = async (tmp) => {
-    if (!tmp.moved) {
-      return rm(tmp.target, { recursive: true, force: true })
-    }
-  }
-
-  const write = async (tmp) => {
-    await writeFile(tmp.target, newIndex, { flag: 'wx' })
-    await mkdir(path.dirname(bucket), { recursive: true })
-    // we use @npmcli/move-file directly here because we
-    // want to overwrite the existing file
-    await moveFile(tmp.target, bucket)
-    tmp.moved = true
-  }
-
-  // write the file atomically
-  const tmp = await setup()
-  try {
-    await write(tmp)
-  } finally {
-    await teardown(tmp)
-  }
-
-  // we reverse the list we generated such that the newest
-  // entries come first in order to make looping through them easier
-  // the true passed to formatEntry tells it to keep null
-  // integrity values, if they made it this far it's because
-  // validateEntry returned true, and as such we should return it
-  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
-}
-
-module.exports.insert = insert
-
-async function insert (cache, key, integrity, opts = {}) {
-  const { metadata, size, time } = opts
-  const bucket = bucketPath(cache, key)
-  const entry = {
-    key,
-    integrity: integrity && ssri.stringify(integrity),
-    time: time || Date.now(),
-    size,
-    metadata,
-  }
-  try {
-    await mkdir(path.dirname(bucket), { recursive: true })
-    const stringified = JSON.stringify(entry)
-    // NOTE - Cleverness ahoy!
-    //
-    // This works because it's tremendously unlikely for an entry to corrupt
-    // another while still preserving the string length of the JSON in
-    // question. So, we just slap the length in there and verify it on read.
-    //
-    // Thanks to @isaacs for the whiteboarding session that ended up with
-    // this.
-    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return undefined
-    }
-
-    throw err
-  }
-  return formatEntry(cache, entry)
-}
-
-module.exports.find = find
-
-async function find (cache, key) {
-  const bucket = bucketPath(cache, key)
-  try {
-    const entries = await bucketEntries(bucket)
-    return entries.reduce((latest, next) => {
-      if (next && next.key === key) {
-        return formatEntry(cache, next)
-      } else {
-        return latest
-      }
-    }, null)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return null
-    } else {
-      throw err
-    }
-  }
-}
-
-module.exports.delete = del
-
-function del (cache, key, opts = {}) {
-  if (!opts.removeFully) {
-    return insert(cache, key, null, opts)
-  }
-
-  const bucket = bucketPath(cache, key)
-  return rm(bucket, { recursive: true, force: true })
-}
-
-module.exports.lsStream = lsStream
-
-function lsStream (cache) {
-  const indexDir = bucketDir(cache)
-  const stream = new Minipass({ objectMode: true })
-
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const buckets = await readdirOrEmpty(indexDir)
-    await Promise.all(buckets.map(async (bucket) => {
-      const bucketPath = path.join(indexDir, bucket)
-      const subbuckets = await readdirOrEmpty(bucketPath)
-      await Promise.all(subbuckets.map(async (subbucket) => {
-        const subbucketPath = path.join(bucketPath, subbucket)
-
-        // "/cachename//./*"
-        const subbucketEntries = await readdirOrEmpty(subbucketPath)
-        await Promise.all(subbucketEntries.map(async (entry) => {
-          const entryPath = path.join(subbucketPath, entry)
-          try {
-            const entries = await bucketEntries(entryPath)
-            // using a Map here prevents duplicate keys from showing up
-            // twice, I guess?
-            const reduced = entries.reduce((acc, entry) => {
-              acc.set(entry.key, entry)
-              return acc
-            }, new Map())
-            // reduced is a map of key => entry
-            for (const entry of reduced.values()) {
-              const formatted = formatEntry(cache, entry)
-              if (formatted) {
-                stream.write(formatted)
-              }
-            }
-          } catch (err) {
-            if (err.code === 'ENOENT') {
-              return undefined
-            }
-            throw err
-          }
-        }))
-      }))
-    }))
-    stream.end()
-    return stream
-  }).catch(err => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.ls = ls
-
-async function ls (cache) {
-  const entries = await lsStream(cache).collect()
-  return entries.reduce((acc, xs) => {
-    acc[xs.key] = xs
-    return acc
-  }, {})
-}
-
-module.exports.bucketEntries = bucketEntries
-
-async function bucketEntries (bucket, filter) {
-  const data = await readFile(bucket, 'utf8')
-  return _bucketEntries(data, filter)
-}
-
-function _bucketEntries (data, filter) {
-  const entries = []
-  data.split('\n').forEach((entry) => {
-    if (!entry) {
-      return
-    }
-
-    const pieces = entry.split('\t')
-    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
-      // Hash is no good! Corruption or malice? Doesn't matter!
-      // EJECT EJECT
-      return
-    }
-    let obj
-    try {
-      obj = JSON.parse(pieces[1])
-    } catch (_) {
-      // eslint-ignore-next-line no-empty-block
-    }
-    // coverage disabled here, no need to test with an entry that parses to something falsey
-    // istanbul ignore else
-    if (obj) {
-      entries.push(obj)
-    }
-  })
-  return entries
-}
-
-module.exports.bucketDir = bucketDir
-
-function bucketDir (cache) {
-  return path.join(cache, `index-v${indexV}`)
-}
-
-module.exports.bucketPath = bucketPath
-
-function bucketPath (cache, key) {
-  const hashed = hashKey(key)
-  return path.join.apply(
-    path,
-    [bucketDir(cache)].concat(hashToSegments(hashed))
-  )
-}
-
-module.exports.hashKey = hashKey
-
-function hashKey (key) {
-  return hash(key, 'sha256')
-}
-
-module.exports.hashEntry = hashEntry
-
-function hashEntry (str) {
-  return hash(str, 'sha1')
-}
-
-function hash (str, digest) {
-  return crypto
-    .createHash(digest)
-    .update(str)
-    .digest('hex')
-}
-
-function formatEntry (cache, entry, keepAll) {
-  // Treat null digests as deletions. They'll shadow any previous entries.
-  if (!entry.integrity && !keepAll) {
-    return null
-  }
-
-  return {
-    key: entry.key,
-    integrity: entry.integrity,
-    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
-    size: entry.size,
-    time: entry.time,
-    metadata: entry.metadata,
-  }
-}
-
-function readdirOrEmpty (dir) {
-  return readdir(dir).catch((err) => {
-    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
-      return []
-    }
-
-    throw err
-  })
-}
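Each line in a removed index bucket was sha1(json) + '\t' + json; on read, _bucketEntries recomputed the hash and silently dropped lines that no longer match, which is how truncated appends self-heal. A hedged round-trip sketch of that framing (the entry fields are illustrative):

    const crypto = require('crypto')
    const hashEntry = (str) => crypto.createHash('sha1').update(str).digest('hex')

    const stringified = JSON.stringify({ key: 'pkg', integrity: 'sha512-...', time: Date.now() })
    const line = `${hashEntry(stringified)}\t${stringified}`

    // read side: keep the line only if the recorded hash still matches the payload
    const [hash, payload] = line.split('\t')
    console.log(hashEntry(payload) === hash) // true for an uncorrupted line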
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/get.js b/node_modules/tuf-js/node_modules/cacache/lib/get.js
deleted file mode 100644
index 80ec206c7ecaa..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/lib/get.js
+++ /dev/null
@@ -1,170 +0,0 @@
-'use strict'
-
-const Collect = require('minipass-collect')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const read = require('./content/read')
-
-async function getData (cache, key, opts = {}) {
-  const { integrity, memoize, size } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return {
-      metadata: memoized.entry.metadata,
-      data: memoized.data,
-      integrity: memoized.entry.integrity,
-      size: memoized.entry.size,
-    }
-  }
-
-  const entry = await index.find(cache, key, opts)
-  if (!entry) {
-    throw new index.NotFoundError(cache, key)
-  }
-  const data = await read(cache, entry.integrity, { integrity, size })
-  if (memoize) {
-    memo.put(cache, entry, data, opts)
-  }
-
-  return {
-    data,
-    metadata: entry.metadata,
-    size: entry.size,
-    integrity: entry.integrity,
-  }
-}
-module.exports = getData
-
-async function getDataByDigest (cache, key, opts = {}) {
-  const { integrity, memoize, size } = opts
-  const memoized = memo.get.byDigest(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return memoized
-  }
-
-  const res = await read(cache, key, { integrity, size })
-  if (memoize) {
-    memo.put.byDigest(cache, key, res, opts)
-  }
-  return res
-}
-module.exports.byDigest = getDataByDigest
-
-const getMemoizedStream = (memoized) => {
-  const stream = new Minipass()
-  stream.on('newListener', function (ev, cb) {
-    ev === 'metadata' && cb(memoized.entry.metadata)
-    ev === 'integrity' && cb(memoized.entry.integrity)
-    ev === 'size' && cb(memoized.entry.size)
-  })
-  stream.end(memoized.data)
-  return stream
-}
-
-function getStream (cache, key, opts = {}) {
-  const { memoize, size } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return getMemoizedStream(memoized)
-  }
-
-  const stream = new Pipeline()
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const entry = await index.find(cache, key)
-    if (!entry) {
-      throw new index.NotFoundError(cache, key)
-    }
-
-    stream.emit('metadata', entry.metadata)
-    stream.emit('integrity', entry.integrity)
-    stream.emit('size', entry.size)
-    stream.on('newListener', function (ev, cb) {
-      ev === 'metadata' && cb(entry.metadata)
-      ev === 'integrity' && cb(entry.integrity)
-      ev === 'size' && cb(entry.size)
-    })
-
-    const src = read.readStream(
-      cache,
-      entry.integrity,
-      { ...opts, size: typeof size !== 'number' ? entry.size : size }
-    )
-
-    if (memoize) {
-      const memoStream = new Collect.PassThrough()
-      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
-      stream.unshift(memoStream)
-    }
-    stream.unshift(src)
-    return stream
-  }).catch((err) => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.stream = getStream
-
-function getStreamDigest (cache, integrity, opts = {}) {
-  const { memoize } = opts
-  const memoized = memo.get.byDigest(cache, integrity, opts)
-  if (memoized && memoize !== false) {
-    const stream = new Minipass()
-    stream.end(memoized)
-    return stream
-  } else {
-    const stream = read.readStream(cache, integrity, opts)
-    if (!memoize) {
-      return stream
-    }
-
-    const memoStream = new Collect.PassThrough()
-    memoStream.on('collect', data => memo.put.byDigest(
-      cache,
-      integrity,
-      data,
-      opts
-    ))
-    return new Pipeline(stream, memoStream)
-  }
-}
-
-module.exports.stream.byDigest = getStreamDigest
-
-function info (cache, key, opts = {}) {
-  const { memoize } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return Promise.resolve(memoized.entry)
-  } else {
-    return index.find(cache, key)
-  }
-}
-module.exports.info = info
-
-async function copy (cache, key, dest, opts = {}) {
-  const entry = await index.find(cache, key, opts)
-  if (!entry) {
-    throw new index.NotFoundError(cache, key)
-  }
-  await read.copy(cache, entry.integrity, dest, opts)
-  return {
-    metadata: entry.metadata,
-    size: entry.size,
-    integrity: entry.integrity,
-  }
-}
-
-module.exports.copy = copy
-
-async function copyByDigest (cache, key, dest, opts = {}) {
-  await read.copy(cache, key, dest, opts)
-  return key
-}
-
-module.exports.copy.byDigest = copyByDigest
-
-module.exports.hasContent = read.hasContent
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/index.js b/node_modules/tuf-js/node_modules/cacache/lib/index.js
deleted file mode 100644
index c9b0da5f3a271..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/lib/index.js
+++ /dev/null
@@ -1,42 +0,0 @@
-'use strict'
-
-const get = require('./get.js')
-const put = require('./put.js')
-const rm = require('./rm.js')
-const verify = require('./verify.js')
-const { clearMemoized } = require('./memoization.js')
-const tmp = require('./util/tmp.js')
-const index = require('./entry-index.js')
-
-module.exports.index = {}
-module.exports.index.compact = index.compact
-module.exports.index.insert = index.insert
-
-module.exports.ls = index.ls
-module.exports.ls.stream = index.lsStream
-
-module.exports.get = get
-module.exports.get.byDigest = get.byDigest
-module.exports.get.stream = get.stream
-module.exports.get.stream.byDigest = get.stream.byDigest
-module.exports.get.copy = get.copy
-module.exports.get.copy.byDigest = get.copy.byDigest
-module.exports.get.info = get.info
-module.exports.get.hasContent = get.hasContent
-
-module.exports.put = put
-module.exports.put.stream = put.stream
-
-module.exports.rm = rm.entry
-module.exports.rm.all = rm.all
-module.exports.rm.entry = module.exports.rm
-module.exports.rm.content = rm.content
-
-module.exports.clearMemoized = clearMemoized
-
-module.exports.tmp = {}
-module.exports.tmp.mkdir = tmp.mkdir
-module.exports.tmp.withTmp = tmp.withTmp
-
-module.exports.verify = verify
-module.exports.verify.lastRun = verify.lastRun
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/memoization.js b/node_modules/tuf-js/node_modules/cacache/lib/memoization.js
deleted file mode 100644
index 0ff604a479c9c..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/lib/memoization.js
+++ /dev/null
@@ -1,72 +0,0 @@
-'use strict'
-
-const LRU = require('lru-cache')
-
-const MEMOIZED = new LRU({
-  max: 500,
-  maxSize: 50 * 1024 * 1024, // 50MB
-  ttl: 3 * 60 * 1000, // 3 minutes
-  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
-})
-
-module.exports.clearMemoized = clearMemoized
-
-function clearMemoized () {
-  const old = {}
-  MEMOIZED.forEach((v, k) => {
-    old[k] = v
-  })
-  MEMOIZED.clear()
-  return old
-}
-
-module.exports.put = put
-
-function put (cache, entry, data, opts) {
-  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
-  putDigest(cache, entry.integrity, data, opts)
-}
-
-module.exports.put.byDigest = putDigest
-
-function putDigest (cache, integrity, data, opts) {
-  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
-}
-
-module.exports.get = get
-
-function get (cache, key, opts) {
-  return pickMem(opts).get(`key:${cache}:${key}`)
-}
-
-module.exports.get.byDigest = getDigest
-
-function getDigest (cache, integrity, opts) {
-  return pickMem(opts).get(`digest:${cache}:${integrity}`)
-}
-
-class ObjProxy {
-  constructor (obj) {
-    this.obj = obj
-  }
-
-  get (key) {
-    return this.obj[key]
-  }
-
-  set (key, val) {
-    this.obj[key] = val
-  }
-}
-
-function pickMem (opts) {
-  if (!opts || !opts.memoize) {
-    return MEMOIZED
-  } else if (opts.memoize.get && opts.memoize.set) {
-    return opts.memoize
-  } else if (typeof opts.memoize === 'object') {
-    return new ObjProxy(opts.memoize)
-  } else {
-    return MEMOIZED
-  }
-}
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/put.js b/node_modules/tuf-js/node_modules/cacache/lib/put.js
deleted file mode 100644
index 9fc932d5f6dec..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/lib/put.js
+++ /dev/null
@@ -1,80 +0,0 @@
-'use strict'
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const write = require('./content/write')
-const Flush = require('minipass-flush')
-const { PassThrough } = require('minipass-collect')
-const Pipeline = require('minipass-pipeline')
-
-const putOpts = (opts) => ({
-  algorithms: ['sha512'],
-  ...opts,
-})
-
-module.exports = putData
-
-async function putData (cache, key, data, opts = {}) {
-  const { memoize } = opts
-  opts = putOpts(opts)
-  const res = await write(cache, data, opts)
-  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
-  if (memoize) {
-    memo.put(cache, entry, data, opts)
-  }
-
-  return res.integrity
-}
-
-module.exports.stream = putStream
-
-function putStream (cache, key, opts = {}) {
-  const { memoize } = opts
-  opts = putOpts(opts)
-  let integrity
-  let size
-  let error
-
-  let memoData
-  const pipeline = new Pipeline()
-  // first item in the pipeline is the memoizer, because we need
-  // that to end first and get the collected data.
-  if (memoize) {
-    const memoizer = new PassThrough().on('collect', data => {
-      memoData = data
-    })
-    pipeline.push(memoizer)
-  }
-
-  // contentStream is a write-only, not a passthrough
-  // no data comes out of it.
-  const contentStream = write.stream(cache, opts)
-    .on('integrity', (int) => {
-      integrity = int
-    })
-    .on('size', (s) => {
-      size = s
-    })
-    .on('error', (err) => {
-      error = err
-    })
-
-  pipeline.push(contentStream)
-
-  // last but not least, we write the index and emit hash and size,
-  // and memoize if we're doing that
-  pipeline.push(new Flush({
-    async flush () {
-      if (!error) {
-        const entry = await index.insert(cache, key, integrity, { ...opts, size })
-        if (memoize && memoData) {
-          memo.put(cache, entry, memoData, opts)
-        }
-        pipeline.emit('integrity', integrity)
-        pipeline.emit('size', size)
-      }
-    },
-  }))
-
-  return pipeline
-}
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/rm.js b/node_modules/tuf-js/node_modules/cacache/lib/rm.js
deleted file mode 100644
index a94760c7cf243..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/lib/rm.js
+++ /dev/null
@@ -1,31 +0,0 @@
-'use strict'
-
-const { rm } = require('fs/promises')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const memo = require('./memoization')
-const path = require('path')
-const rmContent = require('./content/rm')
-
-module.exports = entry
-module.exports.entry = entry
-
-function entry (cache, key, opts) {
-  memo.clearMemoized()
-  return index.delete(cache, key, opts)
-}
-
-module.exports.content = content
-
-function content (cache, integrity) {
-  memo.clearMemoized()
-  return rmContent(cache, integrity)
-}
-
-module.exports.all = all
-
-async function all (cache) {
-  memo.clearMemoized()
-  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
-  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
-}
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js b/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js
deleted file mode 100644
index 8500c1c16a429..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-const { glob } = require('glob')
-const path = require('path')
-
-const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
-module.exports = (path, options) => glob(globify(path), options)
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js
deleted file mode 100644
index 445599b503808..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-module.exports = hashToSegments
-
-function hashToSegments (hash) {
-  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
-}
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js b/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js
deleted file mode 100644
index 0bf5302136ebe..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js
+++ /dev/null
@@ -1,26 +0,0 @@
-'use strict'
-
-const { withTempDir } = require('@npmcli/fs')
-const fs = require('fs/promises')
-const path = require('path')
-
-module.exports.mkdir = mktmpdir
-
-async function mktmpdir (cache, opts = {}) {
-  const { tmpPrefix } = opts
-  const tmpDir = path.join(cache, 'tmp')
-  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
-  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
-  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
-  return fs.mkdtemp(target, { owner: 'inherit' })
-}
-
-module.exports.withTmp = withTmp
-
-function withTmp (cache, opts, cb) {
-  if (!cb) {
-    cb = opts
-    opts = {}
-  }
-  return withTempDir(path.join(cache, 'tmp'), cb, opts)
-}
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/verify.js b/node_modules/tuf-js/node_modules/cacache/lib/verify.js
deleted file mode 100644
index 62e85c946490f..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/lib/verify.js
+++ /dev/null
@@ -1,257 +0,0 @@
-'use strict'
-
-const {
-  mkdir,
-  readFile,
-  rm,
-  stat,
-  truncate,
-  writeFile,
-} = require('fs/promises')
-const pMap = require('p-map')
-const contentPath = require('./content/path')
-const fsm = require('fs-minipass')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const path = require('path')
-const ssri = require('ssri')
-
-const hasOwnProperty = (obj, key) =>
-  Object.prototype.hasOwnProperty.call(obj, key)
-
-const verifyOpts = (opts) => ({
-  concurrency: 20,
-  log: { silly () {} },
-  ...opts,
-})
-
-module.exports = verify
-
-async function verify (cache, opts) {
-  opts = verifyOpts(opts)
-  opts.log.silly('verify', 'verifying cache at', cache)
-
-  const steps = [
-    markStartTime,
-    fixPerms,
-    garbageCollect,
-    rebuildIndex,
-    cleanTmp,
-    writeVerifile,
-    markEndTime,
-  ]
-
-  const stats = {}
-  for (const step of steps) {
-    const label = step.name
-    const start = new Date()
-    const s = await step(cache, opts)
-    if (s) {
-      Object.keys(s).forEach((k) => {
-        stats[k] = s[k]
-      })
-    }
-    const end = new Date()
-    if (!stats.runTime) {
-      stats.runTime = {}
-    }
-    stats.runTime[label] = end - start
-  }
-  stats.runTime.total = stats.endTime - stats.startTime
-  opts.log.silly(
-    'verify',
-    'verification finished for',
-    cache,
-    'in',
-    `${stats.runTime.total}ms`
-  )
-  return stats
-}
-
-async function markStartTime (cache, opts) {
-  return { startTime: new Date() }
-}
-
-async function markEndTime (cache, opts) {
-  return { endTime: new Date() }
-}
-
-async function fixPerms (cache, opts) {
-  opts.log.silly('verify', 'fixing cache permissions')
-  await mkdir(cache, { recursive: true })
-  return null
-}
-
-// Implements a naive mark-and-sweep tracing garbage collector.
-//
-// The algorithm is basically as follows:
-// 1. Read (and filter) all index entries ("pointers")
-// 2. Mark each integrity value as "live"
-// 3. Read entire filesystem tree in `content-vX/` dir
-// 4. If content is live, verify its checksum and delete it if it fails
-// 5. If content is not marked as live, rm it.
-//
-async function garbageCollect (cache, opts) {
-  opts.log.silly('verify', 'garbage collecting content')
-  const indexStream = index.lsStream(cache)
-  const liveContent = new Set()
-  indexStream.on('data', (entry) => {
-    if (opts.filter && !opts.filter(entry)) {
-      return
-    }
-
-    // integrity is stringified, re-parse it so we can get each hash
-    const integrity = ssri.parse(entry.integrity)
-    for (const algo in integrity) {
-      liveContent.add(integrity[algo].toString())
-    }
-  })
-  await new Promise((resolve, reject) => {
-    indexStream.on('end', resolve).on('error', reject)
-  })
-  const contentDir = contentPath.contentDir(cache)
-  const files = await glob(path.join(contentDir, '**'), {
-    follow: false,
-    nodir: true,
-    nosort: true,
-  })
-  const stats = {
-    verifiedContent: 0,
-    reclaimedCount: 0,
-    reclaimedSize: 0,
-    badContentCount: 0,
-    keptSize: 0,
-  }
-  await pMap(
-    files,
-    async (f) => {
-      const split = f.split(/[/\\]/)
-      const digest = split.slice(split.length - 3).join('')
-      const algo = split[split.length - 4]
-      const integrity = ssri.fromHex(digest, algo)
-      if (liveContent.has(integrity.toString())) {
-        const info = await verifyContent(f, integrity)
-        if (!info.valid) {
-          stats.reclaimedCount++
-          stats.badContentCount++
-          stats.reclaimedSize += info.size
-        } else {
-          stats.verifiedContent++
-          stats.keptSize += info.size
-        }
-      } else {
-        // No entries refer to this content. We can delete.
-        stats.reclaimedCount++
-        const s = await stat(f)
-        await rm(f, { recursive: true, force: true })
-        stats.reclaimedSize += s.size
-      }
-      return stats
-    },
-    { concurrency: opts.concurrency }
-  )
-  return stats
-}
-
-async function verifyContent (filepath, sri) {
-  const contentInfo = {}
-  try {
-    const { size } = await stat(filepath)
-    contentInfo.size = size
-    contentInfo.valid = true
-    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return { size: 0, valid: false }
-    }
-    if (err.code !== 'EINTEGRITY') {
-      throw err
-    }
-
-    await rm(filepath, { recursive: true, force: true })
-    contentInfo.valid = false
-  }
-  return contentInfo
-}
-
-async function rebuildIndex (cache, opts) {
-  opts.log.silly('verify', 'rebuilding index')
-  const entries = await index.ls(cache)
-  const stats = {
-    missingContent: 0,
-    rejectedEntries: 0,
-    totalEntries: 0,
-  }
-  const buckets = {}
-  for (const k in entries) {
-    /* istanbul ignore else */
-    if (hasOwnProperty(entries, k)) {
-      const hashed = index.hashKey(k)
-      const entry = entries[k]
-      const excluded = opts.filter && !opts.filter(entry)
-      excluded && stats.rejectedEntries++
-      if (buckets[hashed] && !excluded) {
-        buckets[hashed].push(entry)
-      } else if (buckets[hashed] && excluded) {
-        // skip
-      } else if (excluded) {
-        buckets[hashed] = []
-        buckets[hashed]._path = index.bucketPath(cache, k)
-      } else {
-        buckets[hashed] = [entry]
-        buckets[hashed]._path = index.bucketPath(cache, k)
-      }
-    }
-  }
-  await pMap(
-    Object.keys(buckets),
-    (key) => {
-      return rebuildBucket(cache, buckets[key], stats, opts)
-    },
-    { concurrency: opts.concurrency }
-  )
-  return stats
-}
-
-async function rebuildBucket (cache, bucket, stats, opts) {
-  await truncate(bucket._path)
-  // This needs to be serialized because cacache explicitly
-  // lets very racy bucket conflicts clobber each other.
-  for (const entry of bucket) {
-    const content = contentPath(cache, entry.integrity)
-    try {
-      await stat(content)
-      await index.insert(cache, entry.key, entry.integrity, {
-        metadata: entry.metadata,
-        size: entry.size,
-        time: entry.time,
-      })
-      stats.totalEntries++
-    } catch (err) {
-      if (err.code === 'ENOENT') {
-        stats.rejectedEntries++
-        stats.missingContent++
-      } else {
-        throw err
-      }
-    }
-  }
-}
-
-function cleanTmp (cache, opts) {
-  opts.log.silly('verify', 'cleaning tmp directory')
-  return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
-}
-
-async function writeVerifile (cache, opts) {
-  const verifile = path.join(cache, '_lastverified')
-  opts.log.silly('verify', 'writing verifile to ' + verifile)
-  return writeFile(verifile, `${Date.now()}`)
-}
-
-module.exports.lastRun = lastRun
-
-async function lastRun (cache) {
-  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
-  return new Date(+data)
-}
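The verify() function above drives a fixed pipeline of named async steps, merging whatever partial stats each step returns and timing each one under stats.runTime[step.name]. A minimal standalone sketch of that pattern, with illustrative names:

// Sketch (not this patch's code): run named async steps in order, merge any
// returned partial stats, and record per-step wall time keyed by step name.
async function runSteps (steps, ctx) {
  const stats = { runTime: {} }
  for (const step of steps) {
    const start = Date.now()
    Object.assign(stats, await step(ctx)) // later steps may overwrite keys
    stats.runTime[step.name] = Date.now() - start
  }
  return stats
}

async function checkOne () { return { verified: 1 } }
runSteps([checkOne], {}).then(console.log)
// -> { runTime: { checkOne: <ms> }, verified: 1 }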
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE
deleted file mode 100644
index 97f8e32ed82e4..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
deleted file mode 100644
index b6cdae8eb514b..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
+++ /dev/null
@@ -1,1028 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0;
-const proc = typeof process === 'object' && process
-    ? process
-    : {
-        stdout: null,
-        stderr: null,
-    };
-const events_1 = require("events");
-const stream_1 = __importDefault(require("stream"));
-const string_decoder_1 = require("string_decoder");
-/**
- * Return true if the argument is a Minipass stream, Node stream, or something
- * else that Minipass can interact with.
- */
-const isStream = (s) => !!s &&
-    typeof s === 'object' &&
-    (s instanceof Minipass ||
-        s instanceof stream_1.default ||
-        (0, exports.isReadable)(s) ||
-        (0, exports.isWritable)(s));
-exports.isStream = isStream;
-/**
- * Return true if the argument is a valid {@link Minipass.Readable}
- */
-const isReadable = (s) => !!s &&
-    typeof s === 'object' &&
-    s instanceof events_1.EventEmitter &&
-    typeof s.pipe === 'function' &&
-    // node core Writable streams have a pipe() method, but it throws
-    s.pipe !== stream_1.default.Writable.prototype.pipe;
-exports.isReadable = isReadable;
-/**
- * Return true if the argument is a valid {@link Minipass.Writable}
- */
-const isWritable = (s) => !!s &&
-    typeof s === 'object' &&
-    s instanceof events_1.EventEmitter &&
-    typeof s.write === 'function' &&
-    typeof s.end === 'function';
-exports.isWritable = isWritable;
-const EOF = Symbol('EOF');
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
-const EMITTED_END = Symbol('emittedEnd');
-const EMITTING_END = Symbol('emittingEnd');
-const EMITTED_ERROR = Symbol('emittedError');
-const CLOSED = Symbol('closed');
-const READ = Symbol('read');
-const FLUSH = Symbol('flush');
-const FLUSHCHUNK = Symbol('flushChunk');
-const ENCODING = Symbol('encoding');
-const DECODER = Symbol('decoder');
-const FLOWING = Symbol('flowing');
-const PAUSED = Symbol('paused');
-const RESUME = Symbol('resume');
-const BUFFER = Symbol('buffer');
-const PIPES = Symbol('pipes');
-const BUFFERLENGTH = Symbol('bufferLength');
-const BUFFERPUSH = Symbol('bufferPush');
-const BUFFERSHIFT = Symbol('bufferShift');
-const OBJECTMODE = Symbol('objectMode');
-// internal event when stream is destroyed
-const DESTROYED = Symbol('destroyed');
-// internal event when stream has an error
-const ERROR = Symbol('error');
-const EMITDATA = Symbol('emitData');
-const EMITEND = Symbol('emitEnd');
-const EMITEND2 = Symbol('emitEnd2');
-const ASYNC = Symbol('async');
-const ABORT = Symbol('abort');
-const ABORTED = Symbol('aborted');
-const SIGNAL = Symbol('signal');
-const DATALISTENERS = Symbol('dataListeners');
-const DISCARDED = Symbol('discarded');
-const defer = (fn) => Promise.resolve().then(fn);
-const nodefer = (fn) => fn();
-const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
-const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
-    (!!b &&
-        typeof b === 'object' &&
-        b.constructor &&
-        b.constructor.name === 'ArrayBuffer' &&
-        b.byteLength >= 0);
-const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
-/**
- * Internal class representing a pipe to a destination stream.
- *
- * @internal
- */
-class Pipe {
-    src;
-    dest;
-    opts;
-    ondrain;
-    constructor(src, dest, opts) {
-        this.src = src;
-        this.dest = dest;
-        this.opts = opts;
-        this.ondrain = () => src[RESUME]();
-        this.dest.on('drain', this.ondrain);
-    }
-    unpipe() {
-        this.dest.removeListener('drain', this.ondrain);
-    }
-    // only here for the prototype
-    /* c8 ignore start */
-    proxyErrors(_er) { }
-    /* c8 ignore stop */
-    end() {
-        this.unpipe();
-        if (this.opts.end)
-            this.dest.end();
-    }
-}
-/**
- * Internal class representing a pipe to a destination stream where
- * errors are proxied.
- *
- * @internal
- */
-class PipeProxyErrors extends Pipe {
-    unpipe() {
-        this.src.removeListener('error', this.proxyErrors);
-        super.unpipe();
-    }
-    constructor(src, dest, opts) {
-        super(src, dest, opts);
-        this.proxyErrors = er => dest.emit('error', er);
-        src.on('error', this.proxyErrors);
-    }
-}
-const isObjectModeOptions = (o) => !!o.objectMode;
-const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
-/**
- * Main export, the Minipass class
- *
- * `RType` is the type of data emitted, defaults to Buffer
- *
- * `WType` is the type of data to be written, if RType is buffer or string,
- * then any {@link Minipass.ContiguousData} is allowed.
- *
- * `Events` is the set of event handler signatures that this object
- * will emit, see {@link Minipass.Events}
- */
-class Minipass extends events_1.EventEmitter {
-    [FLOWING] = false;
-    [PAUSED] = false;
-    [PIPES] = [];
-    [BUFFER] = [];
-    [OBJECTMODE];
-    [ENCODING];
-    [ASYNC];
-    [DECODER];
-    [EOF] = false;
-    [EMITTED_END] = false;
-    [EMITTING_END] = false;
-    [CLOSED] = false;
-    [EMITTED_ERROR] = null;
-    [BUFFERLENGTH] = 0;
-    [DESTROYED] = false;
-    [SIGNAL];
-    [ABORTED] = false;
-    [DATALISTENERS] = 0;
-    [DISCARDED] = false;
-    /**
-     * true if the stream can be written
-     */
-    writable = true;
-    /**
-     * true if the stream can be read
-     */
-    readable = true;
-    /**
-     * If `RType` is Buffer, then options do not need to be provided.
-     * Otherwise, an options object must be provided to specify either
-     * {@link Minipass.SharedOptions.objectMode} or
-     * {@link Minipass.SharedOptions.encoding}, as appropriate.
-     */
-    constructor(...args) {
-        const options = (args[0] ||
-            {});
-        super();
-        if (options.objectMode && typeof options.encoding === 'string') {
-            throw new TypeError('Encoding and objectMode may not be used together');
-        }
-        if (isObjectModeOptions(options)) {
-            this[OBJECTMODE] = true;
-            this[ENCODING] = null;
-        }
-        else if (isEncodingOptions(options)) {
-            this[ENCODING] = options.encoding;
-            this[OBJECTMODE] = false;
-        }
-        else {
-            this[OBJECTMODE] = false;
-            this[ENCODING] = null;
-        }
-        this[ASYNC] = !!options.async;
-        this[DECODER] = this[ENCODING]
-            ? new string_decoder_1.StringDecoder(this[ENCODING])
-            : null;
-        //@ts-ignore - private option for debugging and testing
-        if (options && options.debugExposeBuffer === true) {
-            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
-        }
-        //@ts-ignore - private option for debugging and testing
-        if (options && options.debugExposePipes === true) {
-            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
-        }
-        const { signal } = options;
-        if (signal) {
-            this[SIGNAL] = signal;
-            if (signal.aborted) {
-                this[ABORT]();
-            }
-            else {
-                signal.addEventListener('abort', () => this[ABORT]());
-            }
-        }
-    }
-    /**
-     * The amount of data stored in the buffer waiting to be read.
-     *
-     * For Buffer streams, this will be the total byte length.
-     * For string encoding streams, this will be the string character length,
-     * according to JavaScript's `string.length` logic.
-     * For objectMode streams, this is a count of the items waiting to be
-     * emitted.
-     */
-    get bufferLength() {
-        return this[BUFFERLENGTH];
-    }
-    /**
-     * The `BufferEncoding` currently in use, or `null`
-     */
-    get encoding() {
-        return this[ENCODING];
-    }
-    /**
-     * @deprecated - This is a read-only property
-     */
-    set encoding(_enc) {
-        throw new Error('Encoding must be set at instantiation time');
-    }
-    /**
-     * @deprecated - Encoding may only be set at instantiation time
-     */
-    setEncoding(_enc) {
-        throw new Error('Encoding must be set at instantiation time');
-    }
-    /**
-     * True if this is an objectMode stream
-     */
-    get objectMode() {
-        return this[OBJECTMODE];
-    }
-    /**
-     * @deprecated - This is a read-only property
-     */
-    set objectMode(_om) {
-        throw new Error('objectMode must be set at instantiation time');
-    }
-    /**
-     * true if this is an async stream
-     */
-    get ['async']() {
-        return this[ASYNC];
-    }
-    /**
-     * Set to true to make this stream async.
-     *
-     * Once set, it cannot be unset, as this would potentially cause incorrect
-     * behavior.  I.e., a sync stream can be made async, but an async stream
-     * cannot be safely made sync.
-     */
-    set ['async'](a) {
-        this[ASYNC] = this[ASYNC] || !!a;
-    }
-    // drop everything and get out of the flow completely
-    [ABORT]() {
-        this[ABORTED] = true;
-        this.emit('abort', this[SIGNAL]?.reason);
-        this.destroy(this[SIGNAL]?.reason);
-    }
-    /**
-     * True if the stream has been aborted.
-     */
-    get aborted() {
-        return this[ABORTED];
-    }
-    /**
-     * No-op setter. Stream aborted status is set via the AbortSignal provided
-     * in the constructor options.
-     */
-    set aborted(_) { }
-    write(chunk, encoding, cb) {
-        if (this[ABORTED])
-            return false;
-        if (this[EOF])
-            throw new Error('write after end');
-        if (this[DESTROYED]) {
-            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
-            return true;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = 'utf8';
-        }
-        if (!encoding)
-            encoding = 'utf8';
-        const fn = this[ASYNC] ? defer : nodefer;
-        // convert array buffers and typed array views into buffers
-        // at some point in the future, we may want to do the opposite!
-        // leave strings and buffers as-is
-        // anything is only allowed if in object mode, so throw
-        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
-            if (isArrayBufferView(chunk)) {
-                //@ts-ignore - sinful unsafe type changing
-                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
-            }
-            else if (isArrayBufferLike(chunk)) {
-                //@ts-ignore - sinful unsafe type changing
-                chunk = Buffer.from(chunk);
-            }
-            else if (typeof chunk !== 'string') {
-                throw new Error('Non-contiguous data written to non-objectMode stream');
-            }
-        }
-        // handle object mode up front, since it's simpler
-        // this yields better performance, fewer checks later.
-        if (this[OBJECTMODE]) {
-            // maybe impossible?
-            /* c8 ignore start */
-            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
-                this[FLUSH](true);
-            /* c8 ignore stop */
-            if (this[FLOWING])
-                this.emit('data', chunk);
-            else
-                this[BUFFERPUSH](chunk);
-            if (this[BUFFERLENGTH] !== 0)
-                this.emit('readable');
-            if (cb)
-                fn(cb);
-            return this[FLOWING];
-        }
-        // at this point the chunk is a buffer or string
-        // don't buffer it up or send it to the decoder
-        if (!chunk.length) {
-            if (this[BUFFERLENGTH] !== 0)
-                this.emit('readable');
-            if (cb)
-                fn(cb);
-            return this[FLOWING];
-        }
-        // fast-path writing strings of same encoding to a stream with
-        // an empty buffer, skipping the buffer/decoder dance
-        if (typeof chunk === 'string' &&
-            // unless it is a string already ready for us to use
-            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
-            //@ts-ignore - sinful unsafe type change
-            chunk = Buffer.from(chunk, encoding);
-        }
-        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
-            //@ts-ignore - sinful unsafe type change
-            chunk = this[DECODER].write(chunk);
-        }
-        // Note: flushing CAN potentially switch us into not-flowing mode
-        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
-            this[FLUSH](true);
-        if (this[FLOWING])
-            this.emit('data', chunk);
-        else
-            this[BUFFERPUSH](chunk);
-        if (this[BUFFERLENGTH] !== 0)
-            this.emit('readable');
-        if (cb)
-            fn(cb);
-        return this[FLOWING];
-    }
-    /**
-     * Low-level explicit read method.
-     *
-     * In objectMode, the argument is ignored, and one item is returned if
-     * available.
-     *
-     * `n` is the number of bytes (or in the case of encoding streams,
-     * characters) to consume. If `n` is not provided, then the entire buffer
-     * is returned, or `null` is returned if no data is available.
-     *
-     * If `n` is greater than the amount of data in the internal buffer,
-     * then `null` is returned.
-     */
-    read(n) {
-        if (this[DESTROYED])
-            return null;
-        this[DISCARDED] = false;
-        if (this[BUFFERLENGTH] === 0 ||
-            n === 0 ||
-            (n && n > this[BUFFERLENGTH])) {
-            this[MAYBE_EMIT_END]();
-            return null;
-        }
-        if (this[OBJECTMODE])
-            n = null;
-        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
-            // not object mode, so if we have an encoding, then RType is string
-            // otherwise, must be Buffer
-            this[BUFFER] = [
-                (this[ENCODING]
-                    ? this[BUFFER].join('')
-                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
-            ];
-        }
-        const ret = this[READ](n || null, this[BUFFER][0]);
-        this[MAYBE_EMIT_END]();
-        return ret;
-    }
-    [READ](n, chunk) {
-        if (this[OBJECTMODE])
-            this[BUFFERSHIFT]();
-        else {
-            const c = chunk;
-            if (n === c.length || n === null)
-                this[BUFFERSHIFT]();
-            else if (typeof c === 'string') {
-                this[BUFFER][0] = c.slice(n);
-                chunk = c.slice(0, n);
-                this[BUFFERLENGTH] -= n;
-            }
-            else {
-                this[BUFFER][0] = c.subarray(n);
-                chunk = c.subarray(0, n);
-                this[BUFFERLENGTH] -= n;
-            }
-        }
-        this.emit('data', chunk);
-        if (!this[BUFFER].length && !this[EOF])
-            this.emit('drain');
-        return chunk;
-    }
-    end(chunk, encoding, cb) {
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = 'utf8';
-        }
-        if (chunk !== undefined)
-            this.write(chunk, encoding);
-        if (cb)
-            this.once('end', cb);
-        this[EOF] = true;
-        this.writable = false;
-        // if we haven't written anything, then go ahead and emit,
-        // even if we're not reading.
-        // we'll re-emit if a new 'end' listener is added anyway.
-        // This makes MP more suitable to write-only use cases.
-        if (this[FLOWING] || !this[PAUSED])
-            this[MAYBE_EMIT_END]();
-        return this;
-    }
-    // don't let the internal resume be overwritten
-    [RESUME]() {
-        if (this[DESTROYED])
-            return;
-        if (!this[DATALISTENERS] && !this[PIPES].length) {
-            this[DISCARDED] = true;
-        }
-        this[PAUSED] = false;
-        this[FLOWING] = true;
-        this.emit('resume');
-        if (this[BUFFER].length)
-            this[FLUSH]();
-        else if (this[EOF])
-            this[MAYBE_EMIT_END]();
-        else
-            this.emit('drain');
-    }
-    /**
-     * Resume the stream if it is currently in a paused state
-     *
-     * If called when there are no pipe destinations or `data` event listeners,
-     * this will place the stream in a "discarded" state, where all data will
-     * be thrown away. The discarded state is removed if a pipe destination or
-     * data handler is added, if pause() is called, or if any synchronous or
-     * asynchronous iteration is started.
-     */
-    resume() {
-        return this[RESUME]();
-    }
-    /**
-     * Pause the stream
-     */
-    pause() {
-        this[FLOWING] = false;
-        this[PAUSED] = true;
-        this[DISCARDED] = false;
-    }
-    /**
-     * true if the stream has been forcibly destroyed
-     */
-    get destroyed() {
-        return this[DESTROYED];
-    }
-    /**
-     * true if the stream is currently in a flowing state, meaning that
-     * any writes will be immediately emitted.
-     */
-    get flowing() {
-        return this[FLOWING];
-    }
-    /**
-     * true if the stream is currently in a paused state
-     */
-    get paused() {
-        return this[PAUSED];
-    }
-    [BUFFERPUSH](chunk) {
-        if (this[OBJECTMODE])
-            this[BUFFERLENGTH] += 1;
-        else
-            this[BUFFERLENGTH] += chunk.length;
-        this[BUFFER].push(chunk);
-    }
-    [BUFFERSHIFT]() {
-        if (this[OBJECTMODE])
-            this[BUFFERLENGTH] -= 1;
-        else
-            this[BUFFERLENGTH] -= this[BUFFER][0].length;
-        return this[BUFFER].shift();
-    }
-    [FLUSH](noDrain = false) {
-        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
-            this[BUFFER].length);
-        if (!noDrain && !this[BUFFER].length && !this[EOF])
-            this.emit('drain');
-    }
-    [FLUSHCHUNK](chunk) {
-        this.emit('data', chunk);
-        return this[FLOWING];
-    }
-    /**
-     * Pipe all data emitted by this stream into the destination provided.
-     *
-     * Triggers the flow of data.
-     */
-    pipe(dest, opts) {
-        if (this[DESTROYED])
-            return dest;
-        this[DISCARDED] = false;
-        const ended = this[EMITTED_END];
-        opts = opts || {};
-        if (dest === proc.stdout || dest === proc.stderr)
-            opts.end = false;
-        else
-            opts.end = opts.end !== false;
-        opts.proxyErrors = !!opts.proxyErrors;
-        // piping an ended stream ends immediately
-        if (ended) {
-            if (opts.end)
-                dest.end();
-        }
-        else {
-            // "as" here just ignores the WType, which pipes don't care about,
-            // since they're only consuming from us, and writing to the dest
-            this[PIPES].push(!opts.proxyErrors
-                ? new Pipe(this, dest, opts)
-                : new PipeProxyErrors(this, dest, opts));
-            if (this[ASYNC])
-                defer(() => this[RESUME]());
-            else
-                this[RESUME]();
-        }
-        return dest;
-    }
-    /**
-     * Fully unhook a piped destination stream.
-     *
-     * If the destination stream was the only consumer of this stream (ie,
-     * there are no other piped destinations or `'data'` event listeners)
-     * then the flow of data will stop until there is another consumer or
-     * {@link Minipass#resume} is explicitly called.
-     */
-    unpipe(dest) {
-        const p = this[PIPES].find(p => p.dest === dest);
-        if (p) {
-            if (this[PIPES].length === 1) {
-                if (this[FLOWING] && this[DATALISTENERS] === 0) {
-                    this[FLOWING] = false;
-                }
-                this[PIPES] = [];
-            }
-            else
-                this[PIPES].splice(this[PIPES].indexOf(p), 1);
-            p.unpipe();
-        }
-    }
-    /**
-     * Alias for {@link Minipass#on}
-     */
-    addListener(ev, handler) {
-        return this.on(ev, handler);
-    }
-    /**
-     * Mostly identical to `EventEmitter.on`, with the following
-     * behavior differences to prevent data loss and unnecessary hangs:
-     *
-     * - Adding a 'data' event handler will trigger the flow of data
-     *
-     * - Adding a 'readable' event handler when there is data waiting to be read
-     *   will cause 'readable' to be emitted immediately.
-     *
-     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
-     *   already passed will cause the event to be emitted immediately and all
-     *   handlers removed.
-     *
-     * - Adding an 'error' event handler after an error has been emitted will
-     *   cause the event to be re-emitted immediately with the error previously
-     *   raised.
-     */
-    on(ev, handler) {
-        const ret = super.on(ev, handler);
-        if (ev === 'data') {
-            this[DISCARDED] = false;
-            this[DATALISTENERS]++;
-            if (!this[PIPES].length && !this[FLOWING]) {
-                this[RESUME]();
-            }
-        }
-        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
-            super.emit('readable');
-        }
-        else if (isEndish(ev) && this[EMITTED_END]) {
-            super.emit(ev);
-            this.removeAllListeners(ev);
-        }
-        else if (ev === 'error' && this[EMITTED_ERROR]) {
-            const h = handler;
-            if (this[ASYNC])
-                defer(() => h.call(this, this[EMITTED_ERROR]));
-            else
-                h.call(this, this[EMITTED_ERROR]);
-        }
-        return ret;
-    }
-    /**
-     * Alias for {@link Minipass#off}
-     */
-    removeListener(ev, handler) {
-        return this.off(ev, handler);
-    }
-    /**
-     * Mostly identical to `EventEmitter.off`
-     *
-     * If a 'data' event handler is removed, and it was the last consumer
-     * (ie, there are no pipe destinations or other 'data' event listeners),
-     * then the flow of data will stop until there is another consumer or
-     * {@link Minipass#resume} is explicitly called.
-     */
-    off(ev, handler) {
-        const ret = super.off(ev, handler);
-        // if we previously had listeners, and now we don't, and we don't
-        // have any pipes, then stop the flow, unless it's been explicitly
-        // put in a discarded flowing state via stream.resume().
-        if (ev === 'data') {
-            this[DATALISTENERS] = this.listeners('data').length;
-            if (this[DATALISTENERS] === 0 &&
-                !this[DISCARDED] &&
-                !this[PIPES].length) {
-                this[FLOWING] = false;
-            }
-        }
-        return ret;
-    }
-    /**
-     * Mostly identical to `EventEmitter.removeAllListeners`
-     *
-     * If all 'data' event handlers are removed, and they were the last consumer
-     * (ie, there are no pipe destinations), then the flow of data will stop
-     * until there is another consumer or {@link Minipass#resume} is explicitly
-     * called.
-     */
-    removeAllListeners(ev) {
-        const ret = super.removeAllListeners(ev);
-        if (ev === 'data' || ev === undefined) {
-            this[DATALISTENERS] = 0;
-            if (!this[DISCARDED] && !this[PIPES].length) {
-                this[FLOWING] = false;
-            }
-        }
-        return ret;
-    }
-    /**
-     * true if the 'end' event has been emitted
-     */
-    get emittedEnd() {
-        return this[EMITTED_END];
-    }
-    [MAYBE_EMIT_END]() {
-        if (!this[EMITTING_END] &&
-            !this[EMITTED_END] &&
-            !this[DESTROYED] &&
-            this[BUFFER].length === 0 &&
-            this[EOF]) {
-            this[EMITTING_END] = true;
-            this.emit('end');
-            this.emit('prefinish');
-            this.emit('finish');
-            if (this[CLOSED])
-                this.emit('close');
-            this[EMITTING_END] = false;
-        }
-    }
-    /**
-     * Mostly identical to `EventEmitter.emit`, with the following
-     * behavior differences to prevent data loss and unnecessary hangs:
-     *
-     * If the stream has been destroyed, and the event is something other
-     * than 'close' or 'error', then `false` is returned and no handlers
-     * are called.
-     *
-     * If the event is 'end', and has already been emitted, then the event
-     * is ignored. If the stream is in a paused or non-flowing state, then
-     * the event will be deferred until data flow resumes. If the stream is
-     * async, then handlers will be called on the next tick rather than
-     * immediately.
-     *
-     * If the event is 'close', and 'end' has not yet been emitted, then
-     * the event will be deferred until after 'end' is emitted.
-     *
-     * If the event is 'error', and an AbortSignal was provided for the stream,
-     * and there are no listeners, then the event is ignored, matching the
-     * behavior of node core streams in the presence of an AbortSignal.
-     *
-     * If the event is 'finish' or 'prefinish', then all listeners will be
-     * removed after emitting the event, to prevent double-firing.
-     */
-    emit(ev, ...args) {
-        const data = args[0];
-        // error and close are only events allowed after calling destroy()
-        if (ev !== 'error' &&
-            ev !== 'close' &&
-            ev !== DESTROYED &&
-            this[DESTROYED]) {
-            return false;
-        }
-        else if (ev === 'data') {
-            return !this[OBJECTMODE] && !data
-                ? false
-                : this[ASYNC]
-                    ? (defer(() => this[EMITDATA](data)), true)
-                    : this[EMITDATA](data);
-        }
-        else if (ev === 'end') {
-            return this[EMITEND]();
-        }
-        else if (ev === 'close') {
-            this[CLOSED] = true;
-            // don't emit close before 'end' and 'finish'
-            if (!this[EMITTED_END] && !this[DESTROYED])
-                return false;
-            const ret = super.emit('close');
-            this.removeAllListeners('close');
-            return ret;
-        }
-        else if (ev === 'error') {
-            this[EMITTED_ERROR] = data;
-            super.emit(ERROR, data);
-            const ret = !this[SIGNAL] || this.listeners('error').length
-                ? super.emit('error', data)
-                : false;
-            this[MAYBE_EMIT_END]();
-            return ret;
-        }
-        else if (ev === 'resume') {
-            const ret = super.emit('resume');
-            this[MAYBE_EMIT_END]();
-            return ret;
-        }
-        else if (ev === 'finish' || ev === 'prefinish') {
-            const ret = super.emit(ev);
-            this.removeAllListeners(ev);
-            return ret;
-        }
-        // Some other unknown event
-        const ret = super.emit(ev, ...args);
-        this[MAYBE_EMIT_END]();
-        return ret;
-    }
-    [EMITDATA](data) {
-        for (const p of this[PIPES]) {
-            if (p.dest.write(data) === false)
-                this.pause();
-        }
-        const ret = this[DISCARDED] ? false : super.emit('data', data);
-        this[MAYBE_EMIT_END]();
-        return ret;
-    }
-    [EMITEND]() {
-        if (this[EMITTED_END])
-            return false;
-        this[EMITTED_END] = true;
-        this.readable = false;
-        return this[ASYNC]
-            ? (defer(() => this[EMITEND2]()), true)
-            : this[EMITEND2]();
-    }
-    [EMITEND2]() {
-        if (this[DECODER]) {
-            const data = this[DECODER].end();
-            if (data) {
-                for (const p of this[PIPES]) {
-                    p.dest.write(data);
-                }
-                if (!this[DISCARDED])
-                    super.emit('data', data);
-            }
-        }
-        for (const p of this[PIPES]) {
-            p.end();
-        }
-        const ret = super.emit('end');
-        this.removeAllListeners('end');
-        return ret;
-    }
-    /**
-     * Return a Promise that resolves to an array of all emitted data once
-     * the stream ends.
-     */
-    async collect() {
-        const buf = Object.assign([], {
-            dataLength: 0,
-        });
-        if (!this[OBJECTMODE])
-            buf.dataLength = 0;
-        // set the promise first, in case an error is raised
-        // by triggering the flow here.
-        const p = this.promise();
-        this.on('data', c => {
-            buf.push(c);
-            if (!this[OBJECTMODE])
-                buf.dataLength += c.length;
-        });
-        await p;
-        return buf;
-    }
-    /**
-     * Return a Promise that resolves to the concatenation of all emitted data
-     * once the stream ends.
-     *
-     * Not allowed on objectMode streams.
-     */
-    async concat() {
-        if (this[OBJECTMODE]) {
-            throw new Error('cannot concat in objectMode');
-        }
-        const buf = await this.collect();
-        return (this[ENCODING]
-            ? buf.join('')
-            : Buffer.concat(buf, buf.dataLength));
-    }
-    /**
-     * Return a void Promise that resolves once the stream ends.
-     */
-    async promise() {
-        return new Promise((resolve, reject) => {
-            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
-            this.on('error', er => reject(er));
-            this.on('end', () => resolve());
-        });
-    }
-    /**
-     * Asynchronous `for await of` iteration.
-     *
-     * This will continue emitting all chunks until the stream terminates.
-     */
-    [Symbol.asyncIterator]() {
-        // set this up front, in case the consumer doesn't call next()
-        // right away.
-        this[DISCARDED] = false;
-        let stopped = false;
-        const stop = async () => {
-            this.pause();
-            stopped = true;
-            return { value: undefined, done: true };
-        };
-        const next = () => {
-            if (stopped)
-                return stop();
-            const res = this.read();
-            if (res !== null)
-                return Promise.resolve({ done: false, value: res });
-            if (this[EOF])
-                return stop();
-            let resolve;
-            let reject;
-            const onerr = (er) => {
-                this.off('data', ondata);
-                this.off('end', onend);
-                this.off(DESTROYED, ondestroy);
-                stop();
-                reject(er);
-            };
-            const ondata = (value) => {
-                this.off('error', onerr);
-                this.off('end', onend);
-                this.off(DESTROYED, ondestroy);
-                this.pause();
-                resolve({ value, done: !!this[EOF] });
-            };
-            const onend = () => {
-                this.off('error', onerr);
-                this.off('data', ondata);
-                this.off(DESTROYED, ondestroy);
-                stop();
-                resolve({ done: true, value: undefined });
-            };
-            const ondestroy = () => onerr(new Error('stream destroyed'));
-            return new Promise((res, rej) => {
-                reject = rej;
-                resolve = res;
-                this.once(DESTROYED, ondestroy);
-                this.once('error', onerr);
-                this.once('end', onend);
-                this.once('data', ondata);
-            });
-        };
-        return {
-            next,
-            throw: stop,
-            return: stop,
-            [Symbol.asyncIterator]() {
-                return this;
-            },
-        };
-    }
-    /**
-     * Synchronous `for of` iteration.
-     *
-     * The iteration will terminate when the internal buffer runs out, even
-     * if the stream has not yet terminated.
-     */
-    [Symbol.iterator]() {
-        // set this up front, in case the consumer doesn't call next()
-        // right away.
-        this[DISCARDED] = false;
-        let stopped = false;
-        const stop = () => {
-            this.pause();
-            this.off(ERROR, stop);
-            this.off(DESTROYED, stop);
-            this.off('end', stop);
-            stopped = true;
-            return { done: true, value: undefined };
-        };
-        const next = () => {
-            if (stopped)
-                return stop();
-            const value = this.read();
-            return value === null ? stop() : { done: false, value };
-        };
-        this.once('end', stop);
-        this.once(ERROR, stop);
-        this.once(DESTROYED, stop);
-        return {
-            next,
-            throw: stop,
-            return: stop,
-            [Symbol.iterator]() {
-                return this;
-            },
-        };
-    }
-    /**
-     * Destroy a stream, preventing it from being used for any further purpose.
-     *
-     * If the stream has a `close()` method, then it will be called on
-     * destruction.
-     *
-     * After destruction, any attempt to write data, read data, or emit most
-     * events will be ignored.
-     *
-     * If an error argument is provided, then it will be emitted in an
-     * 'error' event.
-     */
-    destroy(er) {
-        if (this[DESTROYED]) {
-            if (er)
-                this.emit('error', er);
-            else
-                this.emit(DESTROYED);
-            return this;
-        }
-        this[DESTROYED] = true;
-        this[DISCARDED] = true;
-        // throw away all buffered data, it's never coming out
-        this[BUFFER].length = 0;
-        this[BUFFERLENGTH] = 0;
-        const wc = this;
-        if (typeof wc.close === 'function' && !this[CLOSED])
-            wc.close();
-        if (er)
-            this.emit('error', er);
-        // if no error to emit, still reject pending promises
-        else
-            this.emit(DESTROYED);
-        return this;
-    }
-    /**
-     * Alias for {@link isStream}
-     *
-     * Former export location, maintained for backwards compatibility.
-     *
-     * @deprecated
-     */
-    static get isStream() {
-        return exports.isStream;
-    }
-}
-exports.Minipass = Minipass;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
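For orientation, a brief usage sketch of the Minipass class removed above; it assumes the published `minipass` package is installed rather than this vendored copy:

// Sketch: basic concat() and async iteration with Minipass.
const { Minipass } = require('minipass')

async function demo () {
  // concat() resolves to all chunks joined once the stream ends
  // (a string when an encoding is set; it throws in objectMode).
  const mp = new Minipass({ encoding: 'utf8' })
  mp.write('hello, ')
  mp.end('world')
  console.log(await mp.concat()) // 'hello, world'

  // for await consumes items as they are read; objectMode yields them as-is.
  const src = new Minipass({ objectMode: true })
  src.end({ n: 1 })
  for await (const item of src) {
    console.log(item) // { n: 1 }
  }
}

demo().catch(console.error)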
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
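This three-line nested package.json is the standard dual-build marker: Node resolves the nearest package.json to decide how to parse .js files, so {"type": "commonjs"} under dist/cjs keeps that build CommonJS while a sibling dist/mjs marker declares {"type": "module"}. The root package then routes consumers with a conditional exports map along these lines (illustrative, not quoted from this patch):

{
  "exports": {
    ".": {
      "import": "./dist/mjs/index.js",
      "require": "./dist/cjs/index.js"
    }
  }
}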
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
deleted file mode 100644
index b65fafbae43a4..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
+++ /dev/null
@@ -1,1018 +0,0 @@
-const proc = typeof process === 'object' && process
-    ? process
-    : {
-        stdout: null,
-        stderr: null,
-    };
-import { EventEmitter } from 'events';
-import Stream from 'stream';
-import { StringDecoder } from 'string_decoder';
-/**
- * Return true if the argument is a Minipass stream, Node stream, or something
- * else that Minipass can interact with.
- */
-export const isStream = (s) => !!s &&
-    typeof s === 'object' &&
-    (s instanceof Minipass ||
-        s instanceof Stream ||
-        isReadable(s) ||
-        isWritable(s));
-/**
- * Return true if the argument is a valid {@link Minipass.Readable}
- */
-export const isReadable = (s) => !!s &&
-    typeof s === 'object' &&
-    s instanceof EventEmitter &&
-    typeof s.pipe === 'function' &&
-    // node core Writable streams have a pipe() method, but it throws
-    s.pipe !== Stream.Writable.prototype.pipe;
-/**
- * Return true if the argument is a valid {@link Minipass.Writable}
- */
-export const isWritable = (s) => !!s &&
-    typeof s === 'object' &&
-    s instanceof EventEmitter &&
-    typeof s.write === 'function' &&
-    typeof s.end === 'function';
-const EOF = Symbol('EOF');
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
-const EMITTED_END = Symbol('emittedEnd');
-const EMITTING_END = Symbol('emittingEnd');
-const EMITTED_ERROR = Symbol('emittedError');
-const CLOSED = Symbol('closed');
-const READ = Symbol('read');
-const FLUSH = Symbol('flush');
-const FLUSHCHUNK = Symbol('flushChunk');
-const ENCODING = Symbol('encoding');
-const DECODER = Symbol('decoder');
-const FLOWING = Symbol('flowing');
-const PAUSED = Symbol('paused');
-const RESUME = Symbol('resume');
-const BUFFER = Symbol('buffer');
-const PIPES = Symbol('pipes');
-const BUFFERLENGTH = Symbol('bufferLength');
-const BUFFERPUSH = Symbol('bufferPush');
-const BUFFERSHIFT = Symbol('bufferShift');
-const OBJECTMODE = Symbol('objectMode');
-// internal event when stream is destroyed
-const DESTROYED = Symbol('destroyed');
-// internal event when stream has an error
-const ERROR = Symbol('error');
-const EMITDATA = Symbol('emitData');
-const EMITEND = Symbol('emitEnd');
-const EMITEND2 = Symbol('emitEnd2');
-const ASYNC = Symbol('async');
-const ABORT = Symbol('abort');
-const ABORTED = Symbol('aborted');
-const SIGNAL = Symbol('signal');
-const DATALISTENERS = Symbol('dataListeners');
-const DISCARDED = Symbol('discarded');
-const defer = (fn) => Promise.resolve().then(fn);
-const nodefer = (fn) => fn();
-const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
-const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
-    (!!b &&
-        typeof b === 'object' &&
-        b.constructor &&
-        b.constructor.name === 'ArrayBuffer' &&
-        b.byteLength >= 0);
-const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
-/**
- * Internal class representing a pipe to a destination stream.
- *
- * @internal
- */
-class Pipe {
-    src;
-    dest;
-    opts;
-    ondrain;
-    constructor(src, dest, opts) {
-        this.src = src;
-        this.dest = dest;
-        this.opts = opts;
-        this.ondrain = () => src[RESUME]();
-        this.dest.on('drain', this.ondrain);
-    }
-    unpipe() {
-        this.dest.removeListener('drain', this.ondrain);
-    }
-    // only here for the prototype
-    /* c8 ignore start */
-    proxyErrors(_er) { }
-    /* c8 ignore stop */
-    end() {
-        this.unpipe();
-        if (this.opts.end)
-            this.dest.end();
-    }
-}
-/**
- * Internal class representing a pipe to a destination stream where
- * errors are proxied.
- *
- * @internal
- */
-class PipeProxyErrors extends Pipe {
-    unpipe() {
-        this.src.removeListener('error', this.proxyErrors);
-        super.unpipe();
-    }
-    constructor(src, dest, opts) {
-        super(src, dest, opts);
-        this.proxyErrors = er => dest.emit('error', er);
-        src.on('error', this.proxyErrors);
-    }
-}
-const isObjectModeOptions = (o) => !!o.objectMode;
-const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
-/**
- * Main export, the Minipass class
- *
- * `RType` is the type of data emitted, defaults to Buffer
- *
- * `WType` is the type of data to be written, if RType is buffer or string,
- * then any {@link Minipass.ContiguousData} is allowed.
- *
- * `Events` is the set of event handler signatures that this object
- * will emit, see {@link Minipass.Events}
- */
-export class Minipass extends EventEmitter {
-    [FLOWING] = false;
-    [PAUSED] = false;
-    [PIPES] = [];
-    [BUFFER] = [];
-    [OBJECTMODE];
-    [ENCODING];
-    [ASYNC];
-    [DECODER];
-    [EOF] = false;
-    [EMITTED_END] = false;
-    [EMITTING_END] = false;
-    [CLOSED] = false;
-    [EMITTED_ERROR] = null;
-    [BUFFERLENGTH] = 0;
-    [DESTROYED] = false;
-    [SIGNAL];
-    [ABORTED] = false;
-    [DATALISTENERS] = 0;
-    [DISCARDED] = false;
-    /**
-     * true if the stream can be written
-     */
-    writable = true;
-    /**
-     * true if the stream can be read
-     */
-    readable = true;
-    /**
-     * If `RType` is Buffer, then options do not need to be provided.
-     * Otherwise, an options object must be provided to specify either
-     * {@link Minipass.SharedOptions.objectMode} or
-     * {@link Minipass.SharedOptions.encoding}, as appropriate.
-     */
-    constructor(...args) {
-        const options = (args[0] ||
-            {});
-        super();
-        if (options.objectMode && typeof options.encoding === 'string') {
-            throw new TypeError('Encoding and objectMode may not be used together');
-        }
-        if (isObjectModeOptions(options)) {
-            this[OBJECTMODE] = true;
-            this[ENCODING] = null;
-        }
-        else if (isEncodingOptions(options)) {
-            this[ENCODING] = options.encoding;
-            this[OBJECTMODE] = false;
-        }
-        else {
-            this[OBJECTMODE] = false;
-            this[ENCODING] = null;
-        }
-        this[ASYNC] = !!options.async;
-        this[DECODER] = this[ENCODING]
-            ? new StringDecoder(this[ENCODING])
-            : null;
-        //@ts-ignore - private option for debugging and testing
-        if (options && options.debugExposeBuffer === true) {
-            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
-        }
-        //@ts-ignore - private option for debugging and testing
-        if (options && options.debugExposePipes === true) {
-            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
-        }
-        const { signal } = options;
-        if (signal) {
-            this[SIGNAL] = signal;
-            if (signal.aborted) {
-                this[ABORT]();
-            }
-            else {
-                signal.addEventListener('abort', () => this[ABORT]());
-            }
-        }
-    }
-    /**
-     * The amount of data stored in the buffer waiting to be read.
-     *
-     * For Buffer streams, this will be the total byte length.
-     * For string encoding streams, this will be the string character length,
-     * according to JavaScript's `string.length` logic.
-     * For objectMode streams, this is a count of the items waiting to be
-     * emitted.
-     */
-    get bufferLength() {
-        return this[BUFFERLENGTH];
-    }
-    /**
-     * The `BufferEncoding` currently in use, or `null`
-     */
-    get encoding() {
-        return this[ENCODING];
-    }
-    /**
-     * @deprecated - This is a read-only property
-     */
-    set encoding(_enc) {
-        throw new Error('Encoding must be set at instantiation time');
-    }
-    /**
-     * @deprecated - Encoding may only be set at instantiation time
-     */
-    setEncoding(_enc) {
-        throw new Error('Encoding must be set at instantiation time');
-    }
-    /**
-     * True if this is an objectMode stream
-     */
-    get objectMode() {
-        return this[OBJECTMODE];
-    }
-    /**
-     * @deprecated - This is a read-only property
-     */
-    set objectMode(_om) {
-        throw new Error('objectMode must be set at instantiation time');
-    }
-    /**
-     * true if this is an async stream
-     */
-    get ['async']() {
-        return this[ASYNC];
-    }
-    /**
-     * Set to true to make this stream async.
-     *
-     * Once set, it cannot be unset, as this would potentially cause incorrect
-     * behavior.  I.e., a sync stream can be made async, but an async stream
-     * cannot be safely made sync.
-     */
-    set ['async'](a) {
-        this[ASYNC] = this[ASYNC] || !!a;
-    }
-    // drop everything and get out of the flow completely
-    [ABORT]() {
-        this[ABORTED] = true;
-        this.emit('abort', this[SIGNAL]?.reason);
-        this.destroy(this[SIGNAL]?.reason);
-    }
-    /**
-     * True if the stream has been aborted.
-     */
-    get aborted() {
-        return this[ABORTED];
-    }
-    /**
-     * No-op setter. Stream aborted status is set via the AbortSignal provided
-     * in the constructor options.
-     */
-    set aborted(_) { }
-    write(chunk, encoding, cb) {
-        if (this[ABORTED])
-            return false;
-        if (this[EOF])
-            throw new Error('write after end');
-        if (this[DESTROYED]) {
-            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
-            return true;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = 'utf8';
-        }
-        if (!encoding)
-            encoding = 'utf8';
-        const fn = this[ASYNC] ? defer : nodefer;
-        // convert array buffers and typed array views into buffers
-        // at some point in the future, we may want to do the opposite!
-        // leave strings and buffers as-is
-        // anything is only allowed if in object mode, so throw
-        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
-            if (isArrayBufferView(chunk)) {
-                //@ts-ignore - sinful unsafe type changing
-                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
-            }
-            else if (isArrayBufferLike(chunk)) {
-                //@ts-ignore - sinful unsafe type changing
-                chunk = Buffer.from(chunk);
-            }
-            else if (typeof chunk !== 'string') {
-                throw new Error('Non-contiguous data written to non-objectMode stream');
-            }
-        }
-        // handle object mode up front, since it's simpler
-        // this yields better performance, fewer checks later.
-        if (this[OBJECTMODE]) {
-            // maybe impossible?
-            /* c8 ignore start */
-            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
-                this[FLUSH](true);
-            /* c8 ignore stop */
-            if (this[FLOWING])
-                this.emit('data', chunk);
-            else
-                this[BUFFERPUSH](chunk);
-            if (this[BUFFERLENGTH] !== 0)
-                this.emit('readable');
-            if (cb)
-                fn(cb);
-            return this[FLOWING];
-        }
-        // at this point the chunk is a buffer or string
-        // don't buffer it up or send it to the decoder
-        if (!chunk.length) {
-            if (this[BUFFERLENGTH] !== 0)
-                this.emit('readable');
-            if (cb)
-                fn(cb);
-            return this[FLOWING];
-        }
-        // fast-path writing strings of same encoding to a stream with
-        // an empty buffer, skipping the buffer/decoder dance
-        if (typeof chunk === 'string' &&
-            // unless it is a string already ready for us to use
-            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
-            //@ts-ignore - sinful unsafe type change
-            chunk = Buffer.from(chunk, encoding);
-        }
-        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
-            //@ts-ignore - sinful unsafe type change
-            chunk = this[DECODER].write(chunk);
-        }
-        // Note: flushing CAN potentially switch us into not-flowing mode
-        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
-            this[FLUSH](true);
-        if (this[FLOWING])
-            this.emit('data', chunk);
-        else
-            this[BUFFERPUSH](chunk);
-        if (this[BUFFERLENGTH] !== 0)
-            this.emit('readable');
-        if (cb)
-            fn(cb);
-        return this[FLOWING];
-    }
-    /**
-     * Low-level explicit read method.
-     *
-     * In objectMode, the argument is ignored, and one item is returned if
-     * available.
-     *
-     * `n` is the number of bytes (or in the case of encoding streams,
-     * characters) to consume. If `n` is not provided, then the entire buffer
-     * is returned, or `null` is returned if no data is available.
-     *
-     * If `n` is greater than the amount of data in the internal buffer,
-     * then `null` is returned.
-     */
-    read(n) {
-        if (this[DESTROYED])
-            return null;
-        this[DISCARDED] = false;
-        if (this[BUFFERLENGTH] === 0 ||
-            n === 0 ||
-            (n && n > this[BUFFERLENGTH])) {
-            this[MAYBE_EMIT_END]();
-            return null;
-        }
-        if (this[OBJECTMODE])
-            n = null;
-        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
-            // not object mode, so if we have an encoding, then RType is string
-            // otherwise, must be Buffer
-            this[BUFFER] = [
-                (this[ENCODING]
-                    ? this[BUFFER].join('')
-                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
-            ];
-        }
-        const ret = this[READ](n || null, this[BUFFER][0]);
-        this[MAYBE_EMIT_END]();
-        return ret;
-    }
-    [READ](n, chunk) {
-        if (this[OBJECTMODE])
-            this[BUFFERSHIFT]();
-        else {
-            const c = chunk;
-            if (n === c.length || n === null)
-                this[BUFFERSHIFT]();
-            else if (typeof c === 'string') {
-                this[BUFFER][0] = c.slice(n);
-                chunk = c.slice(0, n);
-                this[BUFFERLENGTH] -= n;
-            }
-            else {
-                this[BUFFER][0] = c.subarray(n);
-                chunk = c.subarray(0, n);
-                this[BUFFERLENGTH] -= n;
-            }
-        }
-        this.emit('data', chunk);
-        if (!this[BUFFER].length && !this[EOF])
-            this.emit('drain');
-        return chunk;
-    }
-    end(chunk, encoding, cb) {
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = 'utf8';
-        }
-        if (chunk !== undefined)
-            this.write(chunk, encoding);
-        if (cb)
-            this.once('end', cb);
-        this[EOF] = true;
-        this.writable = false;
-        // if we haven't written anything, then go ahead and emit,
-        // even if we're not reading.
-        // we'll re-emit if a new 'end' listener is added anyway.
-        // This makes MP more suitable to write-only use cases.
-        if (this[FLOWING] || !this[PAUSED])
-            this[MAYBE_EMIT_END]();
-        return this;
-    }
-    // don't let the internal resume be overwritten
-    [RESUME]() {
-        if (this[DESTROYED])
-            return;
-        if (!this[DATALISTENERS] && !this[PIPES].length) {
-            this[DISCARDED] = true;
-        }
-        this[PAUSED] = false;
-        this[FLOWING] = true;
-        this.emit('resume');
-        if (this[BUFFER].length)
-            this[FLUSH]();
-        else if (this[EOF])
-            this[MAYBE_EMIT_END]();
-        else
-            this.emit('drain');
-    }
-    /**
-     * Resume the stream if it is currently in a paused state
-     *
-     * If called when there are no pipe destinations or `data` event listeners,
-     * this will place the stream in a "discarded" state, where all data will
-     * be thrown away. The discarded state is removed if a pipe destination or
-     * data handler is added, if pause() is called, or if any synchronous or
-     * asynchronous iteration is started.
-     */
-    resume() {
-        return this[RESUME]();
-    }
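// Editorial sketch (not part of the shipped file): resume() with no
// consumers enters the "discarded" state described above, so writes are
// dropped instead of buffered until a consumer shows up.
const { Minipass } = require('minipass')
const src = new Minipass({ encoding: 'utf8' })
src.resume()                          // no pipes or 'data' listeners: discard
src.write('dropped')                  // thrown away, not buffered
src.on('data', c => console.log(c))   // adding a consumer clears the state
src.write('kept')                     // logs 'kept'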
-    /**
-     * Pause the stream
-     */
-    pause() {
-        this[FLOWING] = false;
-        this[PAUSED] = true;
-        this[DISCARDED] = false;
-    }
-    /**
-     * true if the stream has been forcibly destroyed
-     */
-    get destroyed() {
-        return this[DESTROYED];
-    }
-    /**
-     * true if the stream is currently in a flowing state, meaning that
-     * any writes will be immediately emitted.
-     */
-    get flowing() {
-        return this[FLOWING];
-    }
-    /**
-     * true if the stream is currently in a paused state
-     */
-    get paused() {
-        return this[PAUSED];
-    }
-    [BUFFERPUSH](chunk) {
-        if (this[OBJECTMODE])
-            this[BUFFERLENGTH] += 1;
-        else
-            this[BUFFERLENGTH] += chunk.length;
-        this[BUFFER].push(chunk);
-    }
-    [BUFFERSHIFT]() {
-        if (this[OBJECTMODE])
-            this[BUFFERLENGTH] -= 1;
-        else
-            this[BUFFERLENGTH] -= this[BUFFER][0].length;
-        return this[BUFFER].shift();
-    }
-    [FLUSH](noDrain = false) {
-        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
-            this[BUFFER].length);
-        if (!noDrain && !this[BUFFER].length && !this[EOF])
-            this.emit('drain');
-    }
-    [FLUSHCHUNK](chunk) {
-        this.emit('data', chunk);
-        return this[FLOWING];
-    }
-    /**
-     * Pipe all data emitted by this stream into the destination provided.
-     *
-     * Triggers the flow of data.
-     */
-    pipe(dest, opts) {
-        if (this[DESTROYED])
-            return dest;
-        this[DISCARDED] = false;
-        const ended = this[EMITTED_END];
-        opts = opts || {};
-        if (dest === proc.stdout || dest === proc.stderr)
-            opts.end = false;
-        else
-            opts.end = opts.end !== false;
-        opts.proxyErrors = !!opts.proxyErrors;
-        // piping an ended stream ends immediately
-        if (ended) {
-            if (opts.end)
-                dest.end();
-        }
-        else {
-            // "as" here just ignores the WType, which pipes don't care about,
-            // since they're only consuming from us, and writing to the dest
-            this[PIPES].push(!opts.proxyErrors
-                ? new Pipe(this, dest, opts)
-                : new PipeProxyErrors(this, dest, opts));
-            if (this[ASYNC])
-                defer(() => this[RESUME]());
-            else
-                this[RESUME]();
-        }
-        return dest;
-    }
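// Editorial sketch (not part of the shipped file): pipe() triggers the
// flow of data, and destinations like stdout are never end()ed, per the
// special-casing above.
const { Minipass } = require('minipass')
const src = new Minipass({ encoding: 'utf8' })
src.pipe(process.stdout)   // opts.end is forced to false for stdout
src.write('hello\n')
src.end()                  // stdout stays open after the stream ends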
-    /**
-     * Fully unhook a piped destination stream.
-     *
-     * If the destination stream was the only consumer of this stream (ie,
-     * there are no other piped destinations or `'data'` event listeners)
-     * then the flow of data will stop until there is another consumer or
-     * {@link Minipass#resume} is explicitly called.
-     */
-    unpipe(dest) {
-        const p = this[PIPES].find(p => p.dest === dest);
-        if (p) {
-            if (this[PIPES].length === 1) {
-                if (this[FLOWING] && this[DATALISTENERS] === 0) {
-                    this[FLOWING] = false;
-                }
-                this[PIPES] = [];
-            }
-            else
-                this[PIPES].splice(this[PIPES].indexOf(p), 1);
-            p.unpipe();
-        }
-    }
-    /**
-     * Alias for {@link Minipass#on}
-     */
-    addListener(ev, handler) {
-        return this.on(ev, handler);
-    }
-    /**
-     * Mostly identical to `EventEmitter.on`, with the following
-     * behavior differences to prevent data loss and unnecessary hangs:
-     *
-     * - Adding a 'data' event handler will trigger the flow of data
-     *
-     * - Adding a 'readable' event handler when there is data waiting to be read
-     *   will cause 'readable' to be emitted immediately.
-     *
-     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
-     *   already passed will cause the event to be emitted immediately and all
-     *   handlers removed.
-     *
-     * - Adding an 'error' event handler after an error has been emitted will
-     *   cause the event to be re-emitted immediately with the error previously
-     *   raised.
-     */
-    on(ev, handler) {
-        const ret = super.on(ev, handler);
-        if (ev === 'data') {
-            this[DISCARDED] = false;
-            this[DATALISTENERS]++;
-            if (!this[PIPES].length && !this[FLOWING]) {
-                this[RESUME]();
-            }
-        }
-        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
-            super.emit('readable');
-        }
-        else if (isEndish(ev) && this[EMITTED_END]) {
-            super.emit(ev);
-            this.removeAllListeners(ev);
-        }
-        else if (ev === 'error' && this[EMITTED_ERROR]) {
-            const h = handler;
-            if (this[ASYNC])
-                defer(() => h.call(this, this[EMITTED_ERROR]));
-            else
-                h.call(this, this[EMITTED_ERROR]);
-        }
-        return ret;
-    }
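// Editorial sketch (not part of the shipped file): the listener
// special-casing above in action -- adding 'data' starts the flow, and
// 'end' handlers attached after the fact still fire.
const { Minipass } = require('minipass')
const mp = new Minipass({ encoding: 'utf8' })
mp.end('late')                        // ends before anyone is listening
mp.on('data', c => console.log(c))    // triggers flow: logs 'late'
mp.on('end', () => console.log('end still fires'))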
-    /**
-     * Alias for {@link Minipass#off}
-     */
-    removeListener(ev, handler) {
-        return this.off(ev, handler);
-    }
-    /**
-     * Mostly identical to `EventEmitter.off`
-     *
-     * If a 'data' event handler is removed, and it was the last consumer
-     * (ie, there are no pipe destinations or other 'data' event listeners),
-     * then the flow of data will stop until there is another consumer or
-     * {@link Minipass#resume} is explicitly called.
-     */
-    off(ev, handler) {
-        const ret = super.off(ev, handler);
-        // if we previously had listeners, and now we don't, and we don't
-        // have any pipes, then stop the flow, unless it's been explicitly
-        // put in a discarded flowing state via stream.resume().
-        if (ev === 'data') {
-            this[DATALISTENERS] = this.listeners('data').length;
-            if (this[DATALISTENERS] === 0 &&
-                !this[DISCARDED] &&
-                !this[PIPES].length) {
-                this[FLOWING] = false;
-            }
-        }
-        return ret;
-    }
-    /**
-     * Mostly identical to `EventEmitter.removeAllListeners`
-     *
-     * If all 'data' event handlers are removed, and they were the last consumer
-     * (ie, there are no pipe destinations), then the flow of data will stop
-     * until there is another consumer or {@link Minipass#resume} is explicitly
-     * called.
-     */
-    removeAllListeners(ev) {
-        const ret = super.removeAllListeners(ev);
-        if (ev === 'data' || ev === undefined) {
-            this[DATALISTENERS] = 0;
-            if (!this[DISCARDED] && !this[PIPES].length) {
-                this[FLOWING] = false;
-            }
-        }
-        return ret;
-    }
-    /**
-     * true if the 'end' event has been emitted
-     */
-    get emittedEnd() {
-        return this[EMITTED_END];
-    }
-    [MAYBE_EMIT_END]() {
-        if (!this[EMITTING_END] &&
-            !this[EMITTED_END] &&
-            !this[DESTROYED] &&
-            this[BUFFER].length === 0 &&
-            this[EOF]) {
-            this[EMITTING_END] = true;
-            this.emit('end');
-            this.emit('prefinish');
-            this.emit('finish');
-            if (this[CLOSED])
-                this.emit('close');
-            this[EMITTING_END] = false;
-        }
-    }
-    /**
-     * Mostly identical to `EventEmitter.emit`, with the following
-     * behavior differences to prevent data loss and unnecessary hangs:
-     *
-     * If the stream has been destroyed, and the event is something other
-     * than 'close' or 'error', then `false` is returned and no handlers
-     * are called.
-     *
-     * If the event is 'end', and has already been emitted, then the event
-     * is ignored. If the stream is in a paused or non-flowing state, then
-     * the event will be deferred until data flow resumes. If the stream is
-     * async, then handlers will be called on the next tick rather than
-     * immediately.
-     *
-     * If the event is 'close', and 'end' has not yet been emitted, then
-     * the event will be deferred until after 'end' is emitted.
-     *
-     * If the event is 'error', and an AbortSignal was provided for the stream,
-     * and there are no listeners, then the event is ignored, matching the
-     * behavior of node core streams in the presence of an AbortSignal.
-     *
-     * If the event is 'finish' or 'prefinish', then all listeners will be
-     * removed after emitting the event, to prevent double-firing.
-     */
-    emit(ev, ...args) {
-        const data = args[0];
-        // error and close are only events allowed after calling destroy()
-        if (ev !== 'error' &&
-            ev !== 'close' &&
-            ev !== DESTROYED &&
-            this[DESTROYED]) {
-            return false;
-        }
-        else if (ev === 'data') {
-            return !this[OBJECTMODE] && !data
-                ? false
-                : this[ASYNC]
-                    ? (defer(() => this[EMITDATA](data)), true)
-                    : this[EMITDATA](data);
-        }
-        else if (ev === 'end') {
-            return this[EMITEND]();
-        }
-        else if (ev === 'close') {
-            this[CLOSED] = true;
-            // don't emit close before 'end' and 'finish'
-            if (!this[EMITTED_END] && !this[DESTROYED])
-                return false;
-            const ret = super.emit('close');
-            this.removeAllListeners('close');
-            return ret;
-        }
-        else if (ev === 'error') {
-            this[EMITTED_ERROR] = data;
-            super.emit(ERROR, data);
-            const ret = !this[SIGNAL] || this.listeners('error').length
-                ? super.emit('error', data)
-                : false;
-            this[MAYBE_EMIT_END]();
-            return ret;
-        }
-        else if (ev === 'resume') {
-            const ret = super.emit('resume');
-            this[MAYBE_EMIT_END]();
-            return ret;
-        }
-        else if (ev === 'finish' || ev === 'prefinish') {
-            const ret = super.emit(ev);
-            this.removeAllListeners(ev);
-            return ret;
-        }
-        // Some other unknown event
-        const ret = super.emit(ev, ...args);
-        this[MAYBE_EMIT_END]();
-        return ret;
-    }
-    [EMITDATA](data) {
-        for (const p of this[PIPES]) {
-            if (p.dest.write(data) === false)
-                this.pause();
-        }
-        const ret = this[DISCARDED] ? false : super.emit('data', data);
-        this[MAYBE_EMIT_END]();
-        return ret;
-    }
-    [EMITEND]() {
-        if (this[EMITTED_END])
-            return false;
-        this[EMITTED_END] = true;
-        this.readable = false;
-        return this[ASYNC]
-            ? (defer(() => this[EMITEND2]()), true)
-            : this[EMITEND2]();
-    }
-    [EMITEND2]() {
-        if (this[DECODER]) {
-            const data = this[DECODER].end();
-            if (data) {
-                for (const p of this[PIPES]) {
-                    p.dest.write(data);
-                }
-                if (!this[DISCARDED])
-                    super.emit('data', data);
-            }
-        }
-        for (const p of this[PIPES]) {
-            p.end();
-        }
-        const ret = super.emit('end');
-        this.removeAllListeners('end');
-        return ret;
-    }
-    /**
-     * Return a Promise that resolves to an array of all emitted data once
-     * the stream ends.
-     */
-    async collect() {
-        const buf = Object.assign([], {
-            dataLength: 0,
-        });
-        if (!this[OBJECTMODE])
-            buf.dataLength = 0;
-        // set the promise first, in case an error is raised
-        // by triggering the flow here.
-        const p = this.promise();
-        this.on('data', c => {
-            buf.push(c);
-            if (!this[OBJECTMODE])
-                buf.dataLength += c.length;
-        });
-        await p;
-        return buf;
-    }
-    /**
-     * Return a Promise that resolves to the concatenation of all emitted data
-     * once the stream ends.
-     *
-     * Not allowed on objectMode streams.
-     */
-    async concat() {
-        if (this[OBJECTMODE]) {
-            throw new Error('cannot concat in objectMode');
-        }
-        const buf = await this.collect();
-        return (this[ENCODING]
-            ? buf.join('')
-            : Buffer.concat(buf, buf.dataLength));
-    }
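// Editorial sketch (not part of the shipped file): typical use of the
// promise-returning helpers above on an encoding stream.
const { Minipass } = require('minipass')
const mp = new Minipass({ encoding: 'utf8' })
setTimeout(() => mp.end('hello, world'))
mp.concat().then(s => console.log(s))  // 'hello, world' as a single string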
-    /**
-     * Return a void Promise that resolves once the stream ends.
-     */
-    async promise() {
-        return new Promise((resolve, reject) => {
-            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
-            this.on('error', er => reject(er));
-            this.on('end', () => resolve());
-        });
-    }
-    /**
-     * Asynchronous `for await of` iteration.
-     *
-     * This will continue emitting all chunks until the stream terminates.
-     */
-    [Symbol.asyncIterator]() {
-        // set this up front, in case the consumer doesn't call next()
-        // right away.
-        this[DISCARDED] = false;
-        let stopped = false;
-        const stop = async () => {
-            this.pause();
-            stopped = true;
-            return { value: undefined, done: true };
-        };
-        const next = () => {
-            if (stopped)
-                return stop();
-            const res = this.read();
-            if (res !== null)
-                return Promise.resolve({ done: false, value: res });
-            if (this[EOF])
-                return stop();
-            let resolve;
-            let reject;
-            const onerr = (er) => {
-                this.off('data', ondata);
-                this.off('end', onend);
-                this.off(DESTROYED, ondestroy);
-                stop();
-                reject(er);
-            };
-            const ondata = (value) => {
-                this.off('error', onerr);
-                this.off('end', onend);
-                this.off(DESTROYED, ondestroy);
-                this.pause();
-                resolve({ value, done: !!this[EOF] });
-            };
-            const onend = () => {
-                this.off('error', onerr);
-                this.off('data', ondata);
-                this.off(DESTROYED, ondestroy);
-                stop();
-                resolve({ done: true, value: undefined });
-            };
-            const ondestroy = () => onerr(new Error('stream destroyed'));
-            return new Promise((res, rej) => {
-                reject = rej;
-                resolve = res;
-                this.once(DESTROYED, ondestroy);
-                this.once('error', onerr);
-                this.once('end', onend);
-                this.once('data', ondata);
-            });
-        };
-        return {
-            next,
-            throw: stop,
-            return: stop,
-            [Symbol.asyncIterator]() {
-                return this;
-            },
-        };
-    }
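// Editorial sketch (not part of the shipped file): the async iterator
// above makes the stream directly consumable with for await.
const { Minipass } = require('minipass')
async function consume () {
  const mp = new Minipass({ encoding: 'utf8' })
  setTimeout(() => { mp.write('a'); mp.write('b'); mp.end() })
  for await (const chunk of mp) console.log(chunk)  // 'a', then 'b'
}
consume()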
-    /**
-     * Synchronous `for of` iteration.
-     *
-     * The iteration will terminate when the internal buffer runs out, even
-     * if the stream has not yet terminated.
-     */
-    [Symbol.iterator]() {
-        // set this up front, in case the consumer doesn't call next()
-        // right away.
-        this[DISCARDED] = false;
-        let stopped = false;
-        const stop = () => {
-            this.pause();
-            this.off(ERROR, stop);
-            this.off(DESTROYED, stop);
-            this.off('end', stop);
-            stopped = true;
-            return { done: true, value: undefined };
-        };
-        const next = () => {
-            if (stopped)
-                return stop();
-            const value = this.read();
-            return value === null ? stop() : { done: false, value };
-        };
-        this.once('end', stop);
-        this.once(ERROR, stop);
-        this.once(DESTROYED, stop);
-        return {
-            next,
-            throw: stop,
-            return: stop,
-            [Symbol.iterator]() {
-                return this;
-            },
-        };
-    }
-    /**
-     * Destroy a stream, preventing it from being used for any further purpose.
-     *
-     * If the stream has a `close()` method, then it will be called on
-     * destruction.
-     *
-     * After destruction, any attempt to write data, read data, or emit most
-     * events will be ignored.
-     *
-     * If an error argument is provided, then it will be emitted in an
-     * 'error' event.
-     */
-    destroy(er) {
-        if (this[DESTROYED]) {
-            if (er)
-                this.emit('error', er);
-            else
-                this.emit(DESTROYED);
-            return this;
-        }
-        this[DESTROYED] = true;
-        this[DISCARDED] = true;
-        // throw away all buffered data, it's never coming out
-        this[BUFFER].length = 0;
-        this[BUFFERLENGTH] = 0;
-        const wc = this;
-        if (typeof wc.close === 'function' && !this[CLOSED])
-            wc.close();
-        if (er)
-            this.emit('error', er);
-        // if no error to emit, still reject pending promises
-        else
-            this.emit(DESTROYED);
-        return this;
-    }
-    /**
-     * Alias for {@link isStream}
-     *
-     * Former export location, maintained for backwards compatibility.
-     *
-     * @deprecated
-     */
-    static get isStream() {
-        return isStream;
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json
deleted file mode 100644
index 6faaa247a5bc6..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
-  "name": "minipass",
-  "version": "7.0.3",
-  "description": "minimal implementation of a PassThrough stream",
-  "main": "./dist/cjs/index.js",
-  "module": "./dist/mjs/index.js",
-  "types": "./dist/cjs/index.js",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.js"
-      },
-      "require": {
-        "types": "./dist/cjs/index.d.ts",
-        "default": "./dist/cjs/index.js"
-      }
-    },
-    "./package.json": "./package.json"
-  },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "preprepare": "rm -rf dist",
-    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "c8 tap",
-    "snap": "c8 tap",
-    "format": "prettier --write . --loglevel warn",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-  },
-  "tap": {
-    "coverage": false,
-    "node-arg": [
-      "--enable-source-maps",
-      "--no-warnings",
-      "--loader",
-      "ts-node/esm"
-    ],
-    "ts": false
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "devDependencies": {
-    "@types/node": "^20.1.2",
-    "@types/tap": "^15.0.8",
-    "c8": "^7.13.0",
-    "prettier": "^2.6.2",
-    "tap": "^16.3.0",
-    "ts-node": "^10.9.1",
-    "typedoc": "^0.24.8",
-    "typescript": "^5.1.3",
-    "end-of-stream": "^1.4.0",
-    "node-abort-controller": "^3.1.1",
-    "sync-content": "^1.0.2",
-    "through2": "^2.0.3"
-  },
-  "repository": "https://github.com/isaacs/minipass",
-  "keywords": [
-    "passthrough",
-    "stream"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "ISC",
-  "engines": {
-    "node": ">=16 || 14 >=14.17"
-  }
-}
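The deleted manifest above pairs a CommonJS build with an ESM build through conditional "exports", and the nested dist/mjs/package.json ({"type": "module"}) marks that subtree as ESM. A minimal sketch of how the two entry points resolve, assuming the package is installed:

    // consume.cjs -- the "require" condition resolves ./dist/cjs/index.js
    const { Minipass } = require('minipass')

    // consume.mjs -- the "import" condition resolves ./dist/mjs/index.js
    import { Minipass } from 'minipass'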
diff --git a/node_modules/tuf-js/node_modules/cacache/package.json b/node_modules/tuf-js/node_modules/cacache/package.json
deleted file mode 100644
index ab58cb8b7c50f..0000000000000
--- a/node_modules/tuf-js/node_modules/cacache/package.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
-  "name": "cacache",
-  "version": "17.1.4",
-  "cache-version": {
-    "content": "2",
-    "index": "5"
-  },
-  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "coverage": "tap",
-    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
-    "lint": "eslint \"**/*.js\"",
-    "npmclilint": "npmcli-lint",
-    "lintfix": "npm run lint -- --fix",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "posttest": "npm run lint",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/cacache.git"
-  },
-  "keywords": [
-    "cache",
-    "caching",
-    "content-addressable",
-    "sri",
-    "sri hash",
-    "subresource integrity",
-    "cache",
-    "storage",
-    "store",
-    "file store",
-    "filesystem",
-    "disk cache",
-    "disk storage"
-  ],
-  "license": "ISC",
-  "dependencies": {
-    "@npmcli/fs": "^3.1.0",
-    "fs-minipass": "^3.0.0",
-    "glob": "^10.2.2",
-    "lru-cache": "^7.7.1",
-    "minipass": "^7.0.3",
-    "minipass-collect": "^1.0.2",
-    "minipass-flush": "^1.0.5",
-    "minipass-pipeline": "^1.2.4",
-    "p-map": "^4.0.0",
-    "ssri": "^10.0.0",
-    "tar": "^6.1.11",
-    "unique-filename": "^3.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.0.0"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "windowsCI": false,
-    "version": "4.18.0",
-    "publish": "true"
-  },
-  "author": "GitHub Inc.",
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
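The deleted manifest above belongs to cacache, the content-addressable disk cache used throughout the npm stack. A minimal sketch of its documented put/get API, with a hypothetical cache location:

    const cacache = require('cacache')
    const cachePath = '/tmp/my-cache'  // hypothetical location

    // put() stores content and resolves to its subresource integrity hash
    cacache.put(cachePath, 'my-key', Buffer.from('hello'))
      .then(() => cacache.get(cachePath, 'my-key'))
      .then(({ data, integrity }) => console.log(data.toString(), integrity))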
diff --git a/node_modules/tuf-js/node_modules/lru-cache/LICENSE b/node_modules/tuf-js/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/tuf-js/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tuf-js/node_modules/lru-cache/index.js b/node_modules/tuf-js/node_modules/lru-cache/index.js
deleted file mode 100644
index 48e99fe5e5a70..0000000000000
--- a/node_modules/tuf-js/node_modules/lru-cache/index.js
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  : AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
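// Editorial sketch (not part of the shipped file): the ladder above picks
// the smallest index storage able to address `max` slots.
console.log(getUintArray(100))     // Uint8Array  (fits in 2^8)
console.log(getUintArray(70000))   // Uint32Array (past 2^16, fits in 2^32)
console.log(getUintArray(-1))      // null: not a positive integer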
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
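// Editorial sketch (not part of the shipped file): constructing a cache
// with the options validated above; at least one of max, maxSize, or ttl
// is required.
const cache = new LRUCache({
  max: 500,            // at most 500 entries
  ttl: 1000 * 60 * 5,  // entries go stale after five minutes
})
cache.set('a', 1)
console.log(cache.get('a'))  // 1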
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.now + status.ttl - status.start
-      }
-    }
-
-    // debounce calls to perf.now() to 1s so we're not hitting
-    // that costly call repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation returned an invalid value (expected a positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
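// Editorial sketch (not part of the shipped file): dump() and load()
// above round-trip a cache through a portable array, preserving TTLs.
const src = new LRUCache({ max: 10, ttl: 60000 })
src.set('k', 'v')
const copy = new LRUCache({ max: 10, ttl: 60000 })
copy.load(src.dump())
console.log(copy.get('k'))  // 'v'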
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
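// Editorial sketch (not part of the shipped file): fetch() above combines
// get() and set() around a configured fetchMethod.
const fetching = new LRUCache({
  max: 100,
  ttl: 5000,
  fetchMethod: async (key, staleValue, { signal }) => {
    // stand-in for an upstream request; `signal` aborts on eviction
    return `value-for-${key}`
  },
})
fetching.fetch('x').then(v => console.log(v))  // 'value-for-x'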
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet
-        // it's not stale, which means this isn't a staleWhileRefetching.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
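// [editor's note — illustrative sketch, not part of the patch] moveToTail()
// above is pure pointer surgery on the parallel next/prev index arrays. A
// standalone model with three entries in recency order 0 -> 1 -> 2, moving index 1:
const next = [1, 2, 0], prev = [0, 0, 1]
let head = 0, tail = 2
next[prev[1]] = next[1]; prev[next[1]] = prev[1]  // connect(prev[1], next[1]): 0 -> 2
prev[1] = tail; next[tail] = 1; tail = 1          // connect(tail, 1); tail = 1
// recency order is now 0 -> 2 -> 1; no keys or values were touched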
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-module.exports = LRUCache
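// [editor's note — illustrative sketch, not part of the patch] The del/reset/
// length getters above route legacy v6-style callers through the warn-once
// machinery before handing back the modern method, so old code keeps working:
const LRUCache = require('lru-cache')   // v7: the class is the default export
const cache = new LRUCache({ max: 10 })
cache.set('a', 1)
cache.del('a')    // works; emits a single DeprecationWarning -> use delete()
cache.reset()     // same; one warning per process -> use clear()
cache.length      // one warning -> use cache.size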
diff --git a/node_modules/tuf-js/node_modules/lru-cache/index.mjs b/node_modules/tuf-js/node_modules/lru-cache/index.mjs
deleted file mode 100644
index 4a0b4813ec515..0000000000000
--- a/node_modules/tuf-js/node_modules/lru-cache/index.mjs
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
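// [editor's note — illustrative sketch, not part of the patch] On platforms
// without a native AbortController, the fallback above is behavior-compatible
// for this module's needs: abort() sets aborted/reason and fires listeners.
const ac = new AC()
ac.signal.addEventListener('abort', e => console.log('aborted:', e.target.reason))
ac.abort(new Error('stop'))   // ac.signal.aborted === true, reason preserved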
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
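// [editor's note — illustrative sketch, not part of the patch] The link
// arrays narrow to the smallest typed array that can address `max` slots;
// pickIndexArray below mirrors the selection for the common cases:
const pickIndexArray = max =>
  max <= Math.pow(2, 8) ? Uint8Array
  : max <= Math.pow(2, 16) ? Uint16Array
  : Uint32Array
pickIndexArray(200).name    // 'Uint8Array'  — one byte per link
pickIndexArray(70000).name  // 'Uint32Array' — beyond 2^16 entries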
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.now + status.ttl - status.start
-      }
-    }
-
-    // debounce calls to perf.now() to 1s so we're not hitting
-    // that costly call repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
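// [editor's note — illustrative sketch, not part of the patch] The staleness
// test above reduces to simple clock arithmetic; standalone:
const stale = (now, start, ttl) => ttl !== 0 && start !== 0 && now - start > ttl
stale(1500, 1000, 400)  // true  — 500ms elapsed, 400ms budget
stale(1300, 1000, 400)  // false — still fresh; remaining = start + ttl - now = 100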
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation return invalid (expect positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
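// [editor's note — illustrative sketch, not part of the patch] dump()/load()
// round-trip through portable Date.now()-based start times, so a cache can be
// serialized in one process and revived in another:
const LRUCache = require('lru-cache')
const a = new LRUCache({ max: 10, ttl: 60000 })
a.set('k', 'v')
const snapshot = a.dump()            // [[key, { value, ttl, start }], ...]
const b = new LRUCache({ max: 10 })
b.load(snapshot)                     // b.get('k') === 'v', TTL clock preserved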
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
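// [editor's note — illustrative sketch, not part of the patch] In-flight
// fetches live in valList as promises tagged with bookkeeping properties;
// this standalone predicate mirrors the shape isBackgroundFetch() checks below:
const looksLikeBackgroundFetch = p =>
  p && typeof p === 'object' && typeof p.then === 'function' &&
  Object.prototype.hasOwnProperty.call(p, '__staleWhileFetching') &&
  Object.prototype.hasOwnProperty.call(p, '__returned')
looksLikeBackgroundFetch(Promise.resolve(1))  // false — untagged promise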
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
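// [editor's note — illustrative sketch, not part of the patch] Typical use of
// the fetch() flow above, with a hypothetical loader and URL; staleValue and
// signal come from the backgroundFetch plumbing:
const LRUCache = require('lru-cache')
const cache = new LRUCache({
  max: 100,
  ttl: 60000,
  allowStale: true,   // serve the old value while the refresh runs
  fetchMethod: async (key, staleValue, { signal }) => {
    const res = await fetch(`https://example.invalid/${key}`, { signal })
    return res.json()
  },
})
cache.fetch('some-key').then(v => console.log(v))  // hit, inflight, stale, or refresh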
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet
-        // it's not stale, which means this isn't a staleWhileRefetching.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-export default LRUCache

diff --git a/node_modules/tuf-js/node_modules/lru-cache/package.json b/node_modules/tuf-js/node_modules/lru-cache/package.json
deleted file mode 100644
index 9684991727e7a..0000000000000
--- a/node_modules/tuf-js/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,96 +0,0 @@
-{
-  "name": "lru-cache",
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "7.18.3",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "node ./scripts/transpile-to-esm.js",
-    "size": "size-limit",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc ./index.d.ts"
-  },
-  "type": "commonjs",
-  "main": "./index.js",
-  "module": "./index.mjs",
-  "types": "./index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./index.d.ts",
-        "default": "./index.mjs"
-      },
-      "require": {
-        "types": "./index.d.ts",
-        "default": "./index.js"
-      }
-    },
-    "./package.json": "./package.json"
-  },
-  "repository": "git://github.com/isaacs/node-lru-cache.git",
-  "devDependencies": {
-    "@size-limit/preset-small-lib": "^7.0.8",
-    "@types/node": "^17.0.31",
-    "@types/tap": "^15.0.6",
-    "benchmark": "^2.1.4",
-    "c8": "^7.11.2",
-    "clock-mock": "^1.0.6",
-    "eslint-config-prettier": "^8.5.0",
-    "prettier": "^2.6.2",
-    "size-limit": "^7.0.8",
-    "tap": "^16.3.4",
-    "ts-node": "^10.7.0",
-    "tslib": "^2.4.0",
-    "typedoc": "^0.23.24",
-    "typescript": "^4.6.4"
-  },
-  "license": "ISC",
-  "files": [
-    "index.js",
-    "index.mjs",
-    "index.d.ts"
-  ],
-  "engines": {
-    "node": ">=12"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--include=index.js"
-    ],
-    "node-arg": [
-      "--expose-gc",
-      "--require",
-      "ts-node/register"
-    ],
-    "ts": false
-  },
-  "size-limit": [
-    {
-      "path": "./index.js"
-    }
-  ]
-}
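// [editor's note — illustrative sketch, not part of the patch] The "exports"
// map above routes each module system to a matching artifact:
const cjs = require('lru-cache')          // resolves to ./index.js
// import esm from 'lru-cache'            // resolves to ./index.mjs
// both load the same class; types come from ./index.d.ts either way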
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE b/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE
deleted file mode 100644
index 1808eb2844231..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright 2017-2022 (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/agent.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/agent.js
deleted file mode 100644
index dd68492ed7ea7..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/agent.js
+++ /dev/null
@@ -1,214 +0,0 @@
-'use strict'
-const LRU = require('lru-cache')
-const url = require('url')
-const isLambda = require('is-lambda')
-const dns = require('./dns.js')
-
-const AGENT_CACHE = new LRU({ max: 50 })
-const HttpAgent = require('agentkeepalive')
-const HttpsAgent = HttpAgent.HttpsAgent
-
-module.exports = getAgent
-
-const getAgentTimeout = timeout =>
-  typeof timeout !== 'number' || !timeout ? 0 : timeout + 1
-
-const getMaxSockets = maxSockets => maxSockets || 15
-
-function getAgent (uri, opts) {
-  const parsedUri = new url.URL(typeof uri === 'string' ? uri : uri.url)
-  const isHttps = parsedUri.protocol === 'https:'
-  const pxuri = getProxyUri(parsedUri.href, opts)
-
-  // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout
-  // of zero disables the timeout behavior (OS limits still apply). Else, if
-  // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that
-  // the node-fetch-npm timeout will always fire first, giving us more
-  // consistent errors.
-  const agentTimeout = getAgentTimeout(opts.timeout)
-  const agentMaxSockets = getMaxSockets(opts.maxSockets)
-
-  const key = [
-    `https:${isHttps}`,
-    pxuri
-      ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}`
-      : '>no-proxy<',
-    `local-address:${opts.localAddress || '>no-local-address<'}`,
-    `strict-ssl:${isHttps ? opts.rejectUnauthorized : '>no-strict-ssl<'}`,
-    `ca:${(isHttps && opts.ca) || '>no-ca<'}`,
-    `cert:${(isHttps && opts.cert) || '>no-cert<'}`,
-    `key:${(isHttps && opts.key) || '>no-key<'}`,
-    `timeout:${agentTimeout}`,
-    `maxSockets:${agentMaxSockets}`,
-  ].join(':')
-
-  if (opts.agent != null) { // `agent: false` has special behavior!
-    return opts.agent
-  }
-
-  // keep alive in AWS lambda makes no sense
-  const lambdaAgent = !isLambda ? null
-    : isHttps ? require('https').globalAgent
-    : require('http').globalAgent
-
-  if (isLambda && !pxuri) {
-    return lambdaAgent
-  }
-
-  if (AGENT_CACHE.peek(key)) {
-    return AGENT_CACHE.get(key)
-  }
-
-  if (pxuri) {
-    const pxopts = isLambda ? {
-      ...opts,
-      agent: lambdaAgent,
-    } : opts
-    const proxy = getProxy(pxuri, pxopts, isHttps)
-    AGENT_CACHE.set(key, proxy)
-    return proxy
-  }
-
-  const agent = isHttps ? new HttpsAgent({
-    maxSockets: agentMaxSockets,
-    ca: opts.ca,
-    cert: opts.cert,
-    key: opts.key,
-    localAddress: opts.localAddress,
-    rejectUnauthorized: opts.rejectUnauthorized,
-    timeout: agentTimeout,
-    freeSocketTimeout: 15000,
-    lookup: dns.getLookup(opts.dns),
-  }) : new HttpAgent({
-    maxSockets: agentMaxSockets,
-    localAddress: opts.localAddress,
-    timeout: agentTimeout,
-    freeSocketTimeout: 15000,
-    lookup: dns.getLookup(opts.dns),
-  })
-  AGENT_CACHE.set(key, agent)
-  return agent
-}
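// [editor's note — illustrative sketch, not part of the patch] Agents are
// memoized per connection-relevant options via the composite key above; the
// timeout padding reduces to this standalone helper:
const pad = t => (typeof t !== 'number' || !t ? 0 : t + 1)
pad(0)      // 0     — timeout disabled, OS socket limits still apply
pad(10000)  // 10001 — agent fires just after the fetch-level timeout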
-
-function checkNoProxy (uri, opts) {
-  const host = new url.URL(uri).hostname.split('.').reverse()
-  let noproxy = (opts.noProxy || getProcessEnv('no_proxy'))
-  if (typeof noproxy === 'string') {
-    noproxy = noproxy.split(',').map(n => n.trim())
-  }
-
-  return noproxy && noproxy.some(no => {
-    const noParts = no.split('.').filter(x => x).reverse()
-    if (!noParts.length) {
-      return false
-    }
-    for (let i = 0; i < noParts.length; i++) {
-      if (host[i] !== noParts[i]) {
-        return false
-      }
-    }
-    return true
-  })
-}
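// [editor's note — illustrative sketch, not part of the patch] The no_proxy
// test is a label-wise suffix match on the reversed hostname; standalone:
const bypasses = (host, no) => {
  const h = host.split('.').reverse()
  const n = no.split('.').filter(x => x).reverse()
  return n.length > 0 && n.every((label, i) => h[i] === label)
}
bypasses('registry.npmjs.org', 'npmjs.org')   // true  — proxy skipped
bypasses('npmjs.org.evil.com', 'npmjs.org')   // false — still proxied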
-
-module.exports.getProcessEnv = getProcessEnv
-
-function getProcessEnv (env) {
-  if (!env) {
-    return
-  }
-
-  let value
-
-  if (Array.isArray(env)) {
-    for (const e of env) {
-      value = process.env[e] ||
-        process.env[e.toUpperCase()] ||
-        process.env[e.toLowerCase()]
-      if (typeof value !== 'undefined') {
-        break
-      }
-    }
-  }
-
-  if (typeof env === 'string') {
-    value = process.env[env] ||
-      process.env[env.toUpperCase()] ||
-      process.env[env.toLowerCase()]
-  }
-
-  return value
-}
-
-module.exports.getProxyUri = getProxyUri
-function getProxyUri (uri, opts) {
-  const protocol = new url.URL(uri).protocol
-
-  const proxy = opts.proxy ||
-    (
-      protocol === 'https:' &&
-      getProcessEnv('https_proxy')
-    ) ||
-    (
-      protocol === 'http:' &&
-      getProcessEnv(['https_proxy', 'http_proxy', 'proxy'])
-    )
-  if (!proxy) {
-    return null
-  }
-
-  const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy
-
-  return !checkNoProxy(uri, opts) && parsedProxy
-}
-
-const getAuth = u =>
-  u.username && u.password ? decodeURIComponent(`${u.username}:${u.password}`)
-  : u.username ? decodeURIComponent(u.username)
-  : null
-
-const getPath = u => u.pathname + u.search + u.hash
-
-const HttpProxyAgent = require('http-proxy-agent')
-const HttpsProxyAgent = require('https-proxy-agent')
-const { SocksProxyAgent } = require('socks-proxy-agent')
-module.exports.getProxy = getProxy
-function getProxy (proxyUrl, opts, isHttps) {
-  // our current proxy agents do not support an overridden dns lookup method, so will not
-  // benefit from the dns cache
-  const popts = {
-    host: proxyUrl.hostname,
-    port: proxyUrl.port,
-    protocol: proxyUrl.protocol,
-    path: getPath(proxyUrl),
-    auth: getAuth(proxyUrl),
-    ca: opts.ca,
-    cert: opts.cert,
-    key: opts.key,
-    timeout: getAgentTimeout(opts.timeout),
-    localAddress: opts.localAddress,
-    maxSockets: getMaxSockets(opts.maxSockets),
-    rejectUnauthorized: opts.rejectUnauthorized,
-  }
-
-  if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') {
-    if (!isHttps) {
-      return new HttpProxyAgent(popts)
-    } else {
-      return new HttpsProxyAgent(popts)
-    }
-  } else if (proxyUrl.protocol.startsWith('socks')) {
-    // socks-proxy-agent uses hostname not host
-    popts.hostname = popts.host
-    delete popts.host
-    return new SocksProxyAgent(popts)
-  } else {
-    throw Object.assign(
-      new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`),
-      {
-        code: 'EUNSUPPORTEDPROXY',
-        url: proxyUrl.href,
-      }
-    )
-  }
-}
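// [editor's note — illustrative sketch, not part of the patch] The agent
// choice above depends only on proxy protocol and target scheme; mirrored:
const agentKind = (proxyProtocol, targetIsHttps) =>
  /^https?:$/.test(proxyProtocol)
    ? (targetIsHttps ? 'HttpsProxyAgent' : 'HttpProxyAgent')
    : proxyProtocol.startsWith('socks') ? 'SocksProxyAgent'
    : 'EUNSUPPORTEDPROXY'
agentKind('socks5:', true)   // 'SocksProxyAgent' (which takes hostname, not host)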
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js
deleted file mode 100644
index 45141095074ec..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js
+++ /dev/null
@@ -1,469 +0,0 @@
-const { Request, Response } = require('minipass-fetch')
-const { Minipass } = require('minipass')
-const MinipassFlush = require('minipass-flush')
-const cacache = require('cacache')
-const url = require('url')
-
-const CachingMinipassPipeline = require('../pipeline.js')
-const CachePolicy = require('./policy.js')
-const cacheKey = require('./key.js')
-const remote = require('../remote.js')
-
-const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
-
-// allow list for request headers that will be written to the cache index
-// note: we will also store any request headers
-// that are named in a response's vary header
-const KEEP_REQUEST_HEADERS = [
-  'accept-charset',
-  'accept-encoding',
-  'accept-language',
-  'accept',
-  'cache-control',
-]
-
-// allow list for response headers that will be written to the cache index
-// note: we must not store the real response's age header, or when we load
-// a cache policy based on the metadata it will think the cached response
-// is always stale
-const KEEP_RESPONSE_HEADERS = [
-  'cache-control',
-  'content-encoding',
-  'content-language',
-  'content-type',
-  'date',
-  'etag',
-  'expires',
-  'last-modified',
-  'link',
-  'location',
-  'pragma',
-  'vary',
-]
-
-// return an object containing all metadata to be written to the index
-const getMetadata = (request, response, options) => {
-  const metadata = {
-    time: Date.now(),
-    url: request.url,
-    reqHeaders: {},
-    resHeaders: {},
-
-    // options on which we must match the request and vary the response
-    options: {
-      compress: options.compress != null ? options.compress : request.compress,
-    },
-  }
-
-  // only save the status if it's not a 200 or 304
-  if (response.status !== 200 && response.status !== 304) {
-    metadata.status = response.status
-  }
-
-  for (const name of KEEP_REQUEST_HEADERS) {
-    if (request.headers.has(name)) {
-      metadata.reqHeaders[name] = request.headers.get(name)
-    }
-  }
-
-  // if the request's host header differs from the host in the url
-  // we need to keep it, otherwise it's just noise and we ignore it
-  const host = request.headers.get('host')
-  const parsedUrl = new url.URL(request.url)
-  if (host && parsedUrl.host !== host) {
-    metadata.reqHeaders.host = host
-  }
-
-  // if the response has a vary header, make sure
-  // we store the relevant request headers too
-  if (response.headers.has('vary')) {
-    const vary = response.headers.get('vary')
-    // a vary of "*" means every header causes a different response.
-    // in that scenario, we do not include any additional headers
-    // as the freshness check will always fail anyway and we don't
-    // want to bloat the cache indexes
-    if (vary !== '*') {
-      // copy any other request headers that will vary the response
-      const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
-      for (const name of varyHeaders) {
-        if (request.headers.has(name)) {
-          metadata.reqHeaders[name] = request.headers.get(name)
-        }
-      }
-    }
-  }
-
-  for (const name of KEEP_RESPONSE_HEADERS) {
-    if (response.headers.has(name)) {
-      metadata.resHeaders[name] = response.headers.get(name)
-    }
-  }
-
-  for (const name of options.cacheAdditionalHeaders) {
-    if (response.headers.has(name)) {
-      metadata.resHeaders[name] = response.headers.get(name)
-    }
-  }
-
-  return metadata
-}
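// [editor's note — illustrative sketch, not part of the patch] The vary
// handling above captures whichever request headers the response varies on;
// the core loop, mirrored standalone over plain objects:
const captureVary = (vary, reqHeaders) =>
  vary === '*' ? {} : Object.fromEntries(
    vary.trim().toLowerCase().split(/\s*,\s*/)
      .filter(name => name in reqHeaders)
      .map(name => [name, reqHeaders[name]]))
captureVary('Accept-Encoding', { 'accept-encoding': 'gzip' })
// -> { 'accept-encoding': 'gzip' } — stored so later freshness checks can compare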
-
-// symbols used to hide objects that may be lazily evaluated in a getter
-const _request = Symbol('request')
-const _response = Symbol('response')
-const _policy = Symbol('policy')
-
-class CacheEntry {
-  constructor ({ entry, request, response, options }) {
-    if (entry) {
-      this.key = entry.key
-      this.entry = entry
-      // previous versions of this module didn't write an explicit timestamp in
-      // the metadata, so fall back to the entry's timestamp. we can't use the
-      // entry timestamp to determine staleness because cacache will update it
-      // when it verifies its data
-      this.entry.metadata.time = this.entry.metadata.time || this.entry.time
-    } else {
-      this.key = cacheKey(request)
-    }
-
-    this.options = options
-
-    // these properties are behind getters that lazily evaluate
-    this[_request] = request
-    this[_response] = response
-    this[_policy] = null
-  }
-
-  // returns a CacheEntry instance that satisfies the given request
-  // or undefined if no existing entry satisfies
-  static async find (request, options) {
-    try {
-      // compacts the index and returns an array of unique entries
-      var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
-        const entryA = new CacheEntry({ entry: A, options })
-        const entryB = new CacheEntry({ entry: B, options })
-        return entryA.policy.satisfies(entryB.request)
-      }, {
-        validateEntry: (entry) => {
-          // clean out entries with a buggy content-encoding value
-          if (entry.metadata &&
-              entry.metadata.resHeaders &&
-              entry.metadata.resHeaders['content-encoding'] === null) {
-            return false
-          }
-
-          // if an integrity is null, it needs to have a status specified
-          if (entry.integrity === null) {
-            return !!(entry.metadata && entry.metadata.status)
-          }
-
-          return true
-        },
-      })
-    } catch (err) {
-      // if the compact request fails, ignore the error and return
-      return
-    }
-
-    // a cache mode of 'reload' means to behave as though we have no cache
-    // on the way to the network. return undefined to allow cacheFetch to
-    // create a brand new request no matter what.
-    if (options.cache === 'reload') {
-      return
-    }
-
-    // find the specific entry that satisfies the request
-    let match
-    for (const entry of matches) {
-      const _entry = new CacheEntry({
-        entry,
-        options,
-      })
-
-      if (_entry.policy.satisfies(request)) {
-        match = _entry
-        break
-      }
-    }
-
-    return match
-  }
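// [editor's note — illustrative sketch, not part of the patch] A hedged usage
// shape for find(); cachePath is a hypothetical cacache directory and the
// options object is reduced to the fields the lookup path reads:
const lookup = async (request, cachePath) => {
  const entry = await CacheEntry.find(request, { cachePath, cache: 'default' })
  return entry ? entry.response : undefined  // undefined -> go to the network
}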
-
-  // if the user made a PUT/POST/PATCH then we invalidate our
-  // cache for the same url by deleting the index entirely
-  static async invalidate (request, options) {
-    const key = cacheKey(request)
-    try {
-      await cacache.rm.entry(options.cachePath, key, { removeFully: true })
-    } catch (err) {
-      // ignore errors
-    }
-  }
-
-  get request () {
-    if (!this[_request]) {
-      this[_request] = new Request(this.entry.metadata.url, {
-        method: 'GET',
-        headers: this.entry.metadata.reqHeaders,
-        ...this.entry.metadata.options,
-      })
-    }
-
-    return this[_request]
-  }
-
-  get response () {
-    if (!this[_response]) {
-      this[_response] = new Response(null, {
-        url: this.entry.metadata.url,
-        counter: this.options.counter,
-        status: this.entry.metadata.status || 200,
-        headers: {
-          ...this.entry.metadata.resHeaders,
-          'content-length': this.entry.size,
-        },
-      })
-    }
-
-    return this[_response]
-  }
-
-  get policy () {
-    if (!this[_policy]) {
-      this[_policy] = new CachePolicy({
-        entry: this.entry,
-        request: this.request,
-        response: this.response,
-        options: this.options,
-      })
-    }
-
-    return this[_policy]
-  }
-
-  // wraps the response in a pipeline that stores the data
-  // in the cache while the user consumes it
-  async store (status) {
-    // if the request was not a GET, we got a status other than
-    // 200, 301, or 308, or the CachePolicy forbids storage,
-    // append the cache status header and return the response untouched
-    if (
-      this.request.method !== 'GET' ||
-      ![200, 301, 308].includes(this.response.status) ||
-      !this.policy.storable()
-    ) {
-      this.response.headers.set('x-local-cache-status', 'skip')
-      return this.response
-    }
-
-    const size = this.response.headers.get('content-length')
-    const cacheOpts = {
-      algorithms: this.options.algorithms,
-      metadata: getMetadata(this.request, this.response, this.options),
-      size,
-      integrity: this.options.integrity,
-      integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
-    }
-
-    let body = null
-    // we only set a body if the status is a 200; redirects are
-    // stored as metadata only
-    if (this.response.status === 200) {
-      let cacheWriteResolve, cacheWriteReject
-      const cacheWritePromise = new Promise((resolve, reject) => {
-        cacheWriteResolve = resolve
-        cacheWriteReject = reject
-      })
-
-      body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
-        flush () {
-          return cacheWritePromise
-        },
-      }))
-      // this is always true: if we aren't reusing the integrity emitter from
-      // the remote fetch, we are using the one from cacache
-      body.hasIntegrityEmitter = true
-
-      const onResume = () => {
-        const tee = new Minipass()
-        const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
-        // re-emit the integrity and size events on our new response body so they can be reused
-        cacheStream.on('integrity', i => body.emit('integrity', i))
-        cacheStream.on('size', s => body.emit('size', s))
-        // pipe the tee into the cache stream so data is written to the cache as it flows
-        tee.pipe(cacheStream)
-        // TODO if the cache write fails, log a warning but return the response anyway
-        // eslint-disable-next-line promise/catch-or-return
-        cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
-        body.unshift(tee)
-        body.unshift(this.response.body)
-      }
-
-      body.once('resume', onResume)
-      body.once('end', () => body.removeListener('resume', onResume))
-    } else {
-      await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
-    }
-
-    // note: we do not set the x-local-cache-hash header because we do not know
-    // the hash value until after the write to the cache completes, which happens
-    // only after the response has been sent, at which point it is too late to
-    // write the header anyway
-    this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
-    this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
-    this.response.headers.set('x-local-cache-mode', 'stream')
-    this.response.headers.set('x-local-cache-status', status)
-    this.response.headers.set('x-local-cache-time', new Date().toISOString())
-    const newResponse = new Response(body, {
-      url: this.response.url,
-      status: this.response.status,
-      headers: this.response.headers,
-      counter: this.options.counter,
-    })
-    return newResponse
-  }
-
-  // use the cached data to create a response and return it
-  async respond (method, options, status) {
-    let response
-    if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
-      // if the request is a HEAD, or the response is a redirect,
-      // then the metadata in the entry already includes everything
-      // we need to build a response
-      response = this.response
-    } else {
-      // we're responding with a full cached response, so create a body
-      // that reads from cacache and attach it to a new Response
-      const body = new Minipass()
-      const headers = { ...this.policy.responseHeaders() }
-
-      const onResume = () => {
-        const cacheStream = cacache.get.stream.byDigest(
-          this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
-        )
-        cacheStream.on('error', async (err) => {
-          cacheStream.pause()
-          if (err.code === 'EINTEGRITY') {
-            await cacache.rm.content(
-              this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
-            )
-          }
-          if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
-            await CacheEntry.invalidate(this.request, this.options)
-          }
-          body.emit('error', err)
-          cacheStream.resume()
-        })
-        // emit the integrity and size events based on our metadata so we're consistent
-        body.emit('integrity', this.entry.integrity)
-        body.emit('size', Number(headers['content-length']))
-        cacheStream.pipe(body)
-      }
-
-      body.once('resume', onResume)
-      body.once('end', () => body.removeListener('resume', onResume))
-      response = new Response(body, {
-        url: this.entry.metadata.url,
-        counter: options.counter,
-        status: 200,
-        headers,
-      })
-    }
-
-    response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
-    response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
-    response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
-    response.headers.set('x-local-cache-mode', 'stream')
-    response.headers.set('x-local-cache-status', status)
-    response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
-    return response
-  }
-
-  // use the provided request along with this cache entry to
-  // revalidate the stored response. returns a response, either
-  // from the cache or from the update
-  async revalidate (request, options) {
-    const revalidateRequest = new Request(request, {
-      headers: this.policy.revalidationHeaders(request),
-    })
-
-    try {
-      // NOTE: be sure to remove the headers property from the
-      // user supplied options, since we have already defined
-      // them on the new request object. if they're still in the
-      // options then those will overwrite the ones from the policy
-      var response = await remote(revalidateRequest, {
-        ...options,
-        headers: undefined,
-      })
-    } catch (err) {
-      // if the network fetch fails, return the stale
-      // cached response unless it has a cache-control
-      // of 'must-revalidate'
-      if (!this.policy.mustRevalidate) {
-        return this.respond(request.method, options, 'stale')
-      }
-
-      throw err
-    }
-
-    if (this.policy.revalidated(revalidateRequest, response)) {
-      // we got a 304, write a new index to the cache and respond from cache
-      const metadata = getMetadata(request, response, options)
-      // 304 responses do not include headers that are specific to the response data
-      // since they do not include a body, so we copy values for headers that were
-      // in the old cache entry to the new one, if the new metadata does not already
-      // include that header
-      for (const name of KEEP_RESPONSE_HEADERS) {
-        if (
-          !hasOwnProperty(metadata.resHeaders, name) &&
-          hasOwnProperty(this.entry.metadata.resHeaders, name)
-        ) {
-          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
-        }
-      }
-
-      for (const name of options.cacheAdditionalHeaders) {
-        const inMeta = hasOwnProperty(metadata.resHeaders, name)
-        const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
-        const inPolicy = hasOwnProperty(this.policy.response.headers, name)
-
-        // if the header is in the existing entry, but it is not in the metadata
-        // then we need to write it to the metadata as this will refresh the on-disk cache
-        if (!inMeta && inEntry) {
-          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
-        }
-        // if the header is in the metadata, but not in the policy, then we need to set
-        // it in the policy so that it's included in the immediate response. future
-        // responses will load a new cache entry, so we don't need to change that
-        if (!inPolicy && inMeta) {
-          this.policy.response.headers[name] = metadata.resHeaders[name]
-        }
-      }
-
-      try {
-        await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
-          size: this.entry.size,
-          metadata,
-        })
-      } catch (err) {
-        // if updating the cache index fails, we ignore it and
-        // respond anyway
-      }
-      return this.respond(request.method, options, 'revalidated')
-    }
-
-    // if we got a modified response, create a new entry based on it
-    const newEntry = new CacheEntry({
-      request,
-      response,
-      options,
-    })
-
-    // respond with the new entry while writing it to the cache
-    return newEntry.store('updated')
-  }
-}
-
-module.exports = CacheEntry
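
The store() method above tees the response body: the caller consumes the
pipeline while a second stream feeds cacache. A minimal sketch of that tee
using Node core streams (illustrative only: `source`, `toConsumer`, and
`toCache` are placeholder names, not identifiers from this module):

    const { PassThrough, Readable } = require('stream')

    const source = Readable.from(['hello ', 'world']) // stands in for response.body
    const toConsumer = new PassThrough()              // what the caller reads
    const toCache = new PassThrough()                 // stands in for cacache.put.stream

    // one readable fans out to both sinks; each sees every chunk
    source.pipe(toConsumer)
    source.pipe(toCache)

    toCache.on('data', c => process.stdout.write(`cache write: ${c}\n`))
    toConsumer.on('data', c => process.stdout.write(`consumer read: ${c}\n`))
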
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js
deleted file mode 100644
index 67a66573bebe6..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js
+++ /dev/null
@@ -1,11 +0,0 @@
-class NotCachedError extends Error {
-  constructor (url) {
-    /* eslint-disable-next-line max-len */
-    super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
-    this.code = 'ENOTCACHED'
-  }
-}
-
-module.exports = {
-  NotCachedError,
-}
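
Callers observe this as a rejection with code 'ENOTCACHED'. A sketch of
handling it, assuming make-fetch-happen is installed (the registry URL and
cache path are placeholders):

    const fetch = require('make-fetch-happen').defaults({ cachePath: '/tmp/mfh-cache' })

    fetch('https://registry.npmjs.org/npm', { cache: 'only-if-cached' })
      .then(res => console.log('served from cache:', res.status))
      .catch(err => {
        if (err.code === 'ENOTCACHED') {
          // nothing cached, and the caller forbade a network request
          console.log(err.message)
        } else {
          throw err
        }
      })
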
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js
deleted file mode 100644
index 0de49d23fb933..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const { NotCachedError } = require('./errors.js')
-const CacheEntry = require('./entry.js')
-const remote = require('../remote.js')
-
-// do whatever is necessary to get a Response and return it
-const cacheFetch = async (request, options) => {
-  // try to find a cached entry that satisfies this request
-  const entry = await CacheEntry.find(request, options)
-  if (!entry) {
-    // no cached result, if the cache mode is 'only-if-cached' that's a failure
-    if (options.cache === 'only-if-cached') {
-      throw new NotCachedError(request.url)
-    }
-
-    // otherwise, we make a request, store it and return it
-    const response = await remote(request, options)
-    const newEntry = new CacheEntry({ request, response, options })
-    return newEntry.store('miss')
-  }
-
-  // we have a cached response that satisfies this request; however, if the
-  // cache mode is 'no-cache' we send the revalidation request no matter what
-  if (options.cache === 'no-cache') {
-    return entry.revalidate(request, options)
-  }
-
-  // if the cached entry is not stale, or if the cache mode is 'force-cache' or
-  // 'only-if-cached' we can respond with the cached entry. set the status
-  // based on the result of needsRevalidation and respond
-  const _needsRevalidation = entry.policy.needsRevalidation(request)
-  if (options.cache === 'force-cache' ||
-      options.cache === 'only-if-cached' ||
-      !_needsRevalidation) {
-    return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
-  }
-
-  // if we got here, the cache entry is stale so revalidate it
-  return entry.revalidate(request, options)
-}
-
-cacheFetch.invalidate = async (request, options) => {
-  if (!options.cachePath) {
-    return
-  }
-
-  return CacheEntry.invalidate(request, options)
-}
-
-module.exports = cacheFetch
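
For reference, the cache modes this dispatcher distinguishes, as a caller
would select them. This is a sketch: the URL and cache path are placeholders,
and the 'reload' and 'no-store' modes are handled earlier, in CacheEntry.find
and CachePolicy.storable respectively.

    const fetch = require('make-fetch-happen').defaults({ cachePath: '/tmp/mfh-cache' })
    const url = 'https://registry.npmjs.org/npm'

    async function demo () {
      await fetch(url)                              // default: miss stores, fresh hit responds, stale revalidates
      await fetch(url, { cache: 'no-cache' })       // always revalidates, even a fresh entry
      await fetch(url, { cache: 'force-cache' })    // serves the entry even if stale
      await fetch(url, { cache: 'only-if-cached' }) // cache hit or ENOTCACHED, never the network
    }

    demo().catch(console.error)
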
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js
deleted file mode 100644
index f7684d562b7fa..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js
+++ /dev/null
@@ -1,17 +0,0 @@
-const { URL, format } = require('url')
-
-// options passed to url.format() when generating a key
-const formatOptions = {
-  auth: false,
-  fragment: false,
-  search: true,
-  unicode: false,
-}
-
-// returns a string to be used as the cache key for the Request
-const cacheKey = (request) => {
-  const parsed = new URL(request.url)
-  return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
-}
-
-module.exports = cacheKey
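
Concretely, credentials and fragments are stripped while the query string is
kept, so two requests that differ only in auth or hash share a key. A
runnable sketch (the function is restated so the snippet stands alone; the
URL is a placeholder):

    const { URL, format } = require('url')

    const formatOptions = { auth: false, fragment: false, search: true, unicode: false }
    const cacheKey = (request) =>
      `make-fetch-happen:request-cache:${format(new URL(request.url), formatOptions)}`

    console.log(cacheKey({ url: 'https://user:pass@registry.npmjs.org/npm?foo=bar#hash' }))
    // -> make-fetch-happen:request-cache:https://registry.npmjs.org/npm?foo=bar
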
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js
deleted file mode 100644
index ada3c8600dae9..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js
+++ /dev/null
@@ -1,161 +0,0 @@
-const CacheSemantics = require('http-cache-semantics')
-const Negotiator = require('negotiator')
-const ssri = require('ssri')
-
-// options passed to http-cache-semantics constructor
-const policyOptions = {
-  shared: false,
-  ignoreCargoCult: true,
-}
-
-// a fake empty response, used when only testing the
-// request for storability
-const emptyResponse = { status: 200, headers: {} }
-
-// returns a plain object representation of the Request
-const requestObject = (request) => {
-  const _obj = {
-    method: request.method,
-    url: request.url,
-    headers: {},
-    compress: request.compress,
-  }
-
-  request.headers.forEach((value, key) => {
-    _obj.headers[key] = value
-  })
-
-  return _obj
-}
-
-// returns a plain object representation of the Response
-const responseObject = (response) => {
-  const _obj = {
-    status: response.status,
-    headers: {},
-  }
-
-  response.headers.forEach((value, key) => {
-    _obj.headers[key] = value
-  })
-
-  return _obj
-}
-
-class CachePolicy {
-  constructor ({ entry, request, response, options }) {
-    this.entry = entry
-    this.request = requestObject(request)
-    this.response = responseObject(response)
-    this.options = options
-    this.policy = new CacheSemantics(this.request, this.response, policyOptions)
-
-    if (this.entry) {
-      // if we have an entry, copy the timestamp to the _responseTime.
-      // this is necessary because the CacheSemantics constructor forces
-      // the value to Date.now() which means a policy created from a
-      // cache entry is likely to always identify itself as stale
-      this.policy._responseTime = this.entry.metadata.time
-    }
-  }
-
-  // static method to quickly determine if a request alone is storable
-  static storable (request, options) {
-    // no cachePath means no caching
-    if (!options.cachePath) {
-      return false
-    }
-
-    // user explicitly asked not to cache
-    if (options.cache === 'no-store') {
-      return false
-    }
-
-    // we only cache GET and HEAD requests
-    if (!['GET', 'HEAD'].includes(request.method)) {
-      return false
-    }
-
-    // otherwise, let http-cache-semantics make the decision
-    // based on the request's headers
-    const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
-    return policy.storable()
-  }
-
-  // returns true if the policy satisfies the request
-  satisfies (request) {
-    const _req = requestObject(request)
-    if (this.request.headers.host !== _req.headers.host) {
-      return false
-    }
-
-    if (this.request.compress !== _req.compress) {
-      return false
-    }
-
-    const negotiatorA = new Negotiator(this.request)
-    const negotiatorB = new Negotiator(_req)
-
-    if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
-      return false
-    }
-
-    if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
-      return false
-    }
-
-    if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
-      return false
-    }
-
-    if (this.options.integrity) {
-      return ssri.parse(this.options.integrity).match(this.entry.integrity)
-    }
-
-    return true
-  }
-
-  // returns true if the request and response allow caching
-  storable () {
-    return this.policy.storable()
-  }
-
-  // NOTE: this is a hack to avoid parsing the cache-control
-  // header ourselves; it returns true if the response's
-  // cache-control contains must-revalidate
-  get mustRevalidate () {
-    return !!this.policy._rescc['must-revalidate']
-  }
-
-  // returns true if the cached response requires revalidation
-  // for the given request
-  needsRevalidation (request) {
-    const _req = requestObject(request)
-    // force method to GET because we only cache GETs
-    // but can serve a HEAD from a cached GET
-    _req.method = 'GET'
-    return !this.policy.satisfiesWithoutRevalidation(_req)
-  }
-
-  responseHeaders () {
-    return this.policy.responseHeaders()
-  }
-
-  // returns a new object containing the appropriate headers
-  // to send a revalidation request
-  revalidationHeaders (request) {
-    const _req = requestObject(request)
-    return this.policy.revalidationHeaders(_req)
-  }
-
-  // returns true if the request/response was revalidated
-  // successfully. returns false if a new response was received
-  revalidated (request, response) {
-    const _req = requestObject(request)
-    const _res = responseObject(response)
-    const policy = this.policy.revalidatedPolicy(_req, _res)
-    return !policy.modified
-  }
-}
-
-module.exports = CachePolicy
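
The class above mostly delegates to http-cache-semantics. A sketch of the
underlying calls on plain request/response objects (the host and header
values are placeholders):

    const CacheSemantics = require('http-cache-semantics')

    const req = { method: 'GET', url: '/npm', headers: { host: 'registry.npmjs.org' } }
    const res = { status: 200, headers: { 'cache-control': 'public, max-age=300' } }

    const policy = new CacheSemantics(req, res, { shared: false, ignoreCargoCult: true })
    console.log(policy.storable())                        // true
    console.log(policy.satisfiesWithoutRevalidation(req)) // true while the entry is fresh
    console.log(policy.revalidationHeaders(req))          // adds if-none-match when an etag is present
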
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/dns.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/dns.js
deleted file mode 100644
index 13102b57c4aa0..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/dns.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const LRUCache = require('lru-cache')
-const dns = require('dns')
-
-const defaultOptions = exports.defaultOptions = {
-  family: undefined,
-  hints: dns.ADDRCONFIG,
-  all: false,
-  verbatim: undefined,
-}
-
-const lookupCache = exports.lookupCache = new LRUCache({ max: 50 })
-
-// this is a factory so that each request can have its own opts (e.g. ttl)
-// while still sharing the cache across all requests
-exports.getLookup = (dnsOptions) => {
-  return (hostname, options, callback) => {
-    if (typeof options === 'function') {
-      callback = options
-      options = null
-    } else if (typeof options === 'number') {
-      options = { family: options }
-    }
-
-    options = { ...defaultOptions, ...options }
-
-    const key = JSON.stringify({
-      hostname,
-      family: options.family,
-      hints: options.hints,
-      all: options.all,
-      verbatim: options.verbatim,
-    })
-
-    if (lookupCache.has(key)) {
-      const [address, family] = lookupCache.get(key)
-      process.nextTick(callback, null, address, family)
-      return
-    }
-
-    dnsOptions.lookup(hostname, options, (err, address, family) => {
-      if (err) {
-        return callback(err)
-      }
-
-      lookupCache.set(key, [address, family], { ttl: dnsOptions.ttl })
-      return callback(null, address, family)
-    })
-  }
-}
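
The returned function has the same (hostname, options, callback) signature
as dns.lookup, so it can be handed to a Node agent directly. A sketch,
assuming this module is reachable as ./dns.js (the URL is a placeholder):

    const dns = require('dns')
    const https = require('https')
    const { getLookup } = require('./dns.js')

    // per-request opts (here the ttl) while sharing the module-level cache
    const lookup = getLookup({ ttl: 5 * 60 * 1000, lookup: dns.lookup })
    const agent = new https.Agent({ lookup })

    https.get('https://registry.npmjs.org/npm', { agent }, (res) => {
      console.log('status:', res.statusCode)
      res.resume()
    })
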
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js
deleted file mode 100644
index 233ba67e16550..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js
+++ /dev/null
@@ -1,118 +0,0 @@
-'use strict'
-
-const { FetchError, Request, isRedirect } = require('minipass-fetch')
-const url = require('url')
-
-const CachePolicy = require('./cache/policy.js')
-const cache = require('./cache/index.js')
-const remote = require('./remote.js')
-
-// given a Request, a Response and user options
-// return true if the response is a redirect that
-// can be followed. we throw errors that will result
-// in the fetch being rejected if the redirect is
-// possible but invalid for some reason
-const canFollowRedirect = (request, response, options) => {
-  if (!isRedirect(response.status)) {
-    return false
-  }
-
-  if (options.redirect === 'manual') {
-    return false
-  }
-
-  if (options.redirect === 'error') {
-    throw new FetchError(`redirect mode is set to error: ${request.url}`,
-      'no-redirect', { code: 'ENOREDIRECT' })
-  }
-
-  if (!response.headers.has('location')) {
-    throw new FetchError(`redirect location header missing for: ${request.url}`,
-      'no-location', { code: 'EINVALIDREDIRECT' })
-  }
-
-  if (request.counter >= request.follow) {
-    throw new FetchError(`maximum redirect reached at: ${request.url}`,
-      'max-redirect', { code: 'EMAXREDIRECT' })
-  }
-
-  return true
-}
-
-// given a Request, a Response, and the user's options return an object
-// with a new Request and a new options object that will be used for
-// following the redirect
-const getRedirect = (request, response, options) => {
-  const _opts = { ...options }
-  const location = response.headers.get('location')
-  const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
-  // Comment below is used under the following license:
-  /**
-   * @license
-   * Copyright (c) 2010-2012 Mikeal Rogers
-   * Licensed under the Apache License, Version 2.0 (the "License");
-   * you may not use this file except in compliance with the License.
-   * You may obtain a copy of the License at
-   * http://www.apache.org/licenses/LICENSE-2.0
-   * Unless required by applicable law or agreed to in writing,
-   * software distributed under the License is distributed on an "AS
-   * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-   * express or implied. See the License for the specific language
-   * governing permissions and limitations under the License.
-   */
-
-  // Remove authorization if changing hostnames (but not if just
-  // changing ports or protocols).  This matches the behavior of request:
-  // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
-  if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
-    request.headers.delete('authorization')
-    request.headers.delete('cookie')
-  }
-
-  // for POST request with 301/302 response, or any request with 303 response,
-  // use GET when following redirect
-  if (
-    response.status === 303 ||
-    (request.method === 'POST' && [301, 302].includes(response.status))
-  ) {
-    _opts.method = 'GET'
-    _opts.body = null
-    request.headers.delete('content-length')
-  }
-
-  _opts.headers = {}
-  request.headers.forEach((value, key) => {
-    _opts.headers[key] = value
-  })
-
-  _opts.counter = ++request.counter
-  const redirectReq = new Request(url.format(redirectUrl), _opts)
-  return {
-    request: redirectReq,
-    options: _opts,
-  }
-}
-
-const fetch = async (request, options) => {
-  const response = CachePolicy.storable(request, options)
-    ? await cache(request, options)
-    : await remote(request, options)
-
-  // if the request wasn't a GET or HEAD, and the response
-  // status is between 200 and 399 inclusive, invalidate the
-  // request url
-  if (!['GET', 'HEAD'].includes(request.method) &&
-      response.status >= 200 &&
-      response.status <= 399) {
-    await cache.invalidate(request, options)
-  }
-
-  if (!canFollowRedirect(request, response, options)) {
-    return response
-  }
-
-  const redirect = getRedirect(request, response, options)
-  return fetch(redirect.request, redirect.options)
-}
-
-module.exports = fetch
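
From the caller's side, those redirect rules surface through the `redirect`
option. A sketch of the three modes (the URL is a placeholder assumed to
answer with a 3xx):

    const fetch = require('make-fetch-happen')

    async function demo () {
      // 'follow' (the default): chase redirects up to `follow` hops,
      // dropping authorization/cookie headers when the hostname changes
      await fetch('https://example.com/moved')

      // 'manual': hand the 3xx response itself back to the caller
      const res = await fetch('https://example.com/moved', { redirect: 'manual' })
      console.log(res.status, res.headers.get('location'))

      // 'error': any redirect rejects with code ENOREDIRECT
      await fetch('https://example.com/moved', { redirect: 'error' })
        .catch(err => console.log(err.code))
    }

    demo().catch(console.error)
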
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js
deleted file mode 100644
index 2f12e8e1b6113..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const { FetchError, Headers, Request, Response } = require('minipass-fetch')
-
-const configureOptions = require('./options.js')
-const fetch = require('./fetch.js')
-
-const makeFetchHappen = (url, opts) => {
-  const options = configureOptions(opts)
-
-  const request = new Request(url, options)
-  return fetch(request, options)
-}
-
-makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
-  if (typeof defaultUrl === 'object') {
-    defaultOptions = defaultUrl
-    defaultUrl = null
-  }
-
-  const defaultedFetch = (url, options = {}) => {
-    const finalUrl = url || defaultUrl
-    const finalOptions = {
-      ...defaultOptions,
-      ...options,
-      headers: {
-        ...defaultOptions.headers,
-        ...options.headers,
-      },
-    }
-    return wrappedFetch(finalUrl, finalOptions)
-  }
-
-  defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
-    makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
-  return defaultedFetch
-}
-
-module.exports = makeFetchHappen
-module.exports.FetchError = FetchError
-module.exports.Headers = Headers
-module.exports.Request = Request
-module.exports.Response = Response
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js
deleted file mode 100644
index f77511279f831..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js
+++ /dev/null
@@ -1,54 +0,0 @@
-const dns = require('dns')
-
-const conditionalHeaders = [
-  'if-modified-since',
-  'if-none-match',
-  'if-unmodified-since',
-  'if-match',
-  'if-range',
-]
-
-const configureOptions = (opts) => {
-  const { strictSSL, ...options } = { ...opts }
-  options.method = options.method ? options.method.toUpperCase() : 'GET'
-  options.rejectUnauthorized = strictSSL !== false
-
-  if (!options.retry) {
-    options.retry = { retries: 0 }
-  } else if (typeof options.retry === 'string') {
-    const retries = parseInt(options.retry, 10)
-    if (isFinite(retries)) {
-      options.retry = { retries }
-    } else {
-      options.retry = { retries: 0 }
-    }
-  } else if (typeof options.retry === 'number') {
-    options.retry = { retries: options.retry }
-  } else {
-    options.retry = { retries: 0, ...options.retry }
-  }
-
-  options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
-
-  options.cache = options.cache || 'default'
-  if (options.cache === 'default') {
-    const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
-      return conditionalHeaders.includes(name.toLowerCase())
-    })
-    if (hasConditionalHeader) {
-      options.cache = 'no-store'
-    }
-  }
-
-  options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
-
-  // cacheManager is deprecated, but if it's set and
-  // cachePath is not we should copy it to the new field
-  if (options.cacheManager && !options.cachePath) {
-    options.cachePath = options.cacheManager
-  }
-
-  return options
-}
-
-module.exports = configureOptions
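
The retry normalization above accepts four spellings that all collapse to
the { retries, ... } object handed to promise-retry. A sketch (placeholder
URL; extra keys pass straight through to promise-retry):

    const fetch = require('make-fetch-happen')
    const url = 'https://registry.npmjs.org/npm'

    Promise.allSettled([
      fetch(url, { retry: 3 }),                         // number  -> { retries: 3 }
      fetch(url, { retry: '3' }),                       // string  -> parsed; NaN falls back to { retries: 0 }
      fetch(url, { retry: { retries: 3, factor: 2 } }), // object  -> merged over { retries: 0 }
      fetch(url),                                       // omitted -> { retries: 0 }
    ])
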
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js
deleted file mode 100644
index b1d221b2d0ce3..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js
+++ /dev/null
@@ -1,41 +0,0 @@
-'use strict'
-
-const MinipassPipeline = require('minipass-pipeline')
-
-class CachingMinipassPipeline extends MinipassPipeline {
-  #events = []
-  #data = new Map()
-
-  constructor (opts, ...streams) {
-    // CRITICAL: do NOT pass the streams to the call to super(); doing so would
-    // start the flow of data and potentially cause the events we need to catch
-    // to emit before we've finished our own setup. instead we call super() with
-    // no args, finish our setup, and then push the streams into ourselves to
-    // start the data flow
-    super()
-    this.#events = opts.events
-
-    /* istanbul ignore next - coverage disabled because this is pointless to test here */
-    if (streams.length) {
-      this.push(...streams)
-    }
-  }
-
-  on (event, handler) {
-    if (this.#events.includes(event) && this.#data.has(event)) {
-      return handler(...this.#data.get(event))
-    }
-
-    return super.on(event, handler)
-  }
-
-  emit (event, ...data) {
-    if (this.#events.includes(event)) {
-      this.#data.set(event, data)
-    }
-
-    return super.emit(event, ...data)
-  }
-}
-
-module.exports = CachingMinipassPipeline
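
The record-and-replay trick above, reduced to a plain EventEmitter so it
runs standalone (ReplayLast is an illustrative name, not part of this
module):

    const { EventEmitter } = require('events')

    class ReplayLast extends EventEmitter {
      #events
      #data = new Map()
      constructor (events) {
        super()
        this.#events = events
      }
      on (event, handler) {
        // a listener attached after a tracked event fired is called immediately
        if (this.#events.includes(event) && this.#data.has(event)) {
          handler(...this.#data.get(event))
          return this
        }
        return super.on(event, handler)
      }
      emit (event, ...data) {
        if (this.#events.includes(event)) {
          this.#data.set(event, data)
        }
        return super.emit(event, ...data)
      }
    }

    const ee = new ReplayLast(['integrity'])
    ee.emit('integrity', 'sha512-...')
    ee.on('integrity', i => console.log('late listener got', i)) // still fires
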
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js
deleted file mode 100644
index bdbcc79cad908..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js
+++ /dev/null
@@ -1,121 +0,0 @@
-const { Minipass } = require('minipass')
-const fetch = require('minipass-fetch')
-const promiseRetry = require('promise-retry')
-const ssri = require('ssri')
-
-const CachingMinipassPipeline = require('./pipeline.js')
-const getAgent = require('./agent.js')
-const pkg = require('../package.json')
-
-const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
-
-const RETRY_ERRORS = [
-  'ECONNRESET', // remote socket closed on us
-  'ECONNREFUSED', // remote host refused to open connection
-  'EADDRINUSE', // failed to bind to a local port (proxy?)
-  'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
-  'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive
-  // Known codes we do NOT retry on:
-  // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
-]
-
-const RETRY_TYPES = [
-  'request-timeout',
-]
-
-// make a request directly to the remote source,
-// retrying certain classes of errors as well as
-// following redirects (through the cache if necessary)
-// and verifying response integrity
-const remoteFetch = (request, options) => {
-  const agent = getAgent(request.url, options)
-  if (!request.headers.has('connection')) {
-    request.headers.set('connection', agent ? 'keep-alive' : 'close')
-  }
-
-  if (!request.headers.has('user-agent')) {
-    request.headers.set('user-agent', USER_AGENT)
-  }
-
-  // keep our own options since we're overriding the agent
-  // and the redirect mode
-  const _opts = {
-    ...options,
-    agent,
-    redirect: 'manual',
-  }
-
-  return promiseRetry(async (retryHandler, attemptNum) => {
-    const req = new fetch.Request(request, _opts)
-    try {
-      let res = await fetch(req, _opts)
-      if (_opts.integrity && res.status === 200) {
-        // we got a 200 response and the user has specified an expected
-        // integrity value, so wrap the response in an ssri stream to verify it
-        const integrityStream = ssri.integrityStream({
-          algorithms: _opts.algorithms,
-          integrity: _opts.integrity,
-          size: _opts.size,
-        })
-        const pipeline = new CachingMinipassPipeline({
-          events: ['integrity', 'size'],
-        }, res.body, integrityStream)
-        // we also propagate the integrity and size events out to the pipeline so we can use
-        // this new response body as an integrityEmitter for cacache
-        integrityStream.on('integrity', i => pipeline.emit('integrity', i))
-        integrityStream.on('size', s => pipeline.emit('size', s))
-        res = new fetch.Response(pipeline, res)
-        // set an explicit flag so we know if our response body will emit integrity and size
-        res.body.hasIntegrityEmitter = true
-      }
-
-      res.headers.set('x-fetch-attempts', attemptNum)
-
-      // do not retry POST requests, or requests with a streaming body
-      // do retry requests with a 408, 420, 429 or 500+ status in the response
-      const isStream = Minipass.isStream(req.body)
-      const isRetriable = req.method !== 'POST' &&
-          !isStream &&
-          ([408, 420, 429].includes(res.status) || res.status >= 500)
-
-      if (isRetriable) {
-        if (typeof options.onRetry === 'function') {
-          options.onRetry(res)
-        }
-
-        return retryHandler(res)
-      }
-
-      return res
-    } catch (err) {
-      const code = (err.code === 'EPROMISERETRY')
-        ? err.retried.code
-        : err.code
-
-      // err.retried will be the thing that was thrown from above
-      // if it's a response, we just got a bad status code and we
-      // can re-throw to allow the retry
-      const isRetryError = err.retried instanceof fetch.Response ||
-        (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
-
-      if (req.method === 'POST' || isRetryError) {
-        throw err
-      }
-
-      if (typeof options.onRetry === 'function') {
-        options.onRetry(err)
-      }
-
-      return retryHandler(err)
-    }
-  }, options.retry).catch((err) => {
-    // don't reject for http errors; just return them
-    if (err.status >= 400 && err.type !== 'system') {
-      return err
-    }
-
-    throw err
-  })
-}
-
-module.exports = remoteFetch
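
Observing the retry loop from the outside: onRetry receives the error (or
the retriable Response) for each failed attempt, and the x-fetch-attempts
header reports the final count. A sketch with placeholder values:

    const fetch = require('make-fetch-happen')

    fetch('https://registry.npmjs.org/npm', {
      retry: { retries: 2, factor: 2, minTimeout: 100 },
      onRetry: errOrRes => console.log('attempt failed:', errOrRes.code || errOrRes.status),
    }).then(res => {
      console.log('attempts made:', res.headers.get('x-fetch-attempts'))
    }).catch(console.error)
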
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/package.json b/node_modules/tuf-js/node_modules/make-fetch-happen/package.json
deleted file mode 100644
index fd415dc9966fa..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/package.json
+++ /dev/null
@@ -1,78 +0,0 @@
-{
-  "name": "make-fetch-happen",
-  "version": "11.1.1",
-  "description": "Opinionated, caching, retrying fetch client",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "test": "tap",
-    "posttest": "npm run lint",
-    "eslint": "eslint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/make-fetch-happen.git"
-  },
-  "keywords": [
-    "http",
-    "request",
-    "fetch",
-    "mean girls",
-    "caching",
-    "cache",
-    "subresource integrity"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "dependencies": {
-    "agentkeepalive": "^4.2.1",
-    "cacache": "^17.0.0",
-    "http-cache-semantics": "^4.1.1",
-    "http-proxy-agent": "^5.0.0",
-    "https-proxy-agent": "^5.0.0",
-    "is-lambda": "^1.0.1",
-    "lru-cache": "^7.7.1",
-    "minipass": "^5.0.0",
-    "minipass-fetch": "^3.0.0",
-    "minipass-flush": "^1.0.5",
-    "minipass-pipeline": "^1.2.4",
-    "negotiator": "^0.6.3",
-    "promise-retry": "^2.0.1",
-    "socks-proxy-agent": "^7.0.0",
-    "ssri": "^10.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.14.1",
-    "nock": "^13.2.4",
-    "safe-buffer": "^5.2.1",
-    "standard-version": "^9.3.2",
-    "tap": "^16.0.0"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "color": 1,
-    "files": "test/*.js",
-    "check-coverage": true,
-    "timeout": 60,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.14.1",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/tuf-js/node_modules/minipass/LICENSE b/node_modules/tuf-js/node_modules/minipass/LICENSE
deleted file mode 100644
index 97f8e32ed82e4..0000000000000
--- a/node_modules/tuf-js/node_modules/minipass/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tuf-js/node_modules/minipass/index.js b/node_modules/tuf-js/node_modules/minipass/index.js
deleted file mode 100644
index ed07c17acd97b..0000000000000
--- a/node_modules/tuf-js/node_modules/minipass/index.js
+++ /dev/null
@@ -1,702 +0,0 @@
-'use strict'
-const proc =
-  typeof process === 'object' && process
-    ? process
-    : {
-        stdout: null,
-        stderr: null,
-      }
-const EE = require('events')
-const Stream = require('stream')
-const stringdecoder = require('string_decoder')
-const SD = stringdecoder.StringDecoder
-
-const EOF = Symbol('EOF')
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
-const EMITTED_END = Symbol('emittedEnd')
-const EMITTING_END = Symbol('emittingEnd')
-const EMITTED_ERROR = Symbol('emittedError')
-const CLOSED = Symbol('closed')
-const READ = Symbol('read')
-const FLUSH = Symbol('flush')
-const FLUSHCHUNK = Symbol('flushChunk')
-const ENCODING = Symbol('encoding')
-const DECODER = Symbol('decoder')
-const FLOWING = Symbol('flowing')
-const PAUSED = Symbol('paused')
-const RESUME = Symbol('resume')
-const BUFFER = Symbol('buffer')
-const PIPES = Symbol('pipes')
-const BUFFERLENGTH = Symbol('bufferLength')
-const BUFFERPUSH = Symbol('bufferPush')
-const BUFFERSHIFT = Symbol('bufferShift')
-const OBJECTMODE = Symbol('objectMode')
-// internal event when stream is destroyed
-const DESTROYED = Symbol('destroyed')
-// internal event when stream has an error
-const ERROR = Symbol('error')
-const EMITDATA = Symbol('emitData')
-const EMITEND = Symbol('emitEnd')
-const EMITEND2 = Symbol('emitEnd2')
-const ASYNC = Symbol('async')
-const ABORT = Symbol('abort')
-const ABORTED = Symbol('aborted')
-const SIGNAL = Symbol('signal')
-
-const defer = fn => Promise.resolve().then(fn)
-
-// TODO remove when Node v8 support drops
-const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
-const ASYNCITERATOR =
-  (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
-const ITERATOR =
-  (doIter && Symbol.iterator) || Symbol('iterator not implemented')
-
-// events that mean 'the stream is over'
-// these are treated specially, and re-emitted
-// if they are listened for after emitting.
-const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
-
-const isArrayBuffer = b =>
-  b instanceof ArrayBuffer ||
-  (typeof b === 'object' &&
-    b.constructor &&
-    b.constructor.name === 'ArrayBuffer' &&
-    b.byteLength >= 0)
-
-const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
-
-class Pipe {
-  constructor(src, dest, opts) {
-    this.src = src
-    this.dest = dest
-    this.opts = opts
-    this.ondrain = () => src[RESUME]()
-    dest.on('drain', this.ondrain)
-  }
-  unpipe() {
-    this.dest.removeListener('drain', this.ondrain)
-  }
-  // istanbul ignore next - only here for the prototype
-  proxyErrors() {}
-  end() {
-    this.unpipe()
-    if (this.opts.end) this.dest.end()
-  }
-}
-
-class PipeProxyErrors extends Pipe {
-  unpipe() {
-    this.src.removeListener('error', this.proxyErrors)
-    super.unpipe()
-  }
-  constructor(src, dest, opts) {
-    super(src, dest, opts)
-    this.proxyErrors = er => dest.emit('error', er)
-    src.on('error', this.proxyErrors)
-  }
-}
-
-class Minipass extends Stream {
-  constructor(options) {
-    super()
-    this[FLOWING] = false
-    // whether we're explicitly paused
-    this[PAUSED] = false
-    this[PIPES] = []
-    this[BUFFER] = []
-    this[OBJECTMODE] = (options && options.objectMode) || false
-    if (this[OBJECTMODE]) this[ENCODING] = null
-    else this[ENCODING] = (options && options.encoding) || null
-    if (this[ENCODING] === 'buffer') this[ENCODING] = null
-    this[ASYNC] = (options && !!options.async) || false
-    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
-    this[EOF] = false
-    this[EMITTED_END] = false
-    this[EMITTING_END] = false
-    this[CLOSED] = false
-    this[EMITTED_ERROR] = null
-    this.writable = true
-    this.readable = true
-    this[BUFFERLENGTH] = 0
-    this[DESTROYED] = false
-    if (options && options.debugExposeBuffer === true) {
-      Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })
-    }
-    if (options && options.debugExposePipes === true) {
-      Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
-    }
-    this[SIGNAL] = options && options.signal
-    this[ABORTED] = false
-    if (this[SIGNAL]) {
-      this[SIGNAL].addEventListener('abort', () => this[ABORT]())
-      if (this[SIGNAL].aborted) {
-        this[ABORT]()
-      }
-    }
-  }
-
-  get bufferLength() {
-    return this[BUFFERLENGTH]
-  }
-
-  get encoding() {
-    return this[ENCODING]
-  }
-  set encoding(enc) {
-    if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
-
-    if (
-      this[ENCODING] &&
-      enc !== this[ENCODING] &&
-      ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
-    )
-      throw new Error('cannot change encoding')
-
-    if (this[ENCODING] !== enc) {
-      this[DECODER] = enc ? new SD(enc) : null
-      if (this[BUFFER].length)
-        this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk))
-    }
-
-    this[ENCODING] = enc
-  }
-
-  setEncoding(enc) {
-    this.encoding = enc
-  }
-
-  get objectMode() {
-    return this[OBJECTMODE]
-  }
-  set objectMode(om) {
-    this[OBJECTMODE] = this[OBJECTMODE] || !!om
-  }
-
-  get ['async']() {
-    return this[ASYNC]
-  }
-  set ['async'](a) {
-    this[ASYNC] = this[ASYNC] || !!a
-  }
-
-  // drop everything and get out of the flow completely
-  [ABORT]() {
-    this[ABORTED] = true
-    this.emit('abort', this[SIGNAL].reason)
-    this.destroy(this[SIGNAL].reason)
-  }
-
-  get aborted() {
-    return this[ABORTED]
-  }
-  set aborted(_) {}
-
-  write(chunk, encoding, cb) {
-    if (this[ABORTED]) return false
-    if (this[EOF]) throw new Error('write after end')
-
-    if (this[DESTROYED]) {
-      this.emit(
-        'error',
-        Object.assign(
-          new Error('Cannot call write after a stream was destroyed'),
-          { code: 'ERR_STREAM_DESTROYED' }
-        )
-      )
-      return true
-    }
-
-    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
-
-    if (!encoding) encoding = 'utf8'
-
-    const fn = this[ASYNC] ? defer : f => f()
-
-    // convert array buffers and typed array views into buffers
-    // at some point in the future, we may want to do the opposite!
-    // leave strings and buffers as-is
-    // anything else switches us into object mode
-    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
-      if (isArrayBufferView(chunk))
-        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
-      else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
-      else if (typeof chunk !== 'string')
-        // use the setter so we throw if we have encoding set
-        this.objectMode = true
-    }
-
-    // handle object mode up front, since it's simpler.
-    // this yields better performance and fewer checks later.
-    if (this[OBJECTMODE]) {
-      /* istanbul ignore if - maybe impossible? */
-      if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
-
-      if (this.flowing) this.emit('data', chunk)
-      else this[BUFFERPUSH](chunk)
-
-      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-
-      if (cb) fn(cb)
-
-      return this.flowing
-    }
-
-    // at this point the chunk is a buffer or string
-    // don't buffer it up or send it to the decoder
-    if (!chunk.length) {
-      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-      if (cb) fn(cb)
-      return this.flowing
-    }
-
-    // fast-path writing strings of same encoding to a stream with
-    // an empty buffer, skipping the buffer/decoder dance
-    if (
-      typeof chunk === 'string' &&
-      // unless it is a string already ready for us to use
-      !(encoding === this[ENCODING] && !this[DECODER].lastNeed)
-    ) {
-      chunk = Buffer.from(chunk, encoding)
-    }
-
-    if (Buffer.isBuffer(chunk) && this[ENCODING])
-      chunk = this[DECODER].write(chunk)
-
-    // Note: flushing CAN potentially switch us into not-flowing mode
-    if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
-
-    if (this.flowing) this.emit('data', chunk)
-    else this[BUFFERPUSH](chunk)
-
-    if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-
-    if (cb) fn(cb)
-
-    return this.flowing
-  }
-
-  read(n) {
-    if (this[DESTROYED]) return null
-
-    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
-      this[MAYBE_EMIT_END]()
-      return null
-    }
-
-    if (this[OBJECTMODE]) n = null
-
-    if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
-      if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
-      else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
-    }
-
-    const ret = this[READ](n || null, this[BUFFER][0])
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [READ](n, chunk) {
-    if (n === chunk.length || n === null) this[BUFFERSHIFT]()
-    else {
-      this[BUFFER][0] = chunk.slice(n)
-      chunk = chunk.slice(0, n)
-      this[BUFFERLENGTH] -= n
-    }
-
-    this.emit('data', chunk)
-
-    if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
-
-    return chunk
-  }
-
-  end(chunk, encoding, cb) {
-    if (typeof chunk === 'function') (cb = chunk), (chunk = null)
-    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
-    if (chunk) this.write(chunk, encoding)
-    if (cb) this.once('end', cb)
-    this[EOF] = true
-    this.writable = false
-
-    // if we haven't written anything, then go ahead and emit,
-    // even if we're not reading.
-    // we'll re-emit if a new 'end' listener is added anyway.
-    // This makes MP more suitable to write-only use cases.
-    if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
-    return this
-  }
-
-  // don't let the internal resume be overwritten
-  [RESUME]() {
-    if (this[DESTROYED]) return
-
-    this[PAUSED] = false
-    this[FLOWING] = true
-    this.emit('resume')
-    if (this[BUFFER].length) this[FLUSH]()
-    else if (this[EOF]) this[MAYBE_EMIT_END]()
-    else this.emit('drain')
-  }
-
-  resume() {
-    return this[RESUME]()
-  }
-
-  pause() {
-    this[FLOWING] = false
-    this[PAUSED] = true
-  }
-
-  get destroyed() {
-    return this[DESTROYED]
-  }
-
-  get flowing() {
-    return this[FLOWING]
-  }
-
-  get paused() {
-    return this[PAUSED]
-  }
-
-  [BUFFERPUSH](chunk) {
-    if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
-    else this[BUFFERLENGTH] += chunk.length
-    this[BUFFER].push(chunk)
-  }
-
-  [BUFFERSHIFT]() {
-    if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
-    else this[BUFFERLENGTH] -= this[BUFFER][0].length
-    return this[BUFFER].shift()
-  }
-
-  [FLUSH](noDrain) {
-    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
-
-    if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
-  }
-
-  [FLUSHCHUNK](chunk) {
-    this.emit('data', chunk)
-    return this.flowing
-  }
-
-  pipe(dest, opts) {
-    if (this[DESTROYED]) return
-
-    const ended = this[EMITTED_END]
-    opts = opts || {}
-    if (dest === proc.stdout || dest === proc.stderr) opts.end = false
-    else opts.end = opts.end !== false
-    opts.proxyErrors = !!opts.proxyErrors
-
-    // piping an ended stream ends immediately
-    if (ended) {
-      if (opts.end) dest.end()
-    } else {
-      this[PIPES].push(
-        !opts.proxyErrors
-          ? new Pipe(this, dest, opts)
-          : new PipeProxyErrors(this, dest, opts)
-      )
-      if (this[ASYNC]) defer(() => this[RESUME]())
-      else this[RESUME]()
-    }
-
-    return dest
-  }
-
-  unpipe(dest) {
-    const p = this[PIPES].find(p => p.dest === dest)
-    if (p) {
-      this[PIPES].splice(this[PIPES].indexOf(p), 1)
-      p.unpipe()
-    }
-  }
-
-  addListener(ev, fn) {
-    return this.on(ev, fn)
-  }
-
-  on(ev, fn) {
-    const ret = super.on(ev, fn)
-    if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
-    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
-      super.emit('readable')
-    else if (isEndish(ev) && this[EMITTED_END]) {
-      super.emit(ev)
-      this.removeAllListeners(ev)
-    } else if (ev === 'error' && this[EMITTED_ERROR]) {
-      if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
-      else fn.call(this, this[EMITTED_ERROR])
-    }
-    return ret
-  }
-
-  get emittedEnd() {
-    return this[EMITTED_END]
-  }
-
-  [MAYBE_EMIT_END]() {
-    if (
-      !this[EMITTING_END] &&
-      !this[EMITTED_END] &&
-      !this[DESTROYED] &&
-      this[BUFFER].length === 0 &&
-      this[EOF]
-    ) {
-      this[EMITTING_END] = true
-      this.emit('end')
-      this.emit('prefinish')
-      this.emit('finish')
-      if (this[CLOSED]) this.emit('close')
-      this[EMITTING_END] = false
-    }
-  }
-
-  emit(ev, data, ...extra) {
-    // error and close are the only events allowed after calling destroy()
-    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
-      return
-    else if (ev === 'data') {
-      return !this[OBJECTMODE] && !data
-        ? false
-        : this[ASYNC]
-        ? defer(() => this[EMITDATA](data))
-        : this[EMITDATA](data)
-    } else if (ev === 'end') {
-      return this[EMITEND]()
-    } else if (ev === 'close') {
-      this[CLOSED] = true
-      // don't emit close before 'end' and 'finish'
-      if (!this[EMITTED_END] && !this[DESTROYED]) return
-      const ret = super.emit('close')
-      this.removeAllListeners('close')
-      return ret
-    } else if (ev === 'error') {
-      this[EMITTED_ERROR] = data
-      super.emit(ERROR, data)
-      const ret =
-        !this[SIGNAL] || this.listeners('error').length
-          ? super.emit('error', data)
-          : false
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'resume') {
-      const ret = super.emit('resume')
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'finish' || ev === 'prefinish') {
-      const ret = super.emit(ev)
-      this.removeAllListeners(ev)
-      return ret
-    }
-
-    // Some other unknown event
-    const ret = super.emit(ev, data, ...extra)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITDATA](data) {
-    for (const p of this[PIPES]) {
-      if (p.dest.write(data) === false) this.pause()
-    }
-    const ret = super.emit('data', data)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITEND]() {
-    if (this[EMITTED_END]) return
-
-    this[EMITTED_END] = true
-    this.readable = false
-    if (this[ASYNC]) defer(() => this[EMITEND2]())
-    else this[EMITEND2]()
-  }
-
-  [EMITEND2]() {
-    if (this[DECODER]) {
-      const data = this[DECODER].end()
-      if (data) {
-        for (const p of this[PIPES]) {
-          p.dest.write(data)
-        }
-        super.emit('data', data)
-      }
-    }
-
-    for (const p of this[PIPES]) {
-      p.end()
-    }
-    const ret = super.emit('end')
-    this.removeAllListeners('end')
-    return ret
-  }
-
-  // const all = await stream.collect()
-  collect() {
-    const buf = []
-    if (!this[OBJECTMODE]) buf.dataLength = 0
-    // set the promise first, in case an error is raised
-    // by triggering the flow here.
-    const p = this.promise()
-    this.on('data', c => {
-      buf.push(c)
-      if (!this[OBJECTMODE]) buf.dataLength += c.length
-    })
-    return p.then(() => buf)
-  }
-
-  // const data = await stream.concat()
-  concat() {
-    return this[OBJECTMODE]
-      ? Promise.reject(new Error('cannot concat in objectMode'))
-      : this.collect().then(buf =>
-          this[OBJECTMODE]
-            ? Promise.reject(new Error('cannot concat in objectMode'))
-            : this[ENCODING]
-            ? buf.join('')
-            : Buffer.concat(buf, buf.dataLength)
-        )
-  }
-
-  // stream.promise().then(() => done, er => emitted error)
-  promise() {
-    return new Promise((resolve, reject) => {
-      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
-      this.on('error', er => reject(er))
-      this.on('end', () => resolve())
-    })
-  }
-
-  // for await (let chunk of stream)
-  [ASYNCITERATOR]() {
-    let stopped = false
-    const stop = () => {
-      this.pause()
-      stopped = true
-      return Promise.resolve({ done: true })
-    }
-    const next = () => {
-      if (stopped) return stop()
-      const res = this.read()
-      if (res !== null) return Promise.resolve({ done: false, value: res })
-
-      if (this[EOF]) return stop()
-
-      let resolve = null
-      let reject = null
-      const onerr = er => {
-        this.removeListener('data', ondata)
-        this.removeListener('end', onend)
-        this.removeListener(DESTROYED, ondestroy)
-        stop()
-        reject(er)
-      }
-      const ondata = value => {
-        this.removeListener('error', onerr)
-        this.removeListener('end', onend)
-        this.removeListener(DESTROYED, ondestroy)
-        this.pause()
-        resolve({ value: value, done: !!this[EOF] })
-      }
-      const onend = () => {
-        this.removeListener('error', onerr)
-        this.removeListener('data', ondata)
-        this.removeListener(DESTROYED, ondestroy)
-        stop()
-        resolve({ done: true })
-      }
-      const ondestroy = () => onerr(new Error('stream destroyed'))
-      return new Promise((res, rej) => {
-        reject = rej
-        resolve = res
-        this.once(DESTROYED, ondestroy)
-        this.once('error', onerr)
-        this.once('end', onend)
-        this.once('data', ondata)
-      })
-    }
-
-    return {
-      next,
-      throw: stop,
-      return: stop,
-      [ASYNCITERATOR]() {
-        return this
-      },
-    }
-  }
-
-  // for (let chunk of stream)
-  [ITERATOR]() {
-    let stopped = false
-    const stop = () => {
-      this.pause()
-      this.removeListener(ERROR, stop)
-      this.removeListener(DESTROYED, stop)
-      this.removeListener('end', stop)
-      stopped = true
-      return { done: true }
-    }
-
-    const next = () => {
-      if (stopped) return stop()
-      const value = this.read()
-      return value === null ? stop() : { value }
-    }
-    this.once('end', stop)
-    this.once(ERROR, stop)
-    this.once(DESTROYED, stop)
-
-    return {
-      next,
-      throw: stop,
-      return: stop,
-      [ITERATOR]() {
-        return this
-      },
-    }
-  }
-
-  destroy(er) {
-    if (this[DESTROYED]) {
-      if (er) this.emit('error', er)
-      else this.emit(DESTROYED)
-      return this
-    }
-
-    this[DESTROYED] = true
-
-    // throw away all buffered data, it's never coming out
-    this[BUFFER].length = 0
-    this[BUFFERLENGTH] = 0
-
-    if (typeof this.close === 'function' && !this[CLOSED]) this.close()
-
-    if (er) this.emit('error', er)
-    // if no error to emit, still reject pending promises
-    else this.emit(DESTROYED)
-
-    return this
-  }
-
-  static isStream(s) {
-    return (
-      !!s &&
-      (s instanceof Minipass ||
-        s instanceof Stream ||
-        (s instanceof EE &&
-          // readable
-          (typeof s.pipe === 'function' ||
-            // writable
-            (typeof s.write === 'function' && typeof s.end === 'function'))))
-    )
-  }
-}
-
-exports.Minipass = Minipass
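
A quick tour of the Minipass surface defined above (collect/concat plus
async iteration), as a standalone sketch:

    const { Minipass } = require('minipass')

    async function demo () {
      // string mode: concat() joins the chunks using the stream's encoding
      const mp = new Minipass({ encoding: 'utf8' })
      mp.write('hello ')
      mp.end('world')
      console.log(await mp.concat()) // 'hello world'

      // object mode: each write is one iteration value
      const mp2 = new Minipass({ objectMode: true })
      mp2.write({ n: 1 })
      mp2.end({ n: 2 })
      for await (const obj of mp2) {
        console.log(obj.n) // 1, then 2
      }
    }

    demo().catch(console.error)
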
diff --git a/node_modules/tuf-js/node_modules/minipass/index.mjs b/node_modules/tuf-js/node_modules/minipass/index.mjs
deleted file mode 100644
index 6ef6cd8cf0703..0000000000000
--- a/node_modules/tuf-js/node_modules/minipass/index.mjs
+++ /dev/null
@@ -1,702 +0,0 @@
-'use strict'
-const proc =
-  typeof process === 'object' && process
-    ? process
-    : {
-        stdout: null,
-        stderr: null,
-      }
-import EE from 'events'
-import Stream from 'stream'
-import stringdecoder from 'string_decoder'
-const SD = stringdecoder.StringDecoder
-
-const EOF = Symbol('EOF')
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
-const EMITTED_END = Symbol('emittedEnd')
-const EMITTING_END = Symbol('emittingEnd')
-const EMITTED_ERROR = Symbol('emittedError')
-const CLOSED = Symbol('closed')
-const READ = Symbol('read')
-const FLUSH = Symbol('flush')
-const FLUSHCHUNK = Symbol('flushChunk')
-const ENCODING = Symbol('encoding')
-const DECODER = Symbol('decoder')
-const FLOWING = Symbol('flowing')
-const PAUSED = Symbol('paused')
-const RESUME = Symbol('resume')
-const BUFFER = Symbol('buffer')
-const PIPES = Symbol('pipes')
-const BUFFERLENGTH = Symbol('bufferLength')
-const BUFFERPUSH = Symbol('bufferPush')
-const BUFFERSHIFT = Symbol('bufferShift')
-const OBJECTMODE = Symbol('objectMode')
-// internal event when stream is destroyed
-const DESTROYED = Symbol('destroyed')
-// internal event when stream has an error
-const ERROR = Symbol('error')
-const EMITDATA = Symbol('emitData')
-const EMITEND = Symbol('emitEnd')
-const EMITEND2 = Symbol('emitEnd2')
-const ASYNC = Symbol('async')
-const ABORT = Symbol('abort')
-const ABORTED = Symbol('aborted')
-const SIGNAL = Symbol('signal')
-
-const defer = fn => Promise.resolve().then(fn)
-
-// TODO remove when Node v8 support drops
-const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
-const ASYNCITERATOR =
-  (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
-const ITERATOR =
-  (doIter && Symbol.iterator) || Symbol('iterator not implemented')
-
-// events that mean 'the stream is over'
-// these are treated specially, and re-emitted
-// if they are listened for after emitting.
-const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
-
-const isArrayBuffer = b =>
-  b instanceof ArrayBuffer ||
-  (typeof b === 'object' &&
-    b.constructor &&
-    b.constructor.name === 'ArrayBuffer' &&
-    b.byteLength >= 0)
-
-const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
-
-class Pipe {
-  constructor(src, dest, opts) {
-    this.src = src
-    this.dest = dest
-    this.opts = opts
-    this.ondrain = () => src[RESUME]()
-    dest.on('drain', this.ondrain)
-  }
-  unpipe() {
-    this.dest.removeListener('drain', this.ondrain)
-  }
-  // istanbul ignore next - only here for the prototype
-  proxyErrors() {}
-  end() {
-    this.unpipe()
-    if (this.opts.end) this.dest.end()
-  }
-}
-
-class PipeProxyErrors extends Pipe {
-  unpipe() {
-    this.src.removeListener('error', this.proxyErrors)
-    super.unpipe()
-  }
-  constructor(src, dest, opts) {
-    super(src, dest, opts)
-    this.proxyErrors = er => dest.emit('error', er)
-    src.on('error', this.proxyErrors)
-  }
-}
-
-export class Minipass extends Stream {
-  constructor(options) {
-    super()
-    this[FLOWING] = false
-    // whether we're explicitly paused
-    this[PAUSED] = false
-    this[PIPES] = []
-    this[BUFFER] = []
-    this[OBJECTMODE] = (options && options.objectMode) || false
-    if (this[OBJECTMODE]) this[ENCODING] = null
-    else this[ENCODING] = (options && options.encoding) || null
-    if (this[ENCODING] === 'buffer') this[ENCODING] = null
-    this[ASYNC] = (options && !!options.async) || false
-    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
-    this[EOF] = false
-    this[EMITTED_END] = false
-    this[EMITTING_END] = false
-    this[CLOSED] = false
-    this[EMITTED_ERROR] = null
-    this.writable = true
-    this.readable = true
-    this[BUFFERLENGTH] = 0
-    this[DESTROYED] = false
-    if (options && options.debugExposeBuffer === true) {
-      Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })
-    }
-    if (options && options.debugExposePipes === true) {
-      Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
-    }
-    this[SIGNAL] = options && options.signal
-    this[ABORTED] = false
-    if (this[SIGNAL]) {
-      this[SIGNAL].addEventListener('abort', () => this[ABORT]())
-      if (this[SIGNAL].aborted) {
-        this[ABORT]()
-      }
-    }
-  }
-
-  get bufferLength() {
-    return this[BUFFERLENGTH]
-  }
-
-  get encoding() {
-    return this[ENCODING]
-  }
-  set encoding(enc) {
-    if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
-
-    if (
-      this[ENCODING] &&
-      enc !== this[ENCODING] &&
-      ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
-    )
-      throw new Error('cannot change encoding')
-
-    if (this[ENCODING] !== enc) {
-      this[DECODER] = enc ? new SD(enc) : null
-      if (this[BUFFER].length)
-        this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk))
-    }
-
-    this[ENCODING] = enc
-  }
-
-  setEncoding(enc) {
-    this.encoding = enc
-  }
-
-  get objectMode() {
-    return this[OBJECTMODE]
-  }
-  set objectMode(om) {
-    this[OBJECTMODE] = this[OBJECTMODE] || !!om
-  }
-
-  get ['async']() {
-    return this[ASYNC]
-  }
-  set ['async'](a) {
-    this[ASYNC] = this[ASYNC] || !!a
-  }
-
-  // drop everything and get out of the flow completely
-  [ABORT]() {
-    this[ABORTED] = true
-    this.emit('abort', this[SIGNAL].reason)
-    this.destroy(this[SIGNAL].reason)
-  }
-
-  get aborted() {
-    return this[ABORTED]
-  }
-  set aborted(_) {}
-
-  write(chunk, encoding, cb) {
-    if (this[ABORTED]) return false
-    if (this[EOF]) throw new Error('write after end')
-
-    if (this[DESTROYED]) {
-      this.emit(
-        'error',
-        Object.assign(
-          new Error('Cannot call write after a stream was destroyed'),
-          { code: 'ERR_STREAM_DESTROYED' }
-        )
-      )
-      return true
-    }
-
-    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
-
-    if (!encoding) encoding = 'utf8'
-
-    const fn = this[ASYNC] ? defer : f => f()
-
-    // convert array buffers and typed array views into buffers
-    // at some point in the future, we may want to do the opposite!
-    // leave strings and buffers as-is
-    // anything else switches us into object mode
-    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
-      if (isArrayBufferView(chunk))
-        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
-      else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
-      else if (typeof chunk !== 'string')
-        // use the setter so we throw if we have encoding set
-        this.objectMode = true
-    }
-
-    // handle object mode up front, since it's simpler
-    // this yields better performance, fewer checks later.
-    if (this[OBJECTMODE]) {
-      /* istanbul ignore if - maybe impossible? */
-      if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
-
-      if (this.flowing) this.emit('data', chunk)
-      else this[BUFFERPUSH](chunk)
-
-      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-
-      if (cb) fn(cb)
-
-      return this.flowing
-    }
-
-    // at this point the chunk is a buffer or string
-    // don't buffer it up or send it to the decoder
-    if (!chunk.length) {
-      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-      if (cb) fn(cb)
-      return this.flowing
-    }
-
-    // fast-path writing strings of same encoding to a stream with
-    // an empty buffer, skipping the buffer/decoder dance
-    if (
-      typeof chunk === 'string' &&
-      // unless it is a string already ready for us to use
-      !(encoding === this[ENCODING] && !this[DECODER].lastNeed)
-    ) {
-      chunk = Buffer.from(chunk, encoding)
-    }
-
-    if (Buffer.isBuffer(chunk) && this[ENCODING])
-      chunk = this[DECODER].write(chunk)
-
-    // Note: flushing CAN potentially switch us into not-flowing mode
-    if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
-
-    if (this.flowing) this.emit('data', chunk)
-    else this[BUFFERPUSH](chunk)
-
-    if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-
-    if (cb) fn(cb)
-
-    return this.flowing
-  }
-
-  read(n) {
-    if (this[DESTROYED]) return null
-
-    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
-      this[MAYBE_EMIT_END]()
-      return null
-    }
-
-    if (this[OBJECTMODE]) n = null
-
-    if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
-      if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
-      else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
-    }
-
-    const ret = this[READ](n || null, this[BUFFER][0])
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [READ](n, chunk) {
-    if (n === chunk.length || n === null) this[BUFFERSHIFT]()
-    else {
-      this[BUFFER][0] = chunk.slice(n)
-      chunk = chunk.slice(0, n)
-      this[BUFFERLENGTH] -= n
-    }
-
-    this.emit('data', chunk)
-
-    if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
-
-    return chunk
-  }
-
-  end(chunk, encoding, cb) {
-    if (typeof chunk === 'function') (cb = chunk), (chunk = null)
-    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
-    if (chunk) this.write(chunk, encoding)
-    if (cb) this.once('end', cb)
-    this[EOF] = true
-    this.writable = false
-
-    // if we haven't written anything, then go ahead and emit,
-    // even if we're not reading.
-    // we'll re-emit if a new 'end' listener is added anyway.
-    // This makes MP more suitable to write-only use cases.
-    if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
-    return this
-  }
-
-  // don't let the internal resume be overwritten
-  [RESUME]() {
-    if (this[DESTROYED]) return
-
-    this[PAUSED] = false
-    this[FLOWING] = true
-    this.emit('resume')
-    if (this[BUFFER].length) this[FLUSH]()
-    else if (this[EOF]) this[MAYBE_EMIT_END]()
-    else this.emit('drain')
-  }
-
-  resume() {
-    return this[RESUME]()
-  }
-
-  pause() {
-    this[FLOWING] = false
-    this[PAUSED] = true
-  }
-
-  get destroyed() {
-    return this[DESTROYED]
-  }
-
-  get flowing() {
-    return this[FLOWING]
-  }
-
-  get paused() {
-    return this[PAUSED]
-  }
-
-  [BUFFERPUSH](chunk) {
-    if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
-    else this[BUFFERLENGTH] += chunk.length
-    this[BUFFER].push(chunk)
-  }
-
-  [BUFFERSHIFT]() {
-    if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
-    else this[BUFFERLENGTH] -= this[BUFFER][0].length
-    return this[BUFFER].shift()
-  }
-
-  [FLUSH](noDrain) {
-    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
-
-    if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
-  }
-
-  [FLUSHCHUNK](chunk) {
-    this.emit('data', chunk)
-    return this.flowing
-  }
-
-  pipe(dest, opts) {
-    if (this[DESTROYED]) return
-
-    const ended = this[EMITTED_END]
-    opts = opts || {}
-    if (dest === proc.stdout || dest === proc.stderr) opts.end = false
-    else opts.end = opts.end !== false
-    opts.proxyErrors = !!opts.proxyErrors
-
-    // piping an ended stream ends immediately
-    if (ended) {
-      if (opts.end) dest.end()
-    } else {
-      this[PIPES].push(
-        !opts.proxyErrors
-          ? new Pipe(this, dest, opts)
-          : new PipeProxyErrors(this, dest, opts)
-      )
-      if (this[ASYNC]) defer(() => this[RESUME]())
-      else this[RESUME]()
-    }
-
-    return dest
-  }
-
-  unpipe(dest) {
-    const p = this[PIPES].find(p => p.dest === dest)
-    if (p) {
-      this[PIPES].splice(this[PIPES].indexOf(p), 1)
-      p.unpipe()
-    }
-  }
-
-  addListener(ev, fn) {
-    return this.on(ev, fn)
-  }
-
-  on(ev, fn) {
-    const ret = super.on(ev, fn)
-    if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
-    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
-      super.emit('readable')
-    else if (isEndish(ev) && this[EMITTED_END]) {
-      super.emit(ev)
-      this.removeAllListeners(ev)
-    } else if (ev === 'error' && this[EMITTED_ERROR]) {
-      if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
-      else fn.call(this, this[EMITTED_ERROR])
-    }
-    return ret
-  }
-
-  get emittedEnd() {
-    return this[EMITTED_END]
-  }
-
-  [MAYBE_EMIT_END]() {
-    if (
-      !this[EMITTING_END] &&
-      !this[EMITTED_END] &&
-      !this[DESTROYED] &&
-      this[BUFFER].length === 0 &&
-      this[EOF]
-    ) {
-      this[EMITTING_END] = true
-      this.emit('end')
-      this.emit('prefinish')
-      this.emit('finish')
-      if (this[CLOSED]) this.emit('close')
-      this[EMITTING_END] = false
-    }
-  }
-
-  emit(ev, data, ...extra) {
-    // error and close are only events allowed after calling destroy()
-    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
-      return
-    else if (ev === 'data') {
-      return !this[OBJECTMODE] && !data
-        ? false
-        : this[ASYNC]
-        ? defer(() => this[EMITDATA](data))
-        : this[EMITDATA](data)
-    } else if (ev === 'end') {
-      return this[EMITEND]()
-    } else if (ev === 'close') {
-      this[CLOSED] = true
-      // don't emit close before 'end' and 'finish'
-      if (!this[EMITTED_END] && !this[DESTROYED]) return
-      const ret = super.emit('close')
-      this.removeAllListeners('close')
-      return ret
-    } else if (ev === 'error') {
-      this[EMITTED_ERROR] = data
-      super.emit(ERROR, data)
-      const ret =
-        !this[SIGNAL] || this.listeners('error').length
-          ? super.emit('error', data)
-          : false
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'resume') {
-      const ret = super.emit('resume')
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'finish' || ev === 'prefinish') {
-      const ret = super.emit(ev)
-      this.removeAllListeners(ev)
-      return ret
-    }
-
-    // Some other unknown event
-    const ret = super.emit(ev, data, ...extra)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITDATA](data) {
-    for (const p of this[PIPES]) {
-      if (p.dest.write(data) === false) this.pause()
-    }
-    const ret = super.emit('data', data)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITEND]() {
-    if (this[EMITTED_END]) return
-
-    this[EMITTED_END] = true
-    this.readable = false
-    if (this[ASYNC]) defer(() => this[EMITEND2]())
-    else this[EMITEND2]()
-  }
-
-  [EMITEND2]() {
-    if (this[DECODER]) {
-      const data = this[DECODER].end()
-      if (data) {
-        for (const p of this[PIPES]) {
-          p.dest.write(data)
-        }
-        super.emit('data', data)
-      }
-    }
-
-    for (const p of this[PIPES]) {
-      p.end()
-    }
-    const ret = super.emit('end')
-    this.removeAllListeners('end')
-    return ret
-  }
-
-  // const all = await stream.collect()
-  collect() {
-    const buf = []
-    if (!this[OBJECTMODE]) buf.dataLength = 0
-    // set the promise first, in case an error is raised
-    // by triggering the flow here.
-    const p = this.promise()
-    this.on('data', c => {
-      buf.push(c)
-      if (!this[OBJECTMODE]) buf.dataLength += c.length
-    })
-    return p.then(() => buf)
-  }
-
-  // const data = await stream.concat()
-  concat() {
-    return this[OBJECTMODE]
-      ? Promise.reject(new Error('cannot concat in objectMode'))
-      : this.collect().then(buf =>
-          this[OBJECTMODE]
-            ? Promise.reject(new Error('cannot concat in objectMode'))
-            : this[ENCODING]
-            ? buf.join('')
-            : Buffer.concat(buf, buf.dataLength)
-        )
-  }
-
-  // stream.promise().then(() => done, er => emitted error)
-  promise() {
-    return new Promise((resolve, reject) => {
-      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
-      this.on('error', er => reject(er))
-      this.on('end', () => resolve())
-    })
-  }
-
-  // for await (let chunk of stream)
-  [ASYNCITERATOR]() {
-    let stopped = false
-    const stop = () => {
-      this.pause()
-      stopped = true
-      return Promise.resolve({ done: true })
-    }
-    const next = () => {
-      if (stopped) return stop()
-      const res = this.read()
-      if (res !== null) return Promise.resolve({ done: false, value: res })
-
-      if (this[EOF]) return stop()
-
-      let resolve = null
-      let reject = null
-      const onerr = er => {
-        this.removeListener('data', ondata)
-        this.removeListener('end', onend)
-        this.removeListener(DESTROYED, ondestroy)
-        stop()
-        reject(er)
-      }
-      const ondata = value => {
-        this.removeListener('error', onerr)
-        this.removeListener('end', onend)
-        this.removeListener(DESTROYED, ondestroy)
-        this.pause()
-        resolve({ value: value, done: !!this[EOF] })
-      }
-      const onend = () => {
-        this.removeListener('error', onerr)
-        this.removeListener('data', ondata)
-        this.removeListener(DESTROYED, ondestroy)
-        stop()
-        resolve({ done: true })
-      }
-      const ondestroy = () => onerr(new Error('stream destroyed'))
-      return new Promise((res, rej) => {
-        reject = rej
-        resolve = res
-        this.once(DESTROYED, ondestroy)
-        this.once('error', onerr)
-        this.once('end', onend)
-        this.once('data', ondata)
-      })
-    }
-
-    return {
-      next,
-      throw: stop,
-      return: stop,
-      [ASYNCITERATOR]() {
-        return this
-      },
-    }
-  }
-
-  // for (let chunk of stream)
-  [ITERATOR]() {
-    let stopped = false
-    const stop = () => {
-      this.pause()
-      this.removeListener(ERROR, stop)
-      this.removeListener(DESTROYED, stop)
-      this.removeListener('end', stop)
-      stopped = true
-      return { done: true }
-    }
-
-    const next = () => {
-      if (stopped) return stop()
-      const value = this.read()
-      return value === null ? stop() : { value }
-    }
-    this.once('end', stop)
-    this.once(ERROR, stop)
-    this.once(DESTROYED, stop)
-
-    return {
-      next,
-      throw: stop,
-      return: stop,
-      [ITERATOR]() {
-        return this
-      },
-    }
-  }
-
-  destroy(er) {
-    if (this[DESTROYED]) {
-      if (er) this.emit('error', er)
-      else this.emit(DESTROYED)
-      return this
-    }
-
-    this[DESTROYED] = true
-
-    // throw away all buffered data, it's never coming out
-    this[BUFFER].length = 0
-    this[BUFFERLENGTH] = 0
-
-    if (typeof this.close === 'function' && !this[CLOSED]) this.close()
-
-    if (er) this.emit('error', er)
-    // if no error to emit, still reject pending promises
-    else this.emit(DESTROYED)
-
-    return this
-  }
-
-  static isStream(s) {
-    return (
-      !!s &&
-      (s instanceof Minipass ||
-        s instanceof Stream ||
-        (s instanceof EE &&
-          // readable
-          (typeof s.pipe === 'function' ||
-            // writable
-            (typeof s.write === 'function' && typeof s.end === 'function'))))
-    )
-  }
-}
-
-
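The ESM build above wires up both Symbol.asyncIterator and Symbol.iterator, so a Minipass stream can be drained with for await. A sketch against the same API; the object payloads are illustrative:

import { Minipass } from 'minipass'

const mp = new Minipass({ objectMode: true })
mp.write({ n: 1 })
mp.end({ n: 2 })

// next() above resolves one chunk per read()/'data' event and
// settles { done: true } on 'end' or DESTROYED.
for await (const obj of mp) {
  console.log(obj.n) // 1, then 2
}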
diff --git a/node_modules/tuf-js/node_modules/minipass/package.json b/node_modules/tuf-js/node_modules/minipass/package.json
deleted file mode 100644
index 0e20e988047f2..0000000000000
--- a/node_modules/tuf-js/node_modules/minipass/package.json
+++ /dev/null
@@ -1,76 +0,0 @@
-{
-  "name": "minipass",
-  "version": "5.0.0",
-  "description": "minimal implementation of a PassThrough stream",
-  "main": "./index.js",
-  "module": "./index.mjs",
-  "types": "./index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./index.d.ts",
-        "default": "./index.mjs"
-      },
-      "require": {
-        "types": "./index.d.ts",
-        "default": "./index.js"
-      }
-    },
-    "./package.json": "./package.json"
-  },
-  "devDependencies": {
-    "@types/node": "^17.0.41",
-    "end-of-stream": "^1.4.0",
-    "node-abort-controller": "^3.1.1",
-    "prettier": "^2.6.2",
-    "tap": "^16.2.0",
-    "through2": "^2.0.3",
-    "ts-node": "^10.8.1",
-    "typedoc": "^0.23.24",
-    "typescript": "^4.7.3"
-  },
-  "scripts": {
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "node ./scripts/transpile-to-esm.js",
-    "snap": "tap",
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "postpublish": "git push origin --follow-tags",
-    "typedoc": "typedoc ./index.d.ts",
-    "format": "prettier --write . --loglevel warn"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/minipass.git"
-  },
-  "keywords": [
-    "passthrough",
-    "stream"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "ISC",
-  "files": [
-    "index.d.ts",
-    "index.js",
-    "index.mjs"
-  ],
-  "tap": {
-    "check-coverage": true
-  },
-  "engines": {
-    "node": ">=8"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 80,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  }
-}
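The deleted manifest above is a dual-build package: the "import" condition resolves to index.mjs and the "require" condition to index.js, and both builds expose the same named export (exports.Minipass in the CJS file, export class Minipass in the ESM file). A two-line sketch of the two resolution paths:

// CommonJS resolves through the "require" condition to ./index.js:
const { Minipass } = require('minipass')

// ESM resolves through the "import" condition to ./index.mjs:
// import { Minipass } from 'minipass'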
diff --git a/node_modules/tuf-js/package.json b/node_modules/tuf-js/package.json
index 9187d88083272..6286e034453d6 100644
--- a/node_modules/tuf-js/package.json
+++ b/node_modules/tuf-js/package.json
@@ -1,6 +1,6 @@
 {
   "name": "tuf-js",
-  "version": "1.1.7",
+  "version": "2.0.0",
   "description": "JavaScript implementation of The Update Framework (TUF)",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -28,19 +28,16 @@
   },
   "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme",
   "devDependencies": {
-    "@tufjs/repo-mock": "1.3.1",
+    "@tufjs/repo-mock": "2.0.0",
     "@types/debug": "^4.1.8",
-    "@types/make-fetch-happen": "^10.0.1",
-    "@types/node": "^20.2.5",
-    "nock": "^13.3.1",
-    "typescript": "^5.1.3"
+    "@types/make-fetch-happen": "^10.0.1"
   },
   "dependencies": {
-    "@tufjs/models": "1.0.4",
+    "@tufjs/models": "2.0.0",
     "debug": "^4.3.4",
-    "make-fetch-happen": "^11.1.1"
+    "make-fetch-happen": "^13.0.0"
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   }
 }
diff --git a/package-lock.json b/package-lock.json
index 2144542b2f5f8..43b0ba6054880 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -16,6 +16,7 @@
         "@npmcli/package-json",
         "@npmcli/promise-spawn",
         "@npmcli/run-script",
+        "@sigstore/tuf",
         "abbrev",
         "archy",
         "cacache",
@@ -66,7 +67,6 @@
         "qrcode-terminal",
         "read",
         "semver",
-        "sigstore",
         "ssri",
         "supports-color",
         "tar",
@@ -94,6 +94,7 @@
         "@npmcli/package-json": "^5.0.0",
         "@npmcli/promise-spawn": "^6.0.2",
         "@npmcli/run-script": "^6.0.2",
+        "@sigstore/tuf": "^2.0.0",
         "abbrev": "^2.0.0",
         "archy": "~1.0.0",
         "cacache": "^18.0.0",
@@ -144,7 +145,6 @@
         "qrcode-terminal": "^0.12.0",
         "read": "^2.1.0",
         "semver": "^7.5.4",
-        "sigstore": "^1.7.0",
         "ssri": "^10.0.5",
         "supports-color": "^9.4.0",
         "tar": "^6.1.15",
@@ -166,7 +166,7 @@
         "@npmcli/mock-globals": "^1.0.0",
         "@npmcli/mock-registry": "^1.0.0",
         "@npmcli/template-oss": "4.18.0",
-        "@tufjs/repo-mock": "^1.3.1",
+        "@tufjs/repo-mock": "^2.0.0",
         "diff": "^5.1.0",
         "licensee": "^10.0.0",
         "nock": "^13.3.3",
@@ -2847,6 +2847,25 @@
         "node": ">=14"
       }
     },
+    "node_modules/@sigstore/bundle": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.0.0.tgz",
+      "integrity": "sha512-EO7D7/kMtUsYn596WP+b5N/txWTgOt7N8vsZ2gyneMsxfrPW4FJHRZtMlZeGKCgBNCcjZhZ8ItyawkZqJC8XiA==",
+      "dependencies": {
+        "@sigstore/protobuf-specs": "^0.2.1"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs": {
+      "version": "0.2.1",
+      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz",
+      "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@sigstore/protobuf-specs": {
       "version": "0.1.0",
       "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.1.0.tgz",
@@ -2856,15 +2875,45 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@sigstore/sign": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.0.0.tgz",
+      "integrity": "sha512-f+r1jEDwM5969DTORRln9sDmWjTy1cOQzhU/iisGNzFdbF2TglmwNScbH6aiQ6QH4lc3jOXNMgKP6sec1kSVKA==",
+      "dependencies": {
+        "@sigstore/bundle": "^2.0.0",
+        "@sigstore/protobuf-specs": "^0.2.1",
+        "make-fetch-happen": "^13.0.0"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs": {
+      "version": "0.2.1",
+      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz",
+      "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@sigstore/tuf": {
-      "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-1.0.2.tgz",
-      "integrity": "sha512-vjwcYePJzM01Ha6oWWZ9gNcdIgnzyFxfqfWzph483DPJTH8Tb7f7bQRRll3CYVkyH56j0AgcPAcl6Vg95DPF+Q==",
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-2.0.0.tgz",
+      "integrity": "sha512-Ow/ZMFH9kdHbMNOH//rDuINblqufpqD+e3xS9JY5RRce+euh9eUsjSc6jodioMMi2roN9rSAk8LCuyW2hngAKw==",
       "inBundle": true,
       "dependencies": {
-        "@sigstore/protobuf-specs": "^0.1.0",
-        "tuf-js": "^1.1.7"
+        "@sigstore/protobuf-specs": "^0.2.1",
+        "tuf-js": "^2.0.0"
       },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs": {
+      "version": "0.2.1",
+      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz",
+      "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==",
+      "inBundle": true,
       "engines": {
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
@@ -2903,38 +2952,38 @@
       "dev": true
     },
     "node_modules/@tufjs/canonical-json": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-1.0.0.tgz",
-      "integrity": "sha512-QTnf++uxunWvG2z3UFNzAoQPHxnSXOwtaI3iJ+AohhV+5vONuArPjJE7aPXPVXfXJsqrVbZBu9b81AJoSd09IQ==",
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-2.0.0.tgz",
+      "integrity": "sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA==",
       "inBundle": true,
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/@tufjs/models": {
-      "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-1.0.4.tgz",
-      "integrity": "sha512-qaGV9ltJP0EO25YfFUPhxRVK0evXFIAGicsVXuRim4Ed9cjPxYhNnNJ49SFmbeLgtxpslIkX317IgpfcHPVj/A==",
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-2.0.0.tgz",
+      "integrity": "sha512-c8nj8BaOExmZKO2DXhDfegyhSGcG9E/mPN3U13L+/PsoWm1uaGiHHjxqSHQiasDBQwDA3aHuw9+9spYAP1qvvg==",
       "inBundle": true,
       "dependencies": {
-        "@tufjs/canonical-json": "1.0.0",
-        "minimatch": "^9.0.0"
+        "@tufjs/canonical-json": "2.0.0",
+        "minimatch": "^9.0.3"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/@tufjs/repo-mock": {
-      "version": "1.3.1",
-      "resolved": "https://registry.npmjs.org/@tufjs/repo-mock/-/repo-mock-1.3.1.tgz",
-      "integrity": "sha512-7IDezQbPGReWD3xmgR2pAfG61BZpvW51XnB87OfuiJOe5mkGnziCTTGITtUC3A6htQr9shkk5qIKrhpoMXBwpQ==",
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/@tufjs/repo-mock/-/repo-mock-2.0.0.tgz",
+      "integrity": "sha512-z0OtgGBn05dhpcvLaOMQPG1V16zOAL8+2A/L6EGIUE8pOP6PMX6vlJGSsN9JQ9glakXupCN4fdX8JF9XrNYUrw==",
       "dev": true,
       "dependencies": {
-        "@tufjs/models": "1.0.4",
-        "nock": "^13.3.1"
+        "@tufjs/models": "2.0.0",
+        "nock": "^13.3.3"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/@types/debug": {
@@ -11723,6 +11772,50 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/sigstore/node_modules/@sigstore/tuf": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-1.0.3.tgz",
+      "integrity": "sha512-2bRovzs0nJZFlCN3rXirE4gwxCn97JNjMmwpecqlbgV9WcxX7WRuIrgzx/X7Ib7MYRbyUTpBYE0s2x6AmZXnlg==",
+      "inBundle": true,
+      "dependencies": {
+        "@sigstore/protobuf-specs": "^0.2.0",
+        "tuf-js": "^1.1.7"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs": {
+      "version": "0.2.1",
+      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz",
+      "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==",
+      "inBundle": true,
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/sigstore/node_modules/@tufjs/canonical-json": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-1.0.0.tgz",
+      "integrity": "sha512-QTnf++uxunWvG2z3UFNzAoQPHxnSXOwtaI3iJ+AohhV+5vONuArPjJE7aPXPVXfXJsqrVbZBu9b81AJoSd09IQ==",
+      "inBundle": true,
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/sigstore/node_modules/@tufjs/models": {
+      "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-1.0.4.tgz",
+      "integrity": "sha512-qaGV9ltJP0EO25YfFUPhxRVK0evXFIAGicsVXuRim4Ed9cjPxYhNnNJ49SFmbeLgtxpslIkX317IgpfcHPVj/A==",
+      "inBundle": true,
+      "dependencies": {
+        "@tufjs/canonical-json": "1.0.0",
+        "minimatch": "^9.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/sigstore/node_modules/cacache": {
       "version": "17.1.4",
       "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
@@ -11799,6 +11892,20 @@
         "node": ">=8"
       }
     },
+    "node_modules/sigstore/node_modules/tuf-js": {
+      "version": "1.1.7",
+      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-1.1.7.tgz",
+      "integrity": "sha512-i3P9Kgw3ytjELUfpuKVDNBJvk4u5bXL6gskv572mcevPbSKCV3zt3djhmlEQ65yERjIbOSncy7U4cQJaB1CBCg==",
+      "inBundle": true,
+      "dependencies": {
+        "@tufjs/models": "1.0.4",
+        "debug": "^4.3.4",
+        "make-fetch-happen": "^11.1.1"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/slash": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
@@ -14912,93 +15019,17 @@
       }
     },
     "node_modules/tuf-js": {
-      "version": "1.1.7",
-      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-1.1.7.tgz",
-      "integrity": "sha512-i3P9Kgw3ytjELUfpuKVDNBJvk4u5bXL6gskv572mcevPbSKCV3zt3djhmlEQ65yERjIbOSncy7U4cQJaB1CBCg==",
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-2.0.0.tgz",
+      "integrity": "sha512-Oq6w0MMFihvxCM0o733TIeLeuUrDuaVaOEUVXrQtq/J6YXoUmQU84JcAftJcDkxDkuTZ9jumZN7Dh7VlyNaeWA==",
       "inBundle": true,
       "dependencies": {
-        "@tufjs/models": "1.0.4",
+        "@tufjs/models": "2.0.0",
         "debug": "^4.3.4",
-        "make-fetch-happen": "^11.1.1"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/tuf-js/node_modules/cacache": {
-      "version": "17.1.4",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
-      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
-      "inBundle": true,
-      "dependencies": {
-        "@npmcli/fs": "^3.1.0",
-        "fs-minipass": "^3.0.0",
-        "glob": "^10.2.2",
-        "lru-cache": "^7.7.1",
-        "minipass": "^7.0.3",
-        "minipass-collect": "^1.0.2",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "p-map": "^4.0.0",
-        "ssri": "^10.0.0",
-        "tar": "^6.1.11",
-        "unique-filename": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/tuf-js/node_modules/cacache/node_modules/minipass": {
-      "version": "7.0.3",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz",
-      "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==",
-      "inBundle": true,
-      "engines": {
-        "node": ">=16 || 14 >=14.17"
-      }
-    },
-    "node_modules/tuf-js/node_modules/lru-cache": {
-      "version": "7.18.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
-      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
-      "inBundle": true,
-      "engines": {
-        "node": ">=12"
-      }
-    },
-    "node_modules/tuf-js/node_modules/make-fetch-happen": {
-      "version": "11.1.1",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz",
-      "integrity": "sha512-rLWS7GCSTcEujjVBs2YqG7Y4643u8ucvCJeSRqiLYhesrDuzeuFIk37xREzAsfQaqzl8b9rNCE4m6J8tvX4Q8w==",
-      "inBundle": true,
-      "dependencies": {
-        "agentkeepalive": "^4.2.1",
-        "cacache": "^17.0.0",
-        "http-cache-semantics": "^4.1.1",
-        "http-proxy-agent": "^5.0.0",
-        "https-proxy-agent": "^5.0.0",
-        "is-lambda": "^1.0.1",
-        "lru-cache": "^7.7.1",
-        "minipass": "^5.0.0",
-        "minipass-fetch": "^3.0.0",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "negotiator": "^0.6.3",
-        "promise-retry": "^2.0.1",
-        "socks-proxy-agent": "^7.0.0",
-        "ssri": "^10.0.0"
+        "make-fetch-happen": "^13.0.0"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/tuf-js/node_modules/minipass": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz",
-      "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==",
-      "inBundle": true,
-      "engines": {
-        "node": ">=8"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/tunnel": {
@@ -16183,7 +16214,7 @@
         "npm-registry-fetch": "^16.0.0",
         "proc-log": "^3.0.0",
         "semver": "^7.3.7",
-        "sigstore": "^1.4.0",
+        "sigstore": "^2.0.0",
         "ssri": "^10.0.5"
       },
       "devDependencies": {
@@ -16198,6 +16229,28 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "workspaces/libnpmpublish/node_modules/@sigstore/protobuf-specs": {
+      "version": "0.2.1",
+      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz",
+      "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "workspaces/libnpmpublish/node_modules/sigstore": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-2.0.0.tgz",
+      "integrity": "sha512-RtTi90xIdzFmQAAKb9+Ki1nx4IR2Z5c+mFn3dN0xuPHgk3gTt3f7ZqKsZ9UFQP40ZAlm7un8LMyjhwgrTIXNPA==",
+      "dependencies": {
+        "@sigstore/bundle": "^2.0.0",
+        "@sigstore/protobuf-specs": "^0.2.1",
+        "@sigstore/sign": "^2.0.0",
+        "@sigstore/tuf": "^2.0.0"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "workspaces/libnpmsearch": {
       "version": "6.0.2",
       "license": "ISC",
diff --git a/package.json b/package.json
index f03a8635c8c98..303fd1e42c853 100644
--- a/package.json
+++ b/package.json
@@ -59,6 +59,7 @@
     "@npmcli/package-json": "^5.0.0",
     "@npmcli/promise-spawn": "^6.0.2",
     "@npmcli/run-script": "^6.0.2",
+    "@sigstore/tuf": "^2.0.0",
     "abbrev": "^2.0.0",
     "archy": "~1.0.0",
     "cacache": "^18.0.0",
@@ -109,7 +110,6 @@
     "qrcode-terminal": "^0.12.0",
     "read": "^2.1.0",
     "semver": "^7.5.4",
-    "sigstore": "^1.7.0",
     "ssri": "^10.0.5",
     "supports-color": "^9.4.0",
     "tar": "^6.1.15",
@@ -129,6 +129,7 @@
     "@npmcli/package-json",
     "@npmcli/promise-spawn",
     "@npmcli/run-script",
+    "@sigstore/tuf",
     "abbrev",
     "archy",
     "cacache",
@@ -179,7 +180,6 @@
     "qrcode-terminal",
     "read",
     "semver",
-    "sigstore",
     "ssri",
     "supports-color",
     "tar",
@@ -197,7 +197,7 @@
     "@npmcli/mock-globals": "^1.0.0",
     "@npmcli/mock-registry": "^1.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "@tufjs/repo-mock": "^1.3.1",
+    "@tufjs/repo-mock": "^2.0.0",
     "diff": "^5.1.0",
     "licensee": "^10.0.0",
     "nock": "^13.3.3",
diff --git a/test/lib/commands/audit.js b/test/lib/commands/audit.js
index 4014e73387351..ae6d6c170842f 100644
--- a/test/lib/commands/audit.js
+++ b/test/lib/commands/audit.js
@@ -1699,16 +1699,12 @@ t.test('audit signatures', async t => {
     const { npm } = await loadMockNpm(t, {
       prefixDir: installWithMultipleDeps,
       mocks: {
-        sigstore: {
-          sigstore: {
-            tuf: {
-              client: async () => ({
-                getTarget: async () => {
-                  throw new Error('error refreshing TUF metadata')
-                },
-              }),
+        '@sigstore/tuf': {
+          initTUF: async () => ({
+            getTarget: async () => {
+              throw new Error('error refreshing TUF metadata')
             },
-          },
+          }),
         },
       },
     })
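The test hunk above replaces the old nested sigstore.tuf.client() mock with the flat @sigstore/tuf shape: initTUF() resolves a client whose getTarget() is what the audit command calls. A sketch of that consumption, assuming only the two names mocked above; the target path is illustrative:

const { initTUF } = require('@sigstore/tuf')

async function fetchRegistryKeys () {
  const tuf = await initTUF()
  // with the mock above, getTarget() rejects, so the command surfaces
  // 'error refreshing TUF metadata'
  return tuf.getTarget('registry.npmjs.org/keys.json')
}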
diff --git a/workspaces/libnpmpublish/lib/provenance.js b/workspaces/libnpmpublish/lib/provenance.js
index 398db1b4cd467..45fe963d5f36f 100644
--- a/workspaces/libnpmpublish/lib/provenance.js
+++ b/workspaces/libnpmpublish/lib/provenance.js
@@ -1,4 +1,4 @@
-const { sigstore } = require('sigstore')
+const sigstore = require('sigstore')
 const { readFile } = require('fs/promises')
 const ci = require('ci-info')
 const { env } = process
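The one-line change above tracks sigstore 2.x dropping its nested { sigstore } namespace: attest and verify now hang off the module root, which is also the shape the publish test below mocks (sigstore: { verify: () => {} }). A sketch under that assumption; the payload type and throw-on-failure behavior follow sigstore-js 2.x documentation, but treat the exact option bags as unverified:

const sigstore = require('sigstore')

async function attestAndVerify (payload) {
  // attest() signs the payload and returns a provenance bundle;
  // verify() throws if the bundle does not check out
  const bundle = await sigstore.attest(payload, 'application/json')
  await sigstore.verify(bundle)
  return bundle
}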
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index 7dd2809873e4f..094414e716102 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -44,7 +44,7 @@
     "npm-registry-fetch": "^16.0.0",
     "proc-log": "^3.0.0",
     "semver": "^7.3.7",
-    "sigstore": "^1.4.0",
+    "sigstore": "^2.0.0",
     "ssri": "^10.0.5"
   },
   "engines": {
diff --git a/workspaces/libnpmpublish/test/publish.js b/workspaces/libnpmpublish/test/publish.js
index f7e824a3805c8..4a745bab8a78b 100644
--- a/workspaces/libnpmpublish/test/publish.js
+++ b/workspaces/libnpmpublish/test/publish.js
@@ -725,7 +725,7 @@ t.test('automatic provenance with incorrect permissions', async t => {
 t.test('user-supplied provenance - success', async t => {
   const { publish } = t.mock('..', {
     '../lib/provenance': t.mock('../lib/provenance', {
-      sigstore: { sigstore: { verify: () => {} } },
+      sigstore: { verify: () => {} },
     }),
   })
 

From 16157b0317cd15f8e6880bbec0f6c5e451494cb8 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Fri, 18 Aug 2023 11:38:16 -0700
Subject: [PATCH 55/68] deps: pacote@17.0.2

---
 DEPENDENCIES.md                               |    1 -
 mock-registry/package.json                    |    2 +-
 node_modules/.gitignore                       |   37 -
 .../@sigstore/protobuf-specs/LICENSE          |  202 ---
 .../dist/__generated__/envelope.js            |   89 --
 .../google/api/field_behavior.js              |  119 --
 .../google/protobuf/descriptor.js             | 1308 -----------------
 .../google/protobuf/timestamp.js              |   24 -
 .../dist/__generated__/sigstore_bundle.js     |  106 --
 .../dist/__generated__/sigstore_common.js     |  457 ------
 .../dist/__generated__/sigstore_rekor.js      |  167 ---
 .../dist/__generated__/sigstore_trustroot.js  |  103 --
 .../__generated__/sigstore_verification.js    |  273 ----
 .../@sigstore/protobuf-specs/dist/index.js    |   37 -
 .../@sigstore/protobuf-specs/package.json     |   31 -
 .../dist/__generated__/envelope.js            |   14 +-
 .../dist/__generated__/events.js              |    0
 .../google/api/field_behavior.js              |    6 +-
 .../dist/__generated__/google/protobuf/any.js |    0
 .../google/protobuf/descriptor.js             |   38 +-
 .../dist/__generated__/sigstore_common.js     |   26 +-
 .../dist/__generated__/sigstore_rekor.js      |   14 +-
 .../__generated__/sigstore_verification.js    |   14 +-
 .../@sigstore/protobuf-specs/package.json     |    2 +-
 .../@sigstore/protobuf-specs/LICENSE          |  202 ---
 .../dist/__generated__/envelope.js            |   89 --
 .../dist/__generated__/events.js              |  185 ---
 .../google/api/field_behavior.js              |  119 --
 .../dist/__generated__/google/protobuf/any.js |   65 -
 .../google/protobuf/descriptor.js             | 1308 -----------------
 .../google/protobuf/timestamp.js              |   24 -
 .../dist/__generated__/sigstore_bundle.js     |  106 --
 .../dist/__generated__/sigstore_common.js     |  457 ------
 .../dist/__generated__/sigstore_rekor.js      |  167 ---
 .../dist/__generated__/sigstore_trustroot.js  |  103 --
 .../__generated__/sigstore_verification.js    |  273 ----
 .../@sigstore/protobuf-specs/dist/index.js    |   37 -
 .../@sigstore/protobuf-specs/package.json     |   31 -
 .../@sigstore/protobuf-specs/LICENSE          |  202 ---
 .../dist/__generated__/envelope.js            |   89 --
 .../dist/__generated__/events.js              |  185 ---
 .../google/api/field_behavior.js              |  119 --
 .../dist/__generated__/google/protobuf/any.js |   65 -
 .../google/protobuf/descriptor.js             | 1308 -----------------
 .../google/protobuf/timestamp.js              |   24 -
 .../dist/__generated__/sigstore_bundle.js     |  106 --
 .../dist/__generated__/sigstore_common.js     |  457 ------
 .../dist/__generated__/sigstore_rekor.js      |  167 ---
 .../dist/__generated__/sigstore_trustroot.js  |  103 --
 .../__generated__/sigstore_verification.js    |  273 ----
 .../@sigstore/protobuf-specs/dist/index.js    |   37 -
 .../@sigstore/protobuf-specs/package.json     |   31 -
 node_modules/pacote/lib/registry.js           |    4 +-
 node_modules/pacote/package.json              |    4 +-
 node_modules/sigstore/bin/sigstore.js         |   17 -
 node_modules/sigstore/dist/ca/format.js       |   20 -
 node_modules/sigstore/dist/ca/index.js        |   39 -
 .../sigstore/dist/ca/verify/signer.js         |    5 +-
 node_modules/sigstore/dist/cli/index.js       |  125 --
 node_modules/sigstore/dist/config.js          |  113 +-
 node_modules/sigstore/dist/error.js           |   21 +-
 node_modules/sigstore/dist/external/error.js  |   21 -
 node_modules/sigstore/dist/external/fulcio.js |   51 -
 node_modules/sigstore/dist/external/index.js  |   26 -
 node_modules/sigstore/dist/external/rekor.js  |  115 --
 node_modules/sigstore/dist/external/tsa.js    |   47 -
 node_modules/sigstore/dist/identity/ci.js     |   75 -
 node_modules/sigstore/dist/identity/index.js  |   51 -
 node_modules/sigstore/dist/identity/issuer.js |   53 -
 node_modules/sigstore/dist/identity/oauth.js  |  197 ---
 .../sigstore/dist/identity/provider.js        |    2 -
 node_modules/sigstore/dist/index.js           |   58 +-
 node_modules/sigstore/dist/sign.js            |  120 --
 node_modules/sigstore/dist/sigstore-utils.js  |   80 -
 node_modules/sigstore/dist/sigstore.js        |  112 +-
 node_modules/sigstore/dist/tlog/format.js     |  134 --
 node_modules/sigstore/dist/tlog/index.js      |   75 -
 .../sigstore/dist/tlog/verify/checkpoint.js   |  148 ++
 .../sigstore/dist/tlog/verify/index.js        |   71 +-
 .../sigstore/dist/tlog/verify/merkle.js       |   12 +-
 node_modules/sigstore/dist/tlog/verify/set.js |    5 +-
 node_modules/sigstore/dist/tsa/index.js       |   47 -
 node_modules/sigstore/dist/types/signature.js |   15 -
 node_modules/sigstore/dist/types/sigstore.js  |   27 +
 .../sigstore/dist/types/sigstore/index.js     |  162 --
 .../dist/types/sigstore/serialized.js         |    2 -
 .../sigstore/dist/types/sigstore/validate.js  |   88 --
 node_modules/sigstore/dist/types/utility.js   |    1 -
 node_modules/sigstore/dist/util/crypto.js     |   25 +-
 node_modules/sigstore/dist/util/index.js      |    5 +-
 node_modules/sigstore/dist/util/oidc.js       |   54 -
 node_modules/sigstore/dist/util/promise.js    |   27 -
 node_modules/sigstore/dist/util/ua.js         |   33 -
 node_modules/sigstore/dist/verify.js          |   18 +-
 .../node_modules/@sigstore/tuf/LICENSE        |  202 ---
 .../@sigstore/tuf/dist/appdata.js             |   44 -
 .../node_modules/@sigstore/tuf/dist/client.js |  101 --
 .../node_modules/@sigstore/tuf/dist/error.js  |   12 -
 .../node_modules/@sigstore/tuf/dist/index.js  |   55 -
 .../node_modules/@sigstore/tuf/dist/target.js |   80 -
 .../@sigstore/protobuf-specs/LICENSE          |  202 ---
 .../dist/__generated__/envelope.js            |   89 --
 .../dist/__generated__/events.js              |  185 ---
 .../google/api/field_behavior.js              |  119 --
 .../dist/__generated__/google/protobuf/any.js |   65 -
 .../google/protobuf/descriptor.js             | 1308 -----------------
 .../google/protobuf/timestamp.js              |   24 -
 .../dist/__generated__/sigstore_bundle.js     |  106 --
 .../dist/__generated__/sigstore_common.js     |  457 ------
 .../dist/__generated__/sigstore_rekor.js      |  167 ---
 .../dist/__generated__/sigstore_trustroot.js  |  103 --
 .../__generated__/sigstore_verification.js    |  273 ----
 .../@sigstore/protobuf-specs/dist/index.js    |   37 -
 .../@sigstore/protobuf-specs/package.json     |   31 -
 .../node_modules/@sigstore/tuf/package.json   |   41 -
 .../tuf/store/public-good-instance-root.json  |    1 -
 .../@tufjs/canonical-json/LICENSE             |   21 -
 .../@tufjs/canonical-json/lib/index.js        |   64 -
 .../@tufjs/canonical-json/package.json        |   39 -
 .../node_modules/@tufjs/models/LICENSE        |   21 -
 .../node_modules/@tufjs/models/dist/base.js   |   83 --
 .../@tufjs/models/dist/delegations.js         |  115 --
 .../node_modules/@tufjs/models/dist/error.js  |   27 -
 .../node_modules/@tufjs/models/dist/file.js   |  183 ---
 .../node_modules/@tufjs/models/dist/index.js  |   24 -
 .../node_modules/@tufjs/models/dist/key.js    |   85 --
 .../@tufjs/models/dist/metadata.js            |  158 --
 .../node_modules/@tufjs/models/dist/role.js   |  299 ----
 .../node_modules/@tufjs/models/dist/root.js   |  116 --
 .../@tufjs/models/dist/signature.js           |   38 -
 .../@tufjs/models/dist/snapshot.js            |   71 -
 .../@tufjs/models/dist/targets.js             |   92 --
 .../@tufjs/models/dist/timestamp.js           |   58 -
 .../@tufjs/models/dist/utils/guard.js         |   33 -
 .../@tufjs/models/dist/utils/index.js         |   28 -
 .../@tufjs/models/dist/utils/key.js           |  143 --
 .../@tufjs/models/dist/utils/oid.js           |   27 -
 .../@tufjs/models/dist/utils/types.js         |    2 -
 .../@tufjs/models/dist/utils/verify.js        |   13 -
 .../node_modules/@tufjs/models/package.json   |   41 -
 .../sigstore/node_modules/cacache/LICENSE.md  |   16 -
 .../node_modules/cacache/lib/content/path.js  |   29 -
 .../node_modules/cacache/lib/content/read.js  |  166 ---
 .../node_modules/cacache/lib/content/rm.js    |   18 -
 .../node_modules/cacache/lib/content/write.js |  205 ---
 .../node_modules/cacache/lib/entry-index.js   |  330 -----
 .../sigstore/node_modules/cacache/lib/get.js  |  170 ---
 .../node_modules/cacache/lib/index.js         |   42 -
 .../node_modules/cacache/lib/memoization.js   |   72 -
 .../sigstore/node_modules/cacache/lib/put.js  |   80 -
 .../sigstore/node_modules/cacache/lib/rm.js   |   31 -
 .../node_modules/cacache/lib/util/glob.js     |    7 -
 .../cacache/lib/util/hash-to-segments.js      |    7 -
 .../node_modules/cacache/lib/util/tmp.js      |   26 -
 .../node_modules/cacache/lib/verify.js        |  257 ----
 .../cacache/node_modules/minipass/LICENSE     |   15 -
 .../node_modules/minipass/dist/cjs/index.js   | 1028 -------------
 .../minipass/dist/cjs/package.json            |    3 -
 .../node_modules/minipass/dist/mjs/index.js   | 1018 -------------
 .../minipass/dist/mjs/package.json            |    3 -
 .../node_modules/minipass/package.json        |   82 --
 .../node_modules/cacache/package.json         |   82 --
 .../sigstore/node_modules/lru-cache/LICENSE   |   15 -
 .../sigstore/node_modules/lru-cache/index.js  | 1227 ----------------
 .../sigstore/node_modules/lru-cache/index.mjs | 1227 ----------------
 .../node_modules/lru-cache/package.json       |   96 --
 .../node_modules/make-fetch-happen/LICENSE    |   16 -
 .../make-fetch-happen/lib/agent.js            |  214 ---
 .../make-fetch-happen/lib/cache/entry.js      |  469 ------
 .../make-fetch-happen/lib/cache/errors.js     |   11 -
 .../make-fetch-happen/lib/cache/index.js      |   49 -
 .../make-fetch-happen/lib/cache/key.js        |   17 -
 .../make-fetch-happen/lib/cache/policy.js     |  161 --
 .../node_modules/make-fetch-happen/lib/dns.js |   49 -
 .../make-fetch-happen/lib/fetch.js            |  118 --
 .../make-fetch-happen/lib/index.js            |   41 -
 .../make-fetch-happen/lib/options.js          |   54 -
 .../make-fetch-happen/lib/pipeline.js         |   41 -
 .../make-fetch-happen/lib/remote.js           |  121 --
 .../make-fetch-happen/package.json            |   78 -
 .../sigstore/node_modules/minipass/LICENSE    |   15 -
 .../sigstore/node_modules/minipass/index.js   |  702 ---------
 .../sigstore/node_modules/minipass/index.mjs  |  702 ---------
 .../node_modules/minipass/package.json        |   76 -
 .../sigstore/node_modules/tuf-js/LICENSE      |   21 -
 .../node_modules/tuf-js/dist/config.js        |   14 -
 .../node_modules/tuf-js/dist/error.js         |   48 -
 .../node_modules/tuf-js/dist/fetcher.js       |   84 --
 .../node_modules/tuf-js/dist/index.js         |    9 -
 .../node_modules/tuf-js/dist/store.js         |  208 ---
 .../node_modules/tuf-js/dist/updater.js       |  320 ----
 .../node_modules/tuf-js/dist/utils/tmpfile.js |   25 -
 .../node_modules/tuf-js/dist/utils/url.js     |   14 -
 .../sigstore/node_modules/tuf-js/package.json |   46 -
 node_modules/sigstore/package.json            |   19 +-
 package-lock.json                             |  223 +--
 package.json                                  |    2 +-
 test/lib/commands/audit.js                    |   26 +-
 workspaces/arborist/package.json              |    2 +-
 workspaces/libnpmdiff/package.json            |    2 +-
 workspaces/libnpmexec/package.json            |    2 +-
 workspaces/libnpmpack/package.json            |    2 +-
 202 files changed, 491 insertions(+), 26952 deletions(-)
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/LICENSE
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json
 rename node_modules/@sigstore/{bundle/node_modules/@sigstore => }/protobuf-specs/dist/__generated__/events.js (100%)
 rename node_modules/@sigstore/{bundle/node_modules/@sigstore => }/protobuf-specs/dist/__generated__/google/protobuf/any.js (100%)
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json
 delete mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE
 delete mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 delete mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 delete mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 delete mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 delete mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 delete mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 delete mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 delete mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 delete mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 delete mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 delete mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 delete mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js
 delete mode 100644 node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json
 delete mode 100755 node_modules/sigstore/bin/sigstore.js
 delete mode 100644 node_modules/sigstore/dist/ca/format.js
 delete mode 100644 node_modules/sigstore/dist/ca/index.js
 delete mode 100644 node_modules/sigstore/dist/cli/index.js
 delete mode 100644 node_modules/sigstore/dist/external/error.js
 delete mode 100644 node_modules/sigstore/dist/external/fulcio.js
 delete mode 100644 node_modules/sigstore/dist/external/index.js
 delete mode 100644 node_modules/sigstore/dist/external/rekor.js
 delete mode 100644 node_modules/sigstore/dist/external/tsa.js
 delete mode 100644 node_modules/sigstore/dist/identity/ci.js
 delete mode 100644 node_modules/sigstore/dist/identity/index.js
 delete mode 100644 node_modules/sigstore/dist/identity/issuer.js
 delete mode 100644 node_modules/sigstore/dist/identity/oauth.js
 delete mode 100644 node_modules/sigstore/dist/identity/provider.js
 delete mode 100644 node_modules/sigstore/dist/sign.js
 delete mode 100644 node_modules/sigstore/dist/sigstore-utils.js
 delete mode 100644 node_modules/sigstore/dist/tlog/format.js
 delete mode 100644 node_modules/sigstore/dist/tlog/index.js
 create mode 100644 node_modules/sigstore/dist/tlog/verify/checkpoint.js
 delete mode 100644 node_modules/sigstore/dist/tsa/index.js
 delete mode 100644 node_modules/sigstore/dist/types/signature.js
 create mode 100644 node_modules/sigstore/dist/types/sigstore.js
 delete mode 100644 node_modules/sigstore/dist/types/sigstore/index.js
 delete mode 100644 node_modules/sigstore/dist/types/sigstore/serialized.js
 delete mode 100644 node_modules/sigstore/dist/types/sigstore/validate.js
 delete mode 100644 node_modules/sigstore/dist/util/oidc.js
 delete mode 100644 node_modules/sigstore/dist/util/promise.js
 delete mode 100644 node_modules/sigstore/dist/util/ua.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/package.json
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/store/public-good-instance-root.json
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/canonical-json/LICENSE
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/canonical-json/lib/index.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/canonical-json/package.json
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/LICENSE
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/base.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/error.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/file.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/index.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/key.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/role.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/root.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/package.json
 delete mode 100644 node_modules/sigstore/node_modules/cacache/LICENSE.md
 delete mode 100644 node_modules/sigstore/node_modules/cacache/lib/content/path.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/lib/content/read.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/lib/content/rm.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/lib/content/write.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/lib/entry-index.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/lib/get.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/lib/index.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/lib/memoization.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/lib/put.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/lib/rm.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/lib/util/glob.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/lib/util/hash-to-segments.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/lib/util/tmp.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/lib/verify.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/LICENSE
 delete mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
 delete mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
 delete mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
 delete mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/package.json
 delete mode 100644 node_modules/sigstore/node_modules/cacache/package.json
 delete mode 100644 node_modules/sigstore/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/sigstore/node_modules/lru-cache/index.js
 delete mode 100644 node_modules/sigstore/node_modules/lru-cache/index.mjs
 delete mode 100644 node_modules/sigstore/node_modules/lru-cache/package.json
 delete mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/LICENSE
 delete mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/agent.js
 delete mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/entry.js
 delete mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/errors.js
 delete mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/index.js
 delete mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/key.js
 delete mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/policy.js
 delete mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/dns.js
 delete mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/fetch.js
 delete mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/index.js
 delete mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/options.js
 delete mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/pipeline.js
 delete mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/remote.js
 delete mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/package.json
 delete mode 100644 node_modules/sigstore/node_modules/minipass/LICENSE
 delete mode 100644 node_modules/sigstore/node_modules/minipass/index.js
 delete mode 100644 node_modules/sigstore/node_modules/minipass/index.mjs
 delete mode 100644 node_modules/sigstore/node_modules/minipass/package.json
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/LICENSE
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/config.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/error.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/index.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/store.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/updater.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/package.json
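
The stat list above collapses the nested copies of @sigstore/protobuf-specs (under @sigstore/bundle, @sigstore/sign, @sigstore/tuf, and sigstore's own tree) into the single hoisted copy at node_modules/@sigstore/protobuf-specs, and drops sigstore's now-unused private copies of cacache, lru-cache, make-fetch-happen, minipass, and tuf-js. A minimal sketch for auditing that kind of hoisting; the script and its walk are hypothetical illustrations, not part of this patch:

// find-nested.js (hypothetical helper, not part of this patch): list every
// copy of a package installed below node_modules so that hoisting like the
// one recorded in the stat list above can be verified after `npm install`.
const fs = require('fs');
const path = require('path');

function findCopies (dir, name, found = []) {
  const nm = path.join(dir, 'node_modules');
  if (!fs.existsSync(nm)) return found;
  if (fs.existsSync(path.join(nm, name, 'package.json'))) {
    found.push(path.join(nm, name));
  }
  for (const entry of fs.readdirSync(nm)) {
    if (entry.startsWith('.')) continue;
    const child = path.join(nm, entry);
    if (!fs.statSync(child).isDirectory()) continue;
    if (entry.startsWith('@')) {
      // scope directories hold the actual package directories one level down
      for (const scoped of fs.readdirSync(child)) {
        findCopies(path.join(child, scoped), name, found);
      }
    } else {
      findCopies(child, name, found);
    }
  }
  return found;
}

console.log(findCopies(process.cwd(), '@sigstore/protobuf-specs'));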

diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 7ff1866331389..b080058cb387e 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -767,7 +767,6 @@ graph LR;
   rimraf-->glob;
   semver-->lru-cache;
   shebang-command-->shebang-regex;
-  sigstore-->make-fetch-happen;
   sigstore-->sigstore-bundle["@sigstore/bundle"];
   sigstore-->sigstore-protobuf-specs["@sigstore/protobuf-specs"];
   sigstore-->sigstore-sign["@sigstore/sign"];
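
The edge removed from the mermaid graph records that sigstore no longer pulls make-fetch-happen in directly in this tree; the HTTP client presumably now sits behind @sigstore/sign and @sigstore/tuf, whose edges remain in the graph. A quick hypothetical spot-check against the installed tree, not part of this patch:

// Hypothetical spot-check, not part of this patch: the edge removed from the
// graph should match the installed tree, i.e. sigstore no longer declares
// make-fetch-happen as a direct dependency.
const fs = require('fs');
const pkg = JSON.parse(fs.readFileSync('node_modules/sigstore/package.json', 'utf8'));
console.log('make-fetch-happen' in (pkg.dependencies || {})); // expect: false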
diff --git a/mock-registry/package.json b/mock-registry/package.json
index cf80f303096b7..d07851b37ae38 100644
--- a/mock-registry/package.json
+++ b/mock-registry/package.json
@@ -56,7 +56,7 @@
     "json-stringify-safe": "^5.0.1",
     "nock": "^13.3.3",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^17.0.1",
+    "pacote": "^17.0.2",
     "tap": "^16.3.8"
   }
 }
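
The pacote bump here stays inside the caret range that was already declared, so it only raises the minimum for fresh installs; a lockfile that already resolved 17.0.2 satisfies both specs. An illustration of the range math with the semver package (the arithmetic, not this repo's code):

// Range math behind the bump, illustrated with the semver package: ^17.0.1
// already admits 17.0.2, so this change only raises the floor for installs
// that have not yet resolved pacote.
const semver = require('semver');
console.log(semver.satisfies('17.0.2', '^17.0.1')); // true: old range allowed it
console.log(semver.satisfies('17.0.1', '^17.0.2')); // false: new floor excludes it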
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 6073ab13442a5..5e29e58b28c44 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -37,24 +37,9 @@
 !/@sigstore/
 /@sigstore/*
 !/@sigstore/bundle
-!/@sigstore/bundle/node_modules/
-/@sigstore/bundle/node_modules/*
-!/@sigstore/bundle/node_modules/@sigstore/
-/@sigstore/bundle/node_modules/@sigstore/*
-!/@sigstore/bundle/node_modules/@sigstore/protobuf-specs
 !/@sigstore/protobuf-specs
 !/@sigstore/sign
-!/@sigstore/sign/node_modules/
-/@sigstore/sign/node_modules/*
-!/@sigstore/sign/node_modules/@sigstore/
-/@sigstore/sign/node_modules/@sigstore/*
-!/@sigstore/sign/node_modules/@sigstore/protobuf-specs
 !/@sigstore/tuf
-!/@sigstore/tuf/node_modules/
-/@sigstore/tuf/node_modules/*
-!/@sigstore/tuf/node_modules/@sigstore/
-/@sigstore/tuf/node_modules/@sigstore/*
-!/@sigstore/tuf/node_modules/@sigstore/protobuf-specs
 !/@tootallnate/
 /@tootallnate/*
 !/@tootallnate/once
@@ -262,28 +247,6 @@
 !/shebang-regex
 !/signal-exit
 !/sigstore
-!/sigstore/node_modules/
-/sigstore/node_modules/*
-!/sigstore/node_modules/@sigstore/
-/sigstore/node_modules/@sigstore/*
-!/sigstore/node_modules/@sigstore/tuf
-!/sigstore/node_modules/@sigstore/tuf/node_modules/
-/sigstore/node_modules/@sigstore/tuf/node_modules/*
-!/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/
-/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/*
-!/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs
-!/sigstore/node_modules/@tufjs/
-/sigstore/node_modules/@tufjs/*
-!/sigstore/node_modules/@tufjs/canonical-json
-!/sigstore/node_modules/@tufjs/models
-!/sigstore/node_modules/cacache
-!/sigstore/node_modules/cacache/node_modules/
-/sigstore/node_modules/cacache/node_modules/*
-!/sigstore/node_modules/cacache/node_modules/minipass
-!/sigstore/node_modules/lru-cache
-!/sigstore/node_modules/make-fetch-happen
-!/sigstore/node_modules/minipass
-!/sigstore/node_modules/tuf-js
 !/smart-buffer
 !/socks-proxy-agent
 !/socks
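
node_modules/.gitignore in this repo is an allowlist: each scope directory is un-ignored, its contents are ignored with a `/*` glob, and the bundled packages are re-included one `!` entry at a time. The hunks above simply drop the blocks for nested copies that no longer exist. A sketch of the matching semantics using the `ignore` package, which implements gitignore rules; the patterns below are a trimmed illustration, not the repo's actual list:

// Sketch of the allowlist semantics via the `ignore` package (gitignore
// matching). Everything under a scope is swept up by a glob, then specific
// packages are re-included with `!` negations.
const ignore = require('ignore');
const ig = ignore().add([
  '!/@sigstore/',               // keep the scope directory itself
  '/@sigstore/*',               // ignore every package inside it...
  '!/@sigstore/bundle',         // ...then re-include the bundled ones
  '!/@sigstore/protobuf-specs',
]);
console.log(ig.ignores('@sigstore/some-other-pkg')); // true: swept up by the glob
console.log(ig.ignores('@sigstore/protobuf-specs')); // false: explicitly re-included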
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/LICENSE
deleted file mode 100644
index e9e7c1679a09d..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright 2023 The Sigstore Authors
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
deleted file mode 100644
index 0c367a8384454..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
+++ /dev/null
@@ -1,89 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signature = exports.Envelope = void 0;
-function createBaseEnvelope() {
-    return { payload: Buffer.alloc(0), payloadType: "", signatures: [] };
-}
-exports.Envelope = {
-    fromJSON(object) {
-        return {
-            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
-            payloadType: isSet(object.payloadType) ? String(object.payloadType) : "",
-            signatures: Array.isArray(object?.signatures) ? object.signatures.map((e) => exports.Signature.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.payload !== undefined &&
-            (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0)));
-        message.payloadType !== undefined && (obj.payloadType = message.payloadType);
-        if (message.signatures) {
-            obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined);
-        }
-        else {
-            obj.signatures = [];
-        }
-        return obj;
-    },
-};
-function createBaseSignature() {
-    return { sig: Buffer.alloc(0), keyid: "" };
-}
-exports.Signature = {
-    fromJSON(object) {
-        return {
-            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
-            keyid: isSet(object.keyid) ? String(object.keyid) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0)));
-        message.keyid !== undefined && (obj.keyid = message.keyid);
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
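
The deleted envelope.js above is ts-proto output: a JSON codec in which byte fields default to empty Buffers and travel as base64 strings. A standalone sketch of the same round-trip pattern (a hypothetical trimmed version; the real module also covers Envelope and the atob/btoa fallback for runtimes without Buffer):

// Trimmed, standalone sketch of the JSON codec pattern from the deleted
// module above. Byte fields default to an empty Buffer and are emitted as
// base64 strings on the JSON side.
const Signature = {
  fromJSON: (o) => ({
    sig: o?.sig != null ? Buffer.from(String(o.sig), 'base64') : Buffer.alloc(0),
    keyid: o?.keyid != null ? String(o.keyid) : '',
  }),
  toJSON: (m) => ({ sig: m.sig.toString('base64'), keyid: m.keyid }),
};

const wire = { sig: Buffer.from('sig-bytes').toString('base64'), keyid: 'key-1' };
const roundTripped = Signature.toJSON(Signature.fromJSON(wire));
console.log(roundTripped.sig === wire.sig && roundTripped.keyid === 'key-1'); // true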
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
deleted file mode 100644
index da627499ad765..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
+++ /dev/null
@@ -1,119 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0;
-/**
- * An indicator of the behavior of a given field (for example, that a field
- * is required in requests, or given as output but ignored as input).
- * This **does not** change the behavior in protocol buffers itself; it only
- * denotes the behavior and may affect how API tooling handles the field.
- *
- * Note: This enum **may** receive new values in the future.
- */
-var FieldBehavior;
-(function (FieldBehavior) {
-    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
-    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
-    /**
-     * OPTIONAL - Specifically denotes a field as optional.
-     * While all fields in protocol buffers are optional, this may be specified
-     * for emphasis if appropriate.
-     */
-    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
-    /**
-     * REQUIRED - Denotes a field as required.
-     * This indicates that the field **must** be provided as part of the request,
-     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
-     */
-    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
-    /**
-     * OUTPUT_ONLY - Denotes a field as output only.
-     * This indicates that the field is provided in responses, but including the
-     * field in a request does nothing (the server *must* ignore it and
-     * *must not* throw an error as a result of the field's presence).
-     */
-    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
-    /**
-     * INPUT_ONLY - Denotes a field as input only.
-     * This indicates that the field is provided in requests, and the
-     * corresponding field is not included in output.
-     */
-    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
-    /**
-     * IMMUTABLE - Denotes a field as immutable.
-     * This indicates that the field may be set once in a request to create a
-     * resource, but may not be changed thereafter.
-     */
-    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
-    /**
-     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
-     * This indicates that the service may provide the elements of the list
-     * in any arbitrary order, rather than the order the user originally
-     * provided. Additionally, the list's order may or may not be stable.
-     */
-    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
-})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {}));
-function fieldBehaviorFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "FIELD_BEHAVIOR_UNSPECIFIED":
-            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
-        case 1:
-        case "OPTIONAL":
-            return FieldBehavior.OPTIONAL;
-        case 2:
-        case "REQUIRED":
-            return FieldBehavior.REQUIRED;
-        case 3:
-        case "OUTPUT_ONLY":
-            return FieldBehavior.OUTPUT_ONLY;
-        case 4:
-        case "INPUT_ONLY":
-            return FieldBehavior.INPUT_ONLY;
-        case 5:
-        case "IMMUTABLE":
-            return FieldBehavior.IMMUTABLE;
-        case 6:
-        case "UNORDERED_LIST":
-            return FieldBehavior.UNORDERED_LIST;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
-exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
-function fieldBehaviorToJSON(object) {
-    switch (object) {
-        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
-            return "FIELD_BEHAVIOR_UNSPECIFIED";
-        case FieldBehavior.OPTIONAL:
-            return "OPTIONAL";
-        case FieldBehavior.REQUIRED:
-            return "REQUIRED";
-        case FieldBehavior.OUTPUT_ONLY:
-            return "OUTPUT_ONLY";
-        case FieldBehavior.INPUT_ONLY:
-            return "INPUT_ONLY";
-        case FieldBehavior.IMMUTABLE:
-            return "IMMUTABLE";
-        case FieldBehavior.UNORDERED_LIST:
-            return "UNORDERED_LIST";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
-exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
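
field_behavior.js shows the other half of the ts-proto pattern: a reverse-mapped numeric enum plus fromJSON/toJSON converters that accept either the wire number or the name and throw on anything unrecognized. A condensed sketch of that shape (hypothetical; only three of the seven values are carried over):

// Condensed sketch of the enum codec shape generated by ts-proto (names from
// the deleted module above). The converters accept either the wire number or
// the symbolic name, and reject anything unrecognized.
const FieldBehavior = { FIELD_BEHAVIOR_UNSPECIFIED: 0, OPTIONAL: 1, REQUIRED: 2 };
const names = Object.fromEntries(Object.entries(FieldBehavior).map(([k, v]) => [v, k]));

function fieldBehaviorFromJSON (value) {
  if (typeof value === 'number' && value in names) return value;
  if (typeof value === 'string' && value in FieldBehavior) return FieldBehavior[value];
  throw new Error('Unrecognized enum value ' + value + ' for enum FieldBehavior');
}

function fieldBehaviorToJSON (value) {
  if (value in names) return names[value];
  throw new Error('Unrecognized enum value ' + value + ' for enum FieldBehavior');
}

console.log(fieldBehaviorFromJSON('REQUIRED')); // 2
console.log(fieldBehaviorToJSON(1));            // "OPTIONAL"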
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
deleted file mode 100644
index d429aac846043..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
+++ /dev/null
@@ -1,1308 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0;
-var FieldDescriptorProto_Type;
-(function (FieldDescriptorProto_Type) {
-    /**
-     * TYPE_DOUBLE - 0 is reserved for errors.
-     * Order is weird for historical reasons.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
-    /**
-     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
-    /**
-     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
-    /**
-     * TYPE_GROUP - Tag-delimited aggregate.
-     * Group type is deprecated and not supported in proto3. However, Proto3
-     * implementations should still be able to parse the group wire format and
-     * treat group fields as unknown fields.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
-    /** TYPE_MESSAGE - Length-delimited aggregate. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
-    /** TYPE_BYTES - New in version 2. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
-    /** TYPE_SINT32 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
-    /** TYPE_SINT64 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
-})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {}));
-function fieldDescriptorProto_TypeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "TYPE_DOUBLE":
-            return FieldDescriptorProto_Type.TYPE_DOUBLE;
-        case 2:
-        case "TYPE_FLOAT":
-            return FieldDescriptorProto_Type.TYPE_FLOAT;
-        case 3:
-        case "TYPE_INT64":
-            return FieldDescriptorProto_Type.TYPE_INT64;
-        case 4:
-        case "TYPE_UINT64":
-            return FieldDescriptorProto_Type.TYPE_UINT64;
-        case 5:
-        case "TYPE_INT32":
-            return FieldDescriptorProto_Type.TYPE_INT32;
-        case 6:
-        case "TYPE_FIXED64":
-            return FieldDescriptorProto_Type.TYPE_FIXED64;
-        case 7:
-        case "TYPE_FIXED32":
-            return FieldDescriptorProto_Type.TYPE_FIXED32;
-        case 8:
-        case "TYPE_BOOL":
-            return FieldDescriptorProto_Type.TYPE_BOOL;
-        case 9:
-        case "TYPE_STRING":
-            return FieldDescriptorProto_Type.TYPE_STRING;
-        case 10:
-        case "TYPE_GROUP":
-            return FieldDescriptorProto_Type.TYPE_GROUP;
-        case 11:
-        case "TYPE_MESSAGE":
-            return FieldDescriptorProto_Type.TYPE_MESSAGE;
-        case 12:
-        case "TYPE_BYTES":
-            return FieldDescriptorProto_Type.TYPE_BYTES;
-        case 13:
-        case "TYPE_UINT32":
-            return FieldDescriptorProto_Type.TYPE_UINT32;
-        case 14:
-        case "TYPE_ENUM":
-            return FieldDescriptorProto_Type.TYPE_ENUM;
-        case 15:
-        case "TYPE_SFIXED32":
-            return FieldDescriptorProto_Type.TYPE_SFIXED32;
-        case 16:
-        case "TYPE_SFIXED64":
-            return FieldDescriptorProto_Type.TYPE_SFIXED64;
-        case 17:
-        case "TYPE_SINT32":
-            return FieldDescriptorProto_Type.TYPE_SINT32;
-        case 18:
-        case "TYPE_SINT64":
-            return FieldDescriptorProto_Type.TYPE_SINT64;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
-function fieldDescriptorProto_TypeToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Type.TYPE_DOUBLE:
-            return "TYPE_DOUBLE";
-        case FieldDescriptorProto_Type.TYPE_FLOAT:
-            return "TYPE_FLOAT";
-        case FieldDescriptorProto_Type.TYPE_INT64:
-            return "TYPE_INT64";
-        case FieldDescriptorProto_Type.TYPE_UINT64:
-            return "TYPE_UINT64";
-        case FieldDescriptorProto_Type.TYPE_INT32:
-            return "TYPE_INT32";
-        case FieldDescriptorProto_Type.TYPE_FIXED64:
-            return "TYPE_FIXED64";
-        case FieldDescriptorProto_Type.TYPE_FIXED32:
-            return "TYPE_FIXED32";
-        case FieldDescriptorProto_Type.TYPE_BOOL:
-            return "TYPE_BOOL";
-        case FieldDescriptorProto_Type.TYPE_STRING:
-            return "TYPE_STRING";
-        case FieldDescriptorProto_Type.TYPE_GROUP:
-            return "TYPE_GROUP";
-        case FieldDescriptorProto_Type.TYPE_MESSAGE:
-            return "TYPE_MESSAGE";
-        case FieldDescriptorProto_Type.TYPE_BYTES:
-            return "TYPE_BYTES";
-        case FieldDescriptorProto_Type.TYPE_UINT32:
-            return "TYPE_UINT32";
-        case FieldDescriptorProto_Type.TYPE_ENUM:
-            return "TYPE_ENUM";
-        case FieldDescriptorProto_Type.TYPE_SFIXED32:
-            return "TYPE_SFIXED32";
-        case FieldDescriptorProto_Type.TYPE_SFIXED64:
-            return "TYPE_SFIXED64";
-        case FieldDescriptorProto_Type.TYPE_SINT32:
-            return "TYPE_SINT32";
-        case FieldDescriptorProto_Type.TYPE_SINT64:
-            return "TYPE_SINT64";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
-var FieldDescriptorProto_Label;
-(function (FieldDescriptorProto_Label) {
-    /** LABEL_OPTIONAL - 0 is reserved for errors */
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
-})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {}));
-function fieldDescriptorProto_LabelFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "LABEL_OPTIONAL":
-            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
-        case 2:
-        case "LABEL_REQUIRED":
-            return FieldDescriptorProto_Label.LABEL_REQUIRED;
-        case 3:
-        case "LABEL_REPEATED":
-            return FieldDescriptorProto_Label.LABEL_REPEATED;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
-function fieldDescriptorProto_LabelToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
-            return "LABEL_OPTIONAL";
-        case FieldDescriptorProto_Label.LABEL_REQUIRED:
-            return "LABEL_REQUIRED";
-        case FieldDescriptorProto_Label.LABEL_REPEATED:
-            return "LABEL_REPEATED";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
-/** Generated classes can be optimized for speed or code size. */
-var FileOptions_OptimizeMode;
-(function (FileOptions_OptimizeMode) {
-    /** SPEED - Generate complete code for parsing, serialization, */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
-    /** CODE_SIZE - etc. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
-    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
-})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {}));
-function fileOptions_OptimizeModeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "SPEED":
-            return FileOptions_OptimizeMode.SPEED;
-        case 2:
-        case "CODE_SIZE":
-            return FileOptions_OptimizeMode.CODE_SIZE;
-        case 3:
-        case "LITE_RUNTIME":
-            return FileOptions_OptimizeMode.LITE_RUNTIME;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
-function fileOptions_OptimizeModeToJSON(object) {
-    switch (object) {
-        case FileOptions_OptimizeMode.SPEED:
-            return "SPEED";
-        case FileOptions_OptimizeMode.CODE_SIZE:
-            return "CODE_SIZE";
-        case FileOptions_OptimizeMode.LITE_RUNTIME:
-            return "LITE_RUNTIME";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
-var FieldOptions_CType;
-(function (FieldOptions_CType) {
-    /** STRING - Default mode. */
-    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
-    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
-    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
-})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {}));
-function fieldOptions_CTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "STRING":
-            return FieldOptions_CType.STRING;
-        case 1:
-        case "CORD":
-            return FieldOptions_CType.CORD;
-        case 2:
-        case "STRING_PIECE":
-            return FieldOptions_CType.STRING_PIECE;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
-function fieldOptions_CTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_CType.STRING:
-            return "STRING";
-        case FieldOptions_CType.CORD:
-            return "CORD";
-        case FieldOptions_CType.STRING_PIECE:
-            return "STRING_PIECE";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
-var FieldOptions_JSType;
-(function (FieldOptions_JSType) {
-    /** JS_NORMAL - Use the default type. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
-    /** JS_STRING - Use JavaScript strings. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
-    /** JS_NUMBER - Use JavaScript numbers. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
-})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {}));
-function fieldOptions_JSTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "JS_NORMAL":
-            return FieldOptions_JSType.JS_NORMAL;
-        case 1:
-        case "JS_STRING":
-            return FieldOptions_JSType.JS_STRING;
-        case 2:
-        case "JS_NUMBER":
-            return FieldOptions_JSType.JS_NUMBER;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
-function fieldOptions_JSTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_JSType.JS_NORMAL:
-            return "JS_NORMAL";
-        case FieldOptions_JSType.JS_STRING:
-            return "JS_STRING";
-        case FieldOptions_JSType.JS_NUMBER:
-            return "JS_NUMBER";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
-/**
- * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
- * or neither? HTTP based RPC implementation may choose GET verb for safe
- * methods, and PUT verb for idempotent methods instead of the default POST.
- */
-var MethodOptions_IdempotencyLevel;
-(function (MethodOptions_IdempotencyLevel) {
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
-    /** NO_SIDE_EFFECTS - implies idempotent */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
-    /** IDEMPOTENT - idempotent, but may have side effects */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
-})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {}));
-function methodOptions_IdempotencyLevelFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "IDEMPOTENCY_UNKNOWN":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
-        case 1:
-        case "NO_SIDE_EFFECTS":
-            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
-        case 2:
-        case "IDEMPOTENT":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
-function methodOptions_IdempotencyLevelToJSON(object) {
-    switch (object) {
-        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
-            return "IDEMPOTENCY_UNKNOWN";
-        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
-            return "NO_SIDE_EFFECTS";
-        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
-            return "IDEMPOTENT";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
-function createBaseFileDescriptorSet() {
-    return { file: [] };
-}
-exports.FileDescriptorSet = {
-    fromJSON(object) {
-        return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.file) {
-            obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.file = [];
-        }
-        return obj;
-    },
-};
-function createBaseFileDescriptorProto() {
-    return {
-        name: "",
-        package: "",
-        dependency: [],
-        publicDependency: [],
-        weakDependency: [],
-        messageType: [],
-        enumType: [],
-        service: [],
-        extension: [],
-        options: undefined,
-        sourceCodeInfo: undefined,
-        syntax: "",
-    };
-}
-exports.FileDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            package: isSet(object.package) ? String(object.package) : "",
-            dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [],
-            publicDependency: Array.isArray(object?.publicDependency)
-                ? object.publicDependency.map((e) => Number(e))
-                : [],
-            weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [],
-            messageType: Array.isArray(object?.messageType)
-                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
-            service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [],
-            extension: Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
-            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
-            syntax: isSet(object.syntax) ? String(object.syntax) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.package !== undefined && (obj.package = message.package);
-        if (message.dependency) {
-            obj.dependency = message.dependency.map((e) => e);
-        }
-        else {
-            obj.dependency = [];
-        }
-        if (message.publicDependency) {
-            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
-        }
-        else {
-            obj.publicDependency = [];
-        }
-        if (message.weakDependency) {
-            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
-        }
-        else {
-            obj.weakDependency = [];
-        }
-        if (message.messageType) {
-            obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.messageType = [];
-        }
-        if (message.enumType) {
-            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.enumType = [];
-        }
-        if (message.service) {
-            obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.service = [];
-        }
-        if (message.extension) {
-            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.extension = [];
-        }
-        message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined);
-        message.sourceCodeInfo !== undefined &&
-            (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined);
-        message.syntax !== undefined && (obj.syntax = message.syntax);
-        return obj;
-    },
-};
-function createBaseDescriptorProto() {
-    return {
-        name: "",
-        field: [],
-        extension: [],
-        nestedType: [],
-        enumType: [],
-        extensionRange: [],
-        oneofDecl: [],
-        options: undefined,
-        reservedRange: [],
-        reservedName: [],
-    };
-}
-exports.DescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [],
-            extension: Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            nestedType: Array.isArray(object?.nestedType)
-                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
-            extensionRange: Array.isArray(object?.extensionRange)
-                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
-                : [],
-            oneofDecl: Array.isArray(object?.oneofDecl)
-                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
-            reservedRange: Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
-                : [],
-            reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        if (message.field) {
-            obj.field = message.field.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.field = [];
-        }
-        if (message.extension) {
-            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.extension = [];
-        }
-        if (message.nestedType) {
-            obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.nestedType = [];
-        }
-        if (message.enumType) {
-            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.enumType = [];
-        }
-        if (message.extensionRange) {
-            obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined);
-        }
-        else {
-            obj.extensionRange = [];
-        }
-        if (message.oneofDecl) {
-            obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.oneofDecl = [];
-        }
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined);
-        if (message.reservedRange) {
-            obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined);
-        }
-        else {
-            obj.reservedRange = [];
-        }
-        if (message.reservedName) {
-            obj.reservedName = message.reservedName.map((e) => e);
-        }
-        else {
-            obj.reservedName = [];
-        }
-        return obj;
-    },
-};
-function createBaseDescriptorProto_ExtensionRange() {
-    return { start: 0, end: 0, options: undefined };
-}
-exports.DescriptorProto_ExtensionRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? Number(object.start) : 0,
-            end: isSet(object.end) ? Number(object.end) : 0,
-            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = Math.round(message.start));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseDescriptorProto_ReservedRange() {
-    return { start: 0, end: 0 };
-}
-exports.DescriptorProto_ReservedRange = {
-    fromJSON(object) {
-        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = Math.round(message.start));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        return obj;
-    },
-};
-function createBaseExtensionRangeOptions() {
-    return { uninterpretedOption: [] };
-}
-exports.ExtensionRangeOptions = {
-    fromJSON(object) {
-        return {
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseFieldDescriptorProto() {
-    return {
-        name: "",
-        number: 0,
-        label: 1,
-        type: 1,
-        typeName: "",
-        extendee: "",
-        defaultValue: "",
-        oneofIndex: 0,
-        jsonName: "",
-        options: undefined,
-        proto3Optional: false,
-    };
-}
-exports.FieldDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            number: isSet(object.number) ? Number(object.number) : 0,
-            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
-            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
-            typeName: isSet(object.typeName) ? String(object.typeName) : "",
-            extendee: isSet(object.extendee) ? String(object.extendee) : "",
-            defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "",
-            oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0,
-            jsonName: isSet(object.jsonName) ? String(object.jsonName) : "",
-            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
-            proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.number !== undefined && (obj.number = Math.round(message.number));
-        message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label));
-        message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type));
-        message.typeName !== undefined && (obj.typeName = message.typeName);
-        message.extendee !== undefined && (obj.extendee = message.extendee);
-        message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue);
-        message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex));
-        message.jsonName !== undefined && (obj.jsonName = message.jsonName);
-        message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined);
-        message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional);
-        return obj;
-    },
-};
-function createBaseOneofDescriptorProto() {
-    return { name: "", options: undefined };
-}
-exports.OneofDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseEnumDescriptorProto() {
-    return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] };
-}
-exports.EnumDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [],
-            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
-            reservedRange: Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
-                : [],
-            reservedName: Array.isArray(object?.reservedName)
-                ? object.reservedName.map((e) => String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        if (message.value) {
-            obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.value = [];
-        }
-        message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined);
-        if (message.reservedRange) {
-            obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined);
-        }
-        else {
-            obj.reservedRange = [];
-        }
-        if (message.reservedName) {
-            obj.reservedName = message.reservedName.map((e) => e);
-        }
-        else {
-            obj.reservedName = [];
-        }
-        return obj;
-    },
-};
-function createBaseEnumDescriptorProto_EnumReservedRange() {
-    return { start: 0, end: 0 };
-}
-exports.EnumDescriptorProto_EnumReservedRange = {
-    fromJSON(object) {
-        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = Math.round(message.start));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        return obj;
-    },
-};
-function createBaseEnumValueDescriptorProto() {
-    return { name: "", number: 0, options: undefined };
-}
-exports.EnumValueDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            number: isSet(object.number) ? Number(object.number) : 0,
-            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.number !== undefined && (obj.number = Math.round(message.number));
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseServiceDescriptorProto() {
-    return { name: "", method: [], options: undefined };
-}
-exports.ServiceDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [],
-            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        if (message.method) {
-            obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.method = [];
-        }
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseMethodDescriptorProto() {
-    return {
-        name: "",
-        inputType: "",
-        outputType: "",
-        options: undefined,
-        clientStreaming: false,
-        serverStreaming: false,
-    };
-}
-exports.MethodDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            inputType: isSet(object.inputType) ? String(object.inputType) : "",
-            outputType: isSet(object.outputType) ? String(object.outputType) : "",
-            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
-            clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false,
-            serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.inputType !== undefined && (obj.inputType = message.inputType);
-        message.outputType !== undefined && (obj.outputType = message.outputType);
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined);
-        message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming);
-        message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming);
-        return obj;
-    },
-};
-function createBaseFileOptions() {
-    return {
-        javaPackage: "",
-        javaOuterClassname: "",
-        javaMultipleFiles: false,
-        javaGenerateEqualsAndHash: false,
-        javaStringCheckUtf8: false,
-        optimizeFor: 1,
-        goPackage: "",
-        ccGenericServices: false,
-        javaGenericServices: false,
-        pyGenericServices: false,
-        phpGenericServices: false,
-        deprecated: false,
-        ccEnableArenas: false,
-        objcClassPrefix: "",
-        csharpNamespace: "",
-        swiftPrefix: "",
-        phpClassPrefix: "",
-        phpNamespace: "",
-        phpMetadataNamespace: "",
-        rubyPackage: "",
-        uninterpretedOption: [],
-    };
-}
-exports.FileOptions = {
-    fromJSON(object) {
-        return {
-            javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "",
-            javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "",
-            javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false,
-            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
-                ? Boolean(object.javaGenerateEqualsAndHash)
-                : false,
-            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false,
-            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
-            goPackage: isSet(object.goPackage) ? String(object.goPackage) : "",
-            ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false,
-            javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false,
-            pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false,
-            phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false,
-            objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "",
-            csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "",
-            swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "",
-            phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "",
-            phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "",
-            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "",
-            rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "",
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage);
-        message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname);
-        message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles);
-        message.javaGenerateEqualsAndHash !== undefined &&
-            (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash);
-        message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8);
-        message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor));
-        message.goPackage !== undefined && (obj.goPackage = message.goPackage);
-        message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices);
-        message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices);
-        message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices);
-        message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas);
-        message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix);
-        message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace);
-        message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix);
-        message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix);
-        message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace);
-        message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace);
-        message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseMessageOptions() {
-    return {
-        messageSetWireFormat: false,
-        noStandardDescriptorAccessor: false,
-        deprecated: false,
-        mapEntry: false,
-        uninterpretedOption: [],
-    };
-}
-exports.MessageOptions = {
-    fromJSON(object) {
-        return {
-            messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false,
-            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
-                ? Boolean(object.noStandardDescriptorAccessor)
-                : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat);
-        message.noStandardDescriptorAccessor !== undefined &&
-            (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseFieldOptions() {
-    return {
-        ctype: 0,
-        packed: false,
-        jstype: 0,
-        lazy: false,
-        unverifiedLazy: false,
-        deprecated: false,
-        weak: false,
-        uninterpretedOption: [],
-    };
-}
-exports.FieldOptions = {
-    fromJSON(object) {
-        return {
-            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
-            packed: isSet(object.packed) ? Boolean(object.packed) : false,
-            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
-            lazy: isSet(object.lazy) ? Boolean(object.lazy) : false,
-            unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            weak: isSet(object.weak) ? Boolean(object.weak) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype));
-        message.packed !== undefined && (obj.packed = message.packed);
-        message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype));
-        message.lazy !== undefined && (obj.lazy = message.lazy);
-        message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.weak !== undefined && (obj.weak = message.weak);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseOneofOptions() {
-    return { uninterpretedOption: [] };
-}
-exports.OneofOptions = {
-    fromJSON(object) {
-        return {
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseEnumOptions() {
-    return { allowAlias: false, deprecated: false, uninterpretedOption: [] };
-}
-exports.EnumOptions = {
-    fromJSON(object) {
-        return {
-            allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseEnumValueOptions() {
-    return { deprecated: false, uninterpretedOption: [] };
-}
-exports.EnumValueOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseServiceOptions() {
-    return { deprecated: false, uninterpretedOption: [] };
-}
-exports.ServiceOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseMethodOptions() {
-    return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] };
-}
-exports.MethodOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            idempotencyLevel: isSet(object.idempotencyLevel)
-                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
-                : 0,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.idempotencyLevel !== undefined &&
-            (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel));
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseUninterpretedOption() {
-    return {
-        name: [],
-        identifierValue: "",
-        positiveIntValue: "0",
-        negativeIntValue: "0",
-        doubleValue: 0,
-        stringValue: Buffer.alloc(0),
-        aggregateValue: "",
-    };
-}
-exports.UninterpretedOption = {
-    fromJSON(object) {
-        return {
-            name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [],
-            identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "",
-            positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0",
-            negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0",
-            doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0,
-            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
-            aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name) {
-            obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined);
-        }
-        else {
-            obj.name = [];
-        }
-        message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue);
-        message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue);
-        message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue);
-        message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue);
-        message.stringValue !== undefined &&
-            (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0)));
-        message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue);
-        return obj;
-    },
-};
-function createBaseUninterpretedOption_NamePart() {
-    return { namePart: "", isExtension: false };
-}
-exports.UninterpretedOption_NamePart = {
-    fromJSON(object) {
-        return {
-            namePart: isSet(object.namePart) ? String(object.namePart) : "",
-            isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.namePart !== undefined && (obj.namePart = message.namePart);
-        message.isExtension !== undefined && (obj.isExtension = message.isExtension);
-        return obj;
-    },
-};
-function createBaseSourceCodeInfo() {
-    return { location: [] };
-}
-exports.SourceCodeInfo = {
-    fromJSON(object) {
-        return {
-            location: Array.isArray(object?.location)
-                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.location) {
-            obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined);
-        }
-        else {
-            obj.location = [];
-        }
-        return obj;
-    },
-};
-function createBaseSourceCodeInfo_Location() {
-    return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] };
-}
-exports.SourceCodeInfo_Location = {
-    fromJSON(object) {
-        return {
-            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
-            span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [],
-            leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "",
-            trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "",
-            leadingDetachedComments: Array.isArray(object?.leadingDetachedComments)
-                ? object.leadingDetachedComments.map((e) => String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        else {
-            obj.path = [];
-        }
-        if (message.span) {
-            obj.span = message.span.map((e) => Math.round(e));
-        }
-        else {
-            obj.span = [];
-        }
-        message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments);
-        message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments);
-        if (message.leadingDetachedComments) {
-            obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e);
-        }
-        else {
-            obj.leadingDetachedComments = [];
-        }
-        return obj;
-    },
-};
-function createBaseGeneratedCodeInfo() {
-    return { annotation: [] };
-}
-exports.GeneratedCodeInfo = {
-    fromJSON(object) {
-        return {
-            annotation: Array.isArray(object?.annotation)
-                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.annotation) {
-            obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined);
-        }
-        else {
-            obj.annotation = [];
-        }
-        return obj;
-    },
-};
-function createBaseGeneratedCodeInfo_Annotation() {
-    return { path: [], sourceFile: "", begin: 0, end: 0 };
-}
-exports.GeneratedCodeInfo_Annotation = {
-    fromJSON(object) {
-        return {
-            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
-            sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "",
-            begin: isSet(object.begin) ? Number(object.begin) : 0,
-            end: isSet(object.end) ? Number(object.end) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        else {
-            obj.path = [];
-        }
-        message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile);
-        message.begin !== undefined && (obj.begin = Math.round(message.begin));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
deleted file mode 100644
index 159135fe87172..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Timestamp = void 0;
-function createBaseTimestamp() {
-    return { seconds: "0", nanos: 0 };
-}
-exports.Timestamp = {
-    fromJSON(object) {
-        return {
-            seconds: isSet(object.seconds) ? String(object.seconds) : "0",
-            nanos: isSet(object.nanos) ? Number(object.nanos) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.seconds !== undefined && (obj.seconds = message.seconds);
-        message.nanos !== undefined && (obj.nanos = Math.round(message.nanos));
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
deleted file mode 100644
index 1ef3e1b3356b7..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
+++ /dev/null
@@ -1,106 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
-/* eslint-disable */
-const envelope_1 = require("./envelope");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_rekor_1 = require("./sigstore_rekor");
-function createBaseTimestampVerificationData() {
-    return { rfc3161Timestamps: [] };
-}
-exports.TimestampVerificationData = {
-    fromJSON(object) {
-        return {
-            rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps)
-                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rfc3161Timestamps) {
-            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined);
-        }
-        else {
-            obj.rfc3161Timestamps = [];
-        }
-        return obj;
-    },
-};
-function createBaseVerificationMaterial() {
-    return { content: undefined, tlogEntries: [], timestampVerificationData: undefined };
-}
-exports.VerificationMaterial = {
-    fromJSON(object) {
-        return {
-            content: isSet(object.publicKey)
-                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
-                : isSet(object.x509CertificateChain)
-                    ? {
-                        $case: "x509CertificateChain",
-                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
-                    }
-                    : undefined,
-            tlogEntries: Array.isArray(object?.tlogEntries)
-                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
-                : [],
-            timestampVerificationData: isSet(object.timestampVerificationData)
-                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.content?.$case === "publicKey" &&
-            (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined);
-        message.content?.$case === "x509CertificateChain" &&
-            (obj.x509CertificateChain = message.content?.x509CertificateChain
-                ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain)
-                : undefined);
-        if (message.tlogEntries) {
-            obj.tlogEntries = message.tlogEntries.map((e) => e ? sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
-        }
-        else {
-            obj.tlogEntries = [];
-        }
-        message.timestampVerificationData !== undefined &&
-            (obj.timestampVerificationData = message.timestampVerificationData
-                ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
-                : undefined);
-        return obj;
-    },
-};
-function createBaseBundle() {
-    return { mediaType: "", verificationMaterial: undefined, content: undefined };
-}
-exports.Bundle = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
-            verificationMaterial: isSet(object.verificationMaterial)
-                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
-                : undefined,
-            content: isSet(object.messageSignature)
-                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
-                : isSet(object.dsseEnvelope)
-                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
-        message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
-            ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
-            : undefined);
-        message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
-            ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
-            : undefined);
-        message.content?.$case === "dsseEnvelope" &&
-            (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
deleted file mode 100644
index bcd654e9154b9..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
+++ /dev/null
@@ -1,457 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
-/* eslint-disable */
-const timestamp_1 = require("./google/protobuf/timestamp");
-/**
- * Only a subset of the secure hash standard algorithms are supported.
- * See <https://csrc.nist.gov/projects/hash-functions> for more
- * details.
- * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
- * any proto JSON serialization to emit the used hash algorithm, as default
- * option is to *omit* the default value of an enum (which is the first
- * value, represented by '0').
- */
-var HashAlgorithm;
-(function (HashAlgorithm) {
-    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
-    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
-})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {}));
-function hashAlgorithmFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "HASH_ALGORITHM_UNSPECIFIED":
-            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
-        case 1:
-        case "SHA2_256":
-            return HashAlgorithm.SHA2_256;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
-function hashAlgorithmToJSON(object) {
-    switch (object) {
-        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
-            return "HASH_ALGORITHM_UNSPECIFIED";
-        case HashAlgorithm.SHA2_256:
-            return "SHA2_256";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
-/**
- * Details of a specific public key, capturing the key encoding method,
- * and signature algorithm.
- * To avoid the possibility of contradicting formats such as PKCS1 with
- * ED25519 the valid permutations are listed as a linear set instead of a
- * cartesian set (i.e. one combined variable instead of two, one for encoding
- * and one for the signature algorithm).
- */
-var PublicKeyDetails;
-(function (PublicKeyDetails) {
-    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-    /** PKCS1_RSA_PKCS1V5 - RSA */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
-    /** PKCS1_RSA_PSS - See RFC8017 */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
-    /** PKIX_ECDSA_P256_SHA_256 - ECDSA */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
-    /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
-    /** PKIX_ED25519 - Ed 25519 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
-})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {}));
-function publicKeyDetailsFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
-            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
-        case 1:
-        case "PKCS1_RSA_PKCS1V5":
-            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
-        case 2:
-        case "PKCS1_RSA_PSS":
-            return PublicKeyDetails.PKCS1_RSA_PSS;
-        case 3:
-        case "PKIX_RSA_PKCS1V5":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
-        case 4:
-        case "PKIX_RSA_PSS":
-            return PublicKeyDetails.PKIX_RSA_PSS;
-        case 5:
-        case "PKIX_ECDSA_P256_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
-        case 6:
-        case "PKIX_ECDSA_P256_HMAC_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
-        case 7:
-        case "PKIX_ED25519":
-            return PublicKeyDetails.PKIX_ED25519;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
-function publicKeyDetailsToJSON(object) {
-    switch (object) {
-        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
-            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
-            return "PKCS1_RSA_PKCS1V5";
-        case PublicKeyDetails.PKCS1_RSA_PSS:
-            return "PKCS1_RSA_PSS";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
-            return "PKIX_RSA_PKCS1V5";
-        case PublicKeyDetails.PKIX_RSA_PSS:
-            return "PKIX_RSA_PSS";
-        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
-            return "PKIX_ECDSA_P256_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
-            return "PKIX_ECDSA_P256_HMAC_SHA_256";
-        case PublicKeyDetails.PKIX_ED25519:
-            return "PKIX_ED25519";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
-var SubjectAlternativeNameType;
-(function (SubjectAlternativeNameType) {
-    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
-    /**
-     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
-     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
-     * for more details.
-     */
-    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
-})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {}));
-function subjectAlternativeNameTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
-            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
-        case 1:
-        case "EMAIL":
-            return SubjectAlternativeNameType.EMAIL;
-        case 2:
-        case "URI":
-            return SubjectAlternativeNameType.URI;
-        case 3:
-        case "OTHER_NAME":
-            return SubjectAlternativeNameType.OTHER_NAME;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
-function subjectAlternativeNameTypeToJSON(object) {
-    switch (object) {
-        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
-            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-        case SubjectAlternativeNameType.EMAIL:
-            return "EMAIL";
-        case SubjectAlternativeNameType.URI:
-            return "URI";
-        case SubjectAlternativeNameType.OTHER_NAME:
-            return "OTHER_NAME";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
-function createBaseHashOutput() {
-    return { algorithm: 0, digest: Buffer.alloc(0) };
-}
-exports.HashOutput = {
-    fromJSON(object) {
-        return {
-            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
-            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm));
-        message.digest !== undefined &&
-            (obj.digest = base64FromBytes(message.digest !== undefined ? message.digest : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseMessageSignature() {
-    return { messageDigest: undefined, signature: Buffer.alloc(0) };
-}
-exports.MessageSignature = {
-    fromJSON(object) {
-        return {
-            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
-            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.messageDigest !== undefined &&
-            (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined);
-        message.signature !== undefined &&
-            (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseLogId() {
-    return { keyId: Buffer.alloc(0) };
-}
-exports.LogId = {
-    fromJSON(object) {
-        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.keyId !== undefined &&
-            (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseRFC3161SignedTimestamp() {
-    return { signedTimestamp: Buffer.alloc(0) };
-}
-exports.RFC3161SignedTimestamp = {
-    fromJSON(object) {
-        return {
-            signedTimestamp: isSet(object.signedTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.signedTimestamp !== undefined &&
-            (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBasePublicKey() {
-    return { rawBytes: undefined, keyDetails: 0, validFor: undefined };
-}
-exports.PublicKey = {
-    fromJSON(object) {
-        return {
-            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
-            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
-            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.rawBytes !== undefined &&
-            (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined);
-        message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails));
-        message.validFor !== undefined &&
-            (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined);
-        return obj;
-    },
-};
-function createBasePublicKeyIdentifier() {
-    return { hint: "" };
-}
-exports.PublicKeyIdentifier = {
-    fromJSON(object) {
-        return { hint: isSet(object.hint) ? String(object.hint) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.hint !== undefined && (obj.hint = message.hint);
-        return obj;
-    },
-};
-function createBaseObjectIdentifier() {
-    return { id: [] };
-}
-exports.ObjectIdentifier = {
-    fromJSON(object) {
-        return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.id) {
-            obj.id = message.id.map((e) => Math.round(e));
-        }
-        else {
-            obj.id = [];
-        }
-        return obj;
-    },
-};
-function createBaseObjectIdentifierValuePair() {
-    return { oid: undefined, value: Buffer.alloc(0) };
-}
-exports.ObjectIdentifierValuePair = {
-    fromJSON(object) {
-        return {
-            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined);
-        message.value !== undefined &&
-            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseDistinguishedName() {
-    return { organization: "", commonName: "" };
-}
-exports.DistinguishedName = {
-    fromJSON(object) {
-        return {
-            organization: isSet(object.organization) ? String(object.organization) : "",
-            commonName: isSet(object.commonName) ? String(object.commonName) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.organization !== undefined && (obj.organization = message.organization);
-        message.commonName !== undefined && (obj.commonName = message.commonName);
-        return obj;
-    },
-};
-function createBaseX509Certificate() {
-    return { rawBytes: Buffer.alloc(0) };
-}
-exports.X509Certificate = {
-    fromJSON(object) {
-        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.rawBytes !== undefined &&
-            (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseSubjectAlternativeName() {
-    return { type: 0, identity: undefined };
-}
-exports.SubjectAlternativeName = {
-    fromJSON(object) {
-        return {
-            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
-            identity: isSet(object.regexp)
-                ? { $case: "regexp", regexp: String(object.regexp) }
-                : isSet(object.value)
-                    ? { $case: "value", value: String(object.value) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type));
-        message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp);
-        message.identity?.$case === "value" && (obj.value = message.identity?.value);
-        return obj;
-    },
-};
-function createBaseX509CertificateChain() {
-    return { certificates: [] };
-}
-exports.X509CertificateChain = {
-    fromJSON(object) {
-        return {
-            certificates: Array.isArray(object?.certificates)
-                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.certificates) {
-            obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined);
-        }
-        else {
-            obj.certificates = [];
-        }
-        return obj;
-    },
-};
-function createBaseTimeRange() {
-    return { start: undefined, end: undefined };
-}
-exports.TimeRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
-            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = message.start.toISOString());
-        message.end !== undefined && (obj.end = message.end.toISOString());
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function fromTimestamp(t) {
-    let millis = Number(t.seconds) * 1000;
-    millis += t.nanos / 1000000;
-    return new Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
deleted file mode 100644
index 398193b2075a7..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
+++ /dev/null
@@ -1,167 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-function createBaseKindVersion() {
-    return { kind: "", version: "" };
-}
-exports.KindVersion = {
-    fromJSON(object) {
-        return {
-            kind: isSet(object.kind) ? String(object.kind) : "",
-            version: isSet(object.version) ? String(object.version) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.kind !== undefined && (obj.kind = message.kind);
-        message.version !== undefined && (obj.version = message.version);
-        return obj;
-    },
-};
-function createBaseCheckpoint() {
-    return { envelope: "" };
-}
-exports.Checkpoint = {
-    fromJSON(object) {
-        return { envelope: isSet(object.envelope) ? String(object.envelope) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.envelope !== undefined && (obj.envelope = message.envelope);
-        return obj;
-    },
-};
-function createBaseInclusionProof() {
-    return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined };
-}
-exports.InclusionProof = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
-            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
-            treeSize: isSet(object.treeSize) ? String(object.treeSize) : "0",
-            hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [],
-            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
-        message.rootHash !== undefined &&
-            (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0)));
-        message.treeSize !== undefined && (obj.treeSize = message.treeSize);
-        if (message.hashes) {
-            obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0)));
-        }
-        else {
-            obj.hashes = [];
-        }
-        message.checkpoint !== undefined &&
-            (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined);
-        return obj;
-    },
-};
-function createBaseInclusionPromise() {
-    return { signedEntryTimestamp: Buffer.alloc(0) };
-}
-exports.InclusionPromise = {
-    fromJSON(object) {
-        return {
-            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.signedEntryTimestamp !== undefined &&
-            (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseTransparencyLogEntry() {
-    return {
-        logIndex: "0",
-        logId: undefined,
-        kindVersion: undefined,
-        integratedTime: "0",
-        inclusionPromise: undefined,
-        inclusionProof: undefined,
-        canonicalizedBody: Buffer.alloc(0),
-    };
-}
-exports.TransparencyLogEntry = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
-            integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0",
-            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
-            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
-            canonicalizedBody: isSet(object.canonicalizedBody)
-                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
-        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
-        message.kindVersion !== undefined &&
-            (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined);
-        message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime);
-        message.inclusionPromise !== undefined &&
-            (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined);
-        message.inclusionProof !== undefined &&
-            (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined);
-        message.canonicalizedBody !== undefined &&
-            (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? message.canonicalizedBody : Buffer.alloc(0)));
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
deleted file mode 100644
index 05e566767cdb2..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
+++ /dev/null
@@ -1,103 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-function createBaseTransparencyLogInstance() {
-    return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined };
-}
-exports.TransparencyLogInstance = {
-    fromJSON(object) {
-        return {
-            baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "",
-            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
-            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl);
-        message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm));
-        message.publicKey !== undefined &&
-            (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined);
-        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
-        return obj;
-    },
-};
-function createBaseCertificateAuthority() {
-    return { subject: undefined, uri: "", certChain: undefined, validFor: undefined };
-}
-exports.CertificateAuthority = {
-    fromJSON(object) {
-        return {
-            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
-            uri: isSet(object.uri) ? String(object.uri) : "",
-            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
-            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.subject !== undefined &&
-            (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined);
-        message.uri !== undefined && (obj.uri = message.uri);
-        message.certChain !== undefined &&
-            (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined);
-        message.validFor !== undefined &&
-            (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined);
-        return obj;
-    },
-};
-function createBaseTrustedRoot() {
-    return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] };
-}
-exports.TrustedRoot = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
-            tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [],
-            certificateAuthorities: Array.isArray(object?.certificateAuthorities)
-                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-            ctlogs: Array.isArray(object?.ctlogs)
-                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
-                : [],
-            timestampAuthorities: Array.isArray(object?.timestampAuthorities)
-                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
-        if (message.tlogs) {
-            obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
-        }
-        else {
-            obj.tlogs = [];
-        }
-        if (message.certificateAuthorities) {
-            obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
-        }
-        else {
-            obj.certificateAuthorities = [];
-        }
-        if (message.ctlogs) {
-            obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
-        }
-        else {
-            obj.ctlogs = [];
-        }
-        if (message.timestampAuthorities) {
-            obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
-        }
-        else {
-            obj.timestampAuthorities = [];
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
deleted file mode 100644
index 8a72b89761869..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
+++ /dev/null
@@ -1,273 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
-/* eslint-disable */
-const sigstore_bundle_1 = require("./sigstore_bundle");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_trustroot_1 = require("./sigstore_trustroot");
-function createBaseCertificateIdentity() {
-    return { issuer: "", san: undefined, oids: [] };
-}
-exports.CertificateIdentity = {
-    fromJSON(object) {
-        return {
-            issuer: isSet(object.issuer) ? String(object.issuer) : "",
-            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
-            oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.issuer !== undefined && (obj.issuer = message.issuer);
-        message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined);
-        if (message.oids) {
-            obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined);
-        }
-        else {
-            obj.oids = [];
-        }
-        return obj;
-    },
-};
-function createBaseCertificateIdentities() {
-    return { identities: [] };
-}
-exports.CertificateIdentities = {
-    fromJSON(object) {
-        return {
-            identities: Array.isArray(object?.identities)
-                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.identities) {
-            obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined);
-        }
-        else {
-            obj.identities = [];
-        }
-        return obj;
-    },
-};
-function createBasePublicKeyIdentities() {
-    return { publicKeys: [] };
-}
-exports.PublicKeyIdentities = {
-    fromJSON(object) {
-        return {
-            publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.publicKeys) {
-            obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined);
-        }
-        else {
-            obj.publicKeys = [];
-        }
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions() {
-    return { signers: undefined, tlogOptions: undefined, ctlogOptions: undefined, tsaOptions: undefined };
-}
-exports.ArtifactVerificationOptions = {
-    fromJSON(object) {
-        return {
-            signers: isSet(object.certificateIdentities)
-                ? {
-                    $case: "certificateIdentities",
-                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
-                }
-                : isSet(object.publicKeys)
-                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
-                    : undefined,
-            tlogOptions: isSet(object.tlogOptions)
-                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
-                : undefined,
-            ctlogOptions: isSet(object.ctlogOptions)
-                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
-                : undefined,
-            tsaOptions: isSet(object.tsaOptions)
-                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.signers?.$case === "certificateIdentities" &&
-            (obj.certificateIdentities = message.signers?.certificateIdentities
-                ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities)
-                : undefined);
-        message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys
-            ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys)
-            : undefined);
-        message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions
-            ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions)
-            : undefined);
-        message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions
-            ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions)
-            : undefined);
-        message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions
-            ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions)
-            : undefined);
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions_TlogOptions() {
-    return { threshold: 0, performOnlineVerification: false, disable: false };
-}
-exports.ArtifactVerificationOptions_TlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
-            performOnlineVerification: isSet(object.performOnlineVerification)
-                ? Boolean(object.performOnlineVerification)
-                : false,
-            disable: isSet(object.disable) ? Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
-        message.performOnlineVerification !== undefined &&
-            (obj.performOnlineVerification = message.performOnlineVerification);
-        message.disable !== undefined && (obj.disable = message.disable);
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions_CtlogOptions() {
-    return { threshold: 0, detachedSct: false, disable: false };
-}
-exports.ArtifactVerificationOptions_CtlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
-            detachedSct: isSet(object.detachedSct) ? Boolean(object.detachedSct) : false,
-            disable: isSet(object.disable) ? Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
-        message.detachedSct !== undefined && (obj.detachedSct = message.detachedSct);
-        message.disable !== undefined && (obj.disable = message.disable);
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() {
-    return { threshold: 0, disable: false };
-}
-exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
-        message.disable !== undefined && (obj.disable = message.disable);
-        return obj;
-    },
-};
-function createBaseArtifact() {
-    return { data: undefined };
-}
-exports.Artifact = {
-    fromJSON(object) {
-        return {
-            data: isSet(object.artifactUri)
-                ? { $case: "artifactUri", artifactUri: String(object.artifactUri) }
-                : isSet(object.artifact)
-                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri);
-        message.data?.$case === "artifact" &&
-            (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined);
-        return obj;
-    },
-};
-function createBaseInput() {
-    return {
-        artifactTrustRoot: undefined,
-        artifactVerificationOptions: undefined,
-        bundle: undefined,
-        artifact: undefined,
-    };
-}
-exports.Input = {
-    fromJSON(object) {
-        return {
-            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
-            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
-                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
-                : undefined,
-            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
-            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.artifactTrustRoot !== undefined &&
-            (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined);
-        message.artifactVerificationOptions !== undefined &&
-            (obj.artifactVerificationOptions = message.artifactVerificationOptions
-                ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions)
-                : undefined);
-        message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined);
-        message.artifact !== undefined && (obj.artifact = message.artifact ? exports.Artifact.toJSON(message.artifact) : undefined);
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js
deleted file mode 100644
index eafb768c48fca..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js
+++ /dev/null
@@ -1,37 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-__exportStar(require("./__generated__/envelope"), exports);
-__exportStar(require("./__generated__/sigstore_bundle"), exports);
-__exportStar(require("./__generated__/sigstore_common"), exports);
-__exportStar(require("./__generated__/sigstore_rekor"), exports);
-__exportStar(require("./__generated__/sigstore_trustroot"), exports);
-__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json
deleted file mode 100644
index 450abb157f31a..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
-  "name": "@sigstore/protobuf-specs",
-  "version": "0.2.1",
-  "description": "code-signing for npm packages",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "scripts": {
-    "build": "tsc"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/protobuf-specs.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "bugs": {
-    "url": "https://github.com/sigstore/protobuf-specs/issues"
-  },
-  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
-  "devDependencies": {
-    "@tsconfig/node14": "^1.0.3",
-    "@types/node": "^18.14.0",
-    "typescript": "^4.9.5"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  }
-}
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
index 715bb1aa5b57d..0c367a8384454 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
@@ -44,7 +44,7 @@ exports.Signature = {
         return obj;
     },
 };
-var globalThis = (() => {
+var tsProtoGlobalThis = (() => {
     if (typeof globalThis !== "undefined") {
         return globalThis;
     }
@@ -60,11 +60,11 @@ var globalThis = (() => {
     throw "Unable to locate global object";
 })();
 function bytesFromBase64(b64) {
-    if (globalThis.Buffer) {
-        return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
     }
     else {
-        const bin = globalThis.atob(b64);
+        const bin = tsProtoGlobalThis.atob(b64);
         const arr = new Uint8Array(bin.length);
         for (let i = 0; i < bin.length; ++i) {
             arr[i] = bin.charCodeAt(i);
@@ -73,15 +73,15 @@ function bytesFromBase64(b64) {
     }
 }
 function base64FromBytes(arr) {
-    if (globalThis.Buffer) {
-        return globalThis.Buffer.from(arr).toString("base64");
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
     }
     else {
         const bin = [];
         arr.forEach((byte) => {
             bin.push(String.fromCharCode(byte));
         });
-        return globalThis.btoa(bin.join(""));
+        return tsProtoGlobalThis.btoa(bin.join(""));
     }
 }
 function isSet(value) {
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
similarity index 100%
rename from node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
rename to node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
index f9b57cccdc3d3..da627499ad765 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
@@ -77,7 +77,7 @@ function fieldBehaviorFromJSON(object) {
         case "UNORDERED_LIST":
             return FieldBehavior.UNORDERED_LIST;
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
     }
 }
 exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
@@ -98,11 +98,11 @@ function fieldBehaviorToJSON(object) {
         case FieldBehavior.UNORDERED_LIST:
             return "UNORDERED_LIST";
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
     }
 }
 exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
-var globalThis = (() => {
+var tsProtoGlobalThis = (() => {
     if (typeof globalThis !== "undefined") {
         return globalThis;
     }
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
similarity index 100%
rename from node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
rename to node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
index b8cfc86ab99aa..d429aac846043 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
@@ -102,7 +102,7 @@ function fieldDescriptorProto_TypeFromJSON(object) {
         case "TYPE_SINT64":
             return FieldDescriptorProto_Type.TYPE_SINT64;
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
     }
 }
 exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
@@ -145,7 +145,7 @@ function fieldDescriptorProto_TypeToJSON(object) {
         case FieldDescriptorProto_Type.TYPE_SINT64:
             return "TYPE_SINT64";
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
     }
 }
 exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
@@ -168,7 +168,7 @@ function fieldDescriptorProto_LabelFromJSON(object) {
         case "LABEL_REPEATED":
             return FieldDescriptorProto_Label.LABEL_REPEATED;
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
     }
 }
 exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
@@ -181,7 +181,7 @@ function fieldDescriptorProto_LabelToJSON(object) {
         case FieldDescriptorProto_Label.LABEL_REPEATED:
             return "LABEL_REPEATED";
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
     }
 }
 exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
@@ -207,7 +207,7 @@ function fileOptions_OptimizeModeFromJSON(object) {
         case "LITE_RUNTIME":
             return FileOptions_OptimizeMode.LITE_RUNTIME;
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
     }
 }
 exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
@@ -220,7 +220,7 @@ function fileOptions_OptimizeModeToJSON(object) {
         case FileOptions_OptimizeMode.LITE_RUNTIME:
             return "LITE_RUNTIME";
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
     }
 }
 exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
@@ -243,7 +243,7 @@ function fieldOptions_CTypeFromJSON(object) {
         case "STRING_PIECE":
             return FieldOptions_CType.STRING_PIECE;
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
     }
 }
 exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
@@ -256,7 +256,7 @@ function fieldOptions_CTypeToJSON(object) {
         case FieldOptions_CType.STRING_PIECE:
             return "STRING_PIECE";
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
     }
 }
 exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
@@ -281,7 +281,7 @@ function fieldOptions_JSTypeFromJSON(object) {
         case "JS_NUMBER":
             return FieldOptions_JSType.JS_NUMBER;
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
     }
 }
 exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
@@ -294,7 +294,7 @@ function fieldOptions_JSTypeToJSON(object) {
         case FieldOptions_JSType.JS_NUMBER:
             return "JS_NUMBER";
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
     }
 }
 exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
@@ -323,7 +323,7 @@ function methodOptions_IdempotencyLevelFromJSON(object) {
         case "IDEMPOTENT":
             return MethodOptions_IdempotencyLevel.IDEMPOTENT;
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
     }
 }
 exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
@@ -336,7 +336,7 @@ function methodOptions_IdempotencyLevelToJSON(object) {
         case MethodOptions_IdempotencyLevel.IDEMPOTENT:
             return "IDEMPOTENT";
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
     }
 }
 exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
@@ -1263,7 +1263,7 @@ exports.GeneratedCodeInfo_Annotation = {
         return obj;
     },
 };
-var globalThis = (() => {
+var tsProtoGlobalThis = (() => {
     if (typeof globalThis !== "undefined") {
         return globalThis;
     }
@@ -1279,11 +1279,11 @@ var globalThis = (() => {
     throw "Unable to locate global object";
 })();
 function bytesFromBase64(b64) {
-    if (globalThis.Buffer) {
-        return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
     }
     else {
-        const bin = globalThis.atob(b64);
+        const bin = tsProtoGlobalThis.atob(b64);
         const arr = new Uint8Array(bin.length);
         for (let i = 0; i < bin.length; ++i) {
             arr[i] = bin.charCodeAt(i);
@@ -1292,15 +1292,15 @@ function bytesFromBase64(b64) {
     }
 }
 function base64FromBytes(arr) {
-    if (globalThis.Buffer) {
-        return globalThis.Buffer.from(arr).toString("base64");
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
     }
     else {
         const bin = [];
         arr.forEach((byte) => {
             bin.push(String.fromCharCode(byte));
         });
-        return globalThis.btoa(bin.join(""));
+        return tsProtoGlobalThis.btoa(bin.join(""));
     }
 }
 function isSet(value) {
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
index 63ace8db580cc..bcd654e9154b9 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -26,7 +26,7 @@ function hashAlgorithmFromJSON(object) {
         case "SHA2_256":
             return HashAlgorithm.SHA2_256;
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
     }
 }
 exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
@@ -37,7 +37,7 @@ function hashAlgorithmToJSON(object) {
         case HashAlgorithm.SHA2_256:
             return "SHA2_256";
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
     }
 }
 exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
@@ -92,7 +92,7 @@ function publicKeyDetailsFromJSON(object) {
         case "PKIX_ED25519":
             return PublicKeyDetails.PKIX_ED25519;
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
     }
 }
 exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
@@ -115,7 +115,7 @@ function publicKeyDetailsToJSON(object) {
         case PublicKeyDetails.PKIX_ED25519:
             return "PKIX_ED25519";
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
     }
 }
 exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
@@ -146,7 +146,7 @@ function subjectAlternativeNameTypeFromJSON(object) {
         case "OTHER_NAME":
             return SubjectAlternativeNameType.OTHER_NAME;
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
     }
 }
 exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
@@ -161,7 +161,7 @@ function subjectAlternativeNameTypeToJSON(object) {
         case SubjectAlternativeNameType.OTHER_NAME:
             return "OTHER_NAME";
         default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
     }
 }
 exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
@@ -396,7 +396,7 @@ exports.TimeRange = {
         return obj;
     },
 };
-var globalThis = (() => {
+var tsProtoGlobalThis = (() => {
     if (typeof globalThis !== "undefined") {
         return globalThis;
     }
@@ -412,11 +412,11 @@ var globalThis = (() => {
     throw "Unable to locate global object";
 })();
 function bytesFromBase64(b64) {
-    if (globalThis.Buffer) {
-        return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
     }
     else {
-        const bin = globalThis.atob(b64);
+        const bin = tsProtoGlobalThis.atob(b64);
         const arr = new Uint8Array(bin.length);
         for (let i = 0; i < bin.length; ++i) {
             arr[i] = bin.charCodeAt(i);
@@ -425,15 +425,15 @@ function bytesFromBase64(b64) {
     }
 }
 function base64FromBytes(arr) {
-    if (globalThis.Buffer) {
-        return globalThis.Buffer.from(arr).toString("base64");
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
     }
     else {
         const bin = [];
         arr.forEach((byte) => {
             bin.push(String.fromCharCode(byte));
         });
-        return globalThis.btoa(bin.join(""));
+        return tsProtoGlobalThis.btoa(bin.join(""));
     }
 }
 function fromTimestamp(t) {
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
index bffc7700edbec..398193b2075a7 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
@@ -122,7 +122,7 @@ exports.TransparencyLogEntry = {
         return obj;
     },
 };
-var globalThis = (() => {
+var tsProtoGlobalThis = (() => {
     if (typeof globalThis !== "undefined") {
         return globalThis;
     }
@@ -138,11 +138,11 @@ var globalThis = (() => {
     throw "Unable to locate global object";
 })();
 function bytesFromBase64(b64) {
-    if (globalThis.Buffer) {
-        return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
     }
     else {
-        const bin = globalThis.atob(b64);
+        const bin = tsProtoGlobalThis.atob(b64);
         const arr = new Uint8Array(bin.length);
         for (let i = 0; i < bin.length; ++i) {
             arr[i] = bin.charCodeAt(i);
@@ -151,15 +151,15 @@ function bytesFromBase64(b64) {
     }
 }
 function base64FromBytes(arr) {
-    if (globalThis.Buffer) {
-        return globalThis.Buffer.from(arr).toString("base64");
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
     }
     else {
         const bin = [];
         arr.forEach((byte) => {
             bin.push(String.fromCharCode(byte));
         });
-        return globalThis.btoa(bin.join(""));
+        return tsProtoGlobalThis.btoa(bin.join(""));
     }
 }
 function isSet(value) {
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
index b99a305ba5317..8a72b89761869 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
@@ -228,7 +228,7 @@ exports.Input = {
         return obj;
     },
 };
-var globalThis = (() => {
+var tsProtoGlobalThis = (() => {
     if (typeof globalThis !== "undefined") {
         return globalThis;
     }
@@ -244,11 +244,11 @@ var globalThis = (() => {
     throw "Unable to locate global object";
 })();
 function bytesFromBase64(b64) {
-    if (globalThis.Buffer) {
-        return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+    if (tsProtoGlobalThis.Buffer) {
+        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
     }
     else {
-        const bin = globalThis.atob(b64);
+        const bin = tsProtoGlobalThis.atob(b64);
         const arr = new Uint8Array(bin.length);
         for (let i = 0; i < bin.length; ++i) {
             arr[i] = bin.charCodeAt(i);
@@ -257,15 +257,15 @@ function bytesFromBase64(b64) {
     }
 }
 function base64FromBytes(arr) {
-    if (globalThis.Buffer) {
-        return globalThis.Buffer.from(arr).toString("base64");
+    if (tsProtoGlobalThis.Buffer) {
+        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
     }
     else {
         const bin = [];
         arr.forEach((byte) => {
             bin.push(String.fromCharCode(byte));
         });
-        return globalThis.btoa(bin.join(""));
+        return tsProtoGlobalThis.btoa(bin.join(""));
     }
 }
 function isSet(value) {
diff --git a/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/protobuf-specs/package.json
index 7cb4aa9c5364f..450abb157f31a 100644
--- a/node_modules/@sigstore/protobuf-specs/package.json
+++ b/node_modules/@sigstore/protobuf-specs/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@sigstore/protobuf-specs",
-  "version": "0.1.0",
+  "version": "0.2.1",
   "description": "code-signing for npm packages",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE
deleted file mode 100644
index e9e7c1679a09d..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright 2023 The Sigstore Authors
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
deleted file mode 100644
index 0c367a8384454..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
+++ /dev/null
@@ -1,89 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signature = exports.Envelope = void 0;
-function createBaseEnvelope() {
-    return { payload: Buffer.alloc(0), payloadType: "", signatures: [] };
-}
-exports.Envelope = {
-    fromJSON(object) {
-        return {
-            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
-            payloadType: isSet(object.payloadType) ? String(object.payloadType) : "",
-            signatures: Array.isArray(object?.signatures) ? object.signatures.map((e) => exports.Signature.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.payload !== undefined &&
-            (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0)));
-        message.payloadType !== undefined && (obj.payloadType = message.payloadType);
-        if (message.signatures) {
-            obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined);
-        }
-        else {
-            obj.signatures = [];
-        }
-        return obj;
-    },
-};
-function createBaseSignature() {
-    return { sig: Buffer.alloc(0), keyid: "" };
-}
-exports.Signature = {
-    fromJSON(object) {
-        return {
-            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
-            keyid: isSet(object.keyid) ? String(object.keyid) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0)));
-        message.keyid !== undefined && (obj.keyid = message.keyid);
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
deleted file mode 100644
index 073093b8371a8..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
+++ /dev/null
@@ -1,185 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
-/* eslint-disable */
-const any_1 = require("./google/protobuf/any");
-const timestamp_1 = require("./google/protobuf/timestamp");
-function createBaseCloudEvent() {
-    return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined };
-}
-exports.CloudEvent = {
-    fromJSON(object) {
-        return {
-            id: isSet(object.id) ? String(object.id) : "",
-            source: isSet(object.source) ? String(object.source) : "",
-            specVersion: isSet(object.specVersion) ? String(object.specVersion) : "",
-            type: isSet(object.type) ? String(object.type) : "",
-            attributes: isObject(object.attributes)
-                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
-                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
-                    return acc;
-                }, {})
-                : {},
-            data: isSet(object.binaryData)
-                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
-                : isSet(object.textData)
-                    ? { $case: "textData", textData: String(object.textData) }
-                    : isSet(object.protoData)
-                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
-                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.id !== undefined && (obj.id = message.id);
-        message.source !== undefined && (obj.source = message.source);
-        message.specVersion !== undefined && (obj.specVersion = message.specVersion);
-        message.type !== undefined && (obj.type = message.type);
-        obj.attributes = {};
-        if (message.attributes) {
-            Object.entries(message.attributes).forEach(([k, v]) => {
-                obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
-            });
-        }
-        message.data?.$case === "binaryData" &&
-            (obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined);
-        message.data?.$case === "textData" && (obj.textData = message.data?.textData);
-        message.data?.$case === "protoData" &&
-            (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined);
-        return obj;
-    },
-};
-function createBaseCloudEvent_AttributesEntry() {
-    return { key: "", value: undefined };
-}
-exports.CloudEvent_AttributesEntry = {
-    fromJSON(object) {
-        return {
-            key: isSet(object.key) ? String(object.key) : "",
-            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.key !== undefined && (obj.key = message.key);
-        message.value !== undefined &&
-            (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined);
-        return obj;
-    },
-};
-function createBaseCloudEvent_CloudEventAttributeValue() {
-    return { attr: undefined };
-}
-exports.CloudEvent_CloudEventAttributeValue = {
-    fromJSON(object) {
-        return {
-            attr: isSet(object.ceBoolean)
-                ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) }
-                : isSet(object.ceInteger)
-                    ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) }
-                    : isSet(object.ceString)
-                        ? { $case: "ceString", ceString: String(object.ceString) }
-                        : isSet(object.ceBytes)
-                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
-                            : isSet(object.ceUri)
-                                ? { $case: "ceUri", ceUri: String(object.ceUri) }
-                                : isSet(object.ceUriRef)
-                                    ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) }
-                                    : isSet(object.ceTimestamp)
-                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
-                                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean);
-        message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger));
-        message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString);
-        message.attr?.$case === "ceBytes" &&
-            (obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined);
-        message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri);
-        message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef);
-        message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString());
-        return obj;
-    },
-};
-function createBaseCloudEventBatch() {
-    return { events: [] };
-}
-exports.CloudEventBatch = {
-    fromJSON(object) {
-        return { events: Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.events) {
-            obj.events = message.events.map((e) => e ? exports.CloudEvent.toJSON(e) : undefined);
-        }
-        else {
-            obj.events = [];
-        }
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function fromTimestamp(t) {
-    let millis = Number(t.seconds) * 1000;
-    millis += t.nanos / 1000000;
-    return new Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isObject(value) {
-    return typeof value === "object" && value !== null;
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
deleted file mode 100644
index da627499ad765..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
+++ /dev/null
@@ -1,119 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0;
-/**
- * An indicator of the behavior of a given field (for example, that a field
- * is required in requests, or given as output but ignored as input).
- * This **does not** change the behavior in protocol buffers itself; it only
- * denotes the behavior and may affect how API tooling handles the field.
- *
- * Note: This enum **may** receive new values in the future.
- */
-var FieldBehavior;
-(function (FieldBehavior) {
-    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
-    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
-    /**
-     * OPTIONAL - Specifically denotes a field as optional.
-     * While all fields in protocol buffers are optional, this may be specified
-     * for emphasis if appropriate.
-     */
-    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
-    /**
-     * REQUIRED - Denotes a field as required.
-     * This indicates that the field **must** be provided as part of the request,
-     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
-     */
-    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
-    /**
-     * OUTPUT_ONLY - Denotes a field as output only.
-     * This indicates that the field is provided in responses, but including the
-     * field in a request does nothing (the server *must* ignore it and
-     * *must not* throw an error as a result of the field's presence).
-     */
-    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
-    /**
-     * INPUT_ONLY - Denotes a field as input only.
-     * This indicates that the field is provided in requests, and the
-     * corresponding field is not included in output.
-     */
-    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
-    /**
-     * IMMUTABLE - Denotes a field as immutable.
-     * This indicates that the field may be set once in a request to create a
-     * resource, but may not be changed thereafter.
-     */
-    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
-    /**
-     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
-     * This indicates that the service may provide the elements of the list
-     * in any arbitrary order, rather than the order the user originally
-     * provided. Additionally, the list's order may or may not be stable.
-     */
-    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
-})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {}));
-function fieldBehaviorFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "FIELD_BEHAVIOR_UNSPECIFIED":
-            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
-        case 1:
-        case "OPTIONAL":
-            return FieldBehavior.OPTIONAL;
-        case 2:
-        case "REQUIRED":
-            return FieldBehavior.REQUIRED;
-        case 3:
-        case "OUTPUT_ONLY":
-            return FieldBehavior.OUTPUT_ONLY;
-        case 4:
-        case "INPUT_ONLY":
-            return FieldBehavior.INPUT_ONLY;
-        case 5:
-        case "IMMUTABLE":
-            return FieldBehavior.IMMUTABLE;
-        case 6:
-        case "UNORDERED_LIST":
-            return FieldBehavior.UNORDERED_LIST;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
-exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
-function fieldBehaviorToJSON(object) {
-    switch (object) {
-        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
-            return "FIELD_BEHAVIOR_UNSPECIFIED";
-        case FieldBehavior.OPTIONAL:
-            return "OPTIONAL";
-        case FieldBehavior.REQUIRED:
-            return "REQUIRED";
-        case FieldBehavior.OUTPUT_ONLY:
-            return "OUTPUT_ONLY";
-        case FieldBehavior.INPUT_ONLY:
-            return "INPUT_ONLY";
-        case FieldBehavior.IMMUTABLE:
-            return "IMMUTABLE";
-        case FieldBehavior.UNORDERED_LIST:
-            return "UNORDERED_LIST";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
-exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
deleted file mode 100644
index 6b3f3c97a6647..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
+++ /dev/null
@@ -1,65 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Any = void 0;
-function createBaseAny() {
-    return { typeUrl: "", value: Buffer.alloc(0) };
-}
-exports.Any = {
-    fromJSON(object) {
-        return {
-            typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "",
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl);
-        message.value !== undefined &&
-            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
deleted file mode 100644
index d429aac846043..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
+++ /dev/null
@@ -1,1308 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0;
-var FieldDescriptorProto_Type;
-(function (FieldDescriptorProto_Type) {
-    /**
-     * TYPE_DOUBLE - 0 is reserved for errors.
-     * Order is weird for historical reasons.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
-    /**
-     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
-    /**
-     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
-    /**
-     * TYPE_GROUP - Tag-delimited aggregate.
-     * Group type is deprecated and not supported in proto3. However, Proto3
-     * implementations should still be able to parse the group wire format and
-     * treat group fields as unknown fields.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
-    /** TYPE_MESSAGE - Length-delimited aggregate. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
-    /** TYPE_BYTES - New in version 2. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
-    /** TYPE_SINT32 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
-    /** TYPE_SINT64 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
-})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {}));
-function fieldDescriptorProto_TypeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "TYPE_DOUBLE":
-            return FieldDescriptorProto_Type.TYPE_DOUBLE;
-        case 2:
-        case "TYPE_FLOAT":
-            return FieldDescriptorProto_Type.TYPE_FLOAT;
-        case 3:
-        case "TYPE_INT64":
-            return FieldDescriptorProto_Type.TYPE_INT64;
-        case 4:
-        case "TYPE_UINT64":
-            return FieldDescriptorProto_Type.TYPE_UINT64;
-        case 5:
-        case "TYPE_INT32":
-            return FieldDescriptorProto_Type.TYPE_INT32;
-        case 6:
-        case "TYPE_FIXED64":
-            return FieldDescriptorProto_Type.TYPE_FIXED64;
-        case 7:
-        case "TYPE_FIXED32":
-            return FieldDescriptorProto_Type.TYPE_FIXED32;
-        case 8:
-        case "TYPE_BOOL":
-            return FieldDescriptorProto_Type.TYPE_BOOL;
-        case 9:
-        case "TYPE_STRING":
-            return FieldDescriptorProto_Type.TYPE_STRING;
-        case 10:
-        case "TYPE_GROUP":
-            return FieldDescriptorProto_Type.TYPE_GROUP;
-        case 11:
-        case "TYPE_MESSAGE":
-            return FieldDescriptorProto_Type.TYPE_MESSAGE;
-        case 12:
-        case "TYPE_BYTES":
-            return FieldDescriptorProto_Type.TYPE_BYTES;
-        case 13:
-        case "TYPE_UINT32":
-            return FieldDescriptorProto_Type.TYPE_UINT32;
-        case 14:
-        case "TYPE_ENUM":
-            return FieldDescriptorProto_Type.TYPE_ENUM;
-        case 15:
-        case "TYPE_SFIXED32":
-            return FieldDescriptorProto_Type.TYPE_SFIXED32;
-        case 16:
-        case "TYPE_SFIXED64":
-            return FieldDescriptorProto_Type.TYPE_SFIXED64;
-        case 17:
-        case "TYPE_SINT32":
-            return FieldDescriptorProto_Type.TYPE_SINT32;
-        case 18:
-        case "TYPE_SINT64":
-            return FieldDescriptorProto_Type.TYPE_SINT64;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
-function fieldDescriptorProto_TypeToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Type.TYPE_DOUBLE:
-            return "TYPE_DOUBLE";
-        case FieldDescriptorProto_Type.TYPE_FLOAT:
-            return "TYPE_FLOAT";
-        case FieldDescriptorProto_Type.TYPE_INT64:
-            return "TYPE_INT64";
-        case FieldDescriptorProto_Type.TYPE_UINT64:
-            return "TYPE_UINT64";
-        case FieldDescriptorProto_Type.TYPE_INT32:
-            return "TYPE_INT32";
-        case FieldDescriptorProto_Type.TYPE_FIXED64:
-            return "TYPE_FIXED64";
-        case FieldDescriptorProto_Type.TYPE_FIXED32:
-            return "TYPE_FIXED32";
-        case FieldDescriptorProto_Type.TYPE_BOOL:
-            return "TYPE_BOOL";
-        case FieldDescriptorProto_Type.TYPE_STRING:
-            return "TYPE_STRING";
-        case FieldDescriptorProto_Type.TYPE_GROUP:
-            return "TYPE_GROUP";
-        case FieldDescriptorProto_Type.TYPE_MESSAGE:
-            return "TYPE_MESSAGE";
-        case FieldDescriptorProto_Type.TYPE_BYTES:
-            return "TYPE_BYTES";
-        case FieldDescriptorProto_Type.TYPE_UINT32:
-            return "TYPE_UINT32";
-        case FieldDescriptorProto_Type.TYPE_ENUM:
-            return "TYPE_ENUM";
-        case FieldDescriptorProto_Type.TYPE_SFIXED32:
-            return "TYPE_SFIXED32";
-        case FieldDescriptorProto_Type.TYPE_SFIXED64:
-            return "TYPE_SFIXED64";
-        case FieldDescriptorProto_Type.TYPE_SINT32:
-            return "TYPE_SINT32";
-        case FieldDescriptorProto_Type.TYPE_SINT64:
-            return "TYPE_SINT64";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
-var FieldDescriptorProto_Label;
-(function (FieldDescriptorProto_Label) {
-    /** LABEL_OPTIONAL - 0 is reserved for errors */
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
-})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {}));
-function fieldDescriptorProto_LabelFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "LABEL_OPTIONAL":
-            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
-        case 2:
-        case "LABEL_REQUIRED":
-            return FieldDescriptorProto_Label.LABEL_REQUIRED;
-        case 3:
-        case "LABEL_REPEATED":
-            return FieldDescriptorProto_Label.LABEL_REPEATED;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
-function fieldDescriptorProto_LabelToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
-            return "LABEL_OPTIONAL";
-        case FieldDescriptorProto_Label.LABEL_REQUIRED:
-            return "LABEL_REQUIRED";
-        case FieldDescriptorProto_Label.LABEL_REPEATED:
-            return "LABEL_REPEATED";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
-/** Generated classes can be optimized for speed or code size. */
-var FileOptions_OptimizeMode;
-(function (FileOptions_OptimizeMode) {
-    /** SPEED - Generate complete code for parsing, serialization, */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
-    /** CODE_SIZE - etc. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
-    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
-})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {}));
-function fileOptions_OptimizeModeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "SPEED":
-            return FileOptions_OptimizeMode.SPEED;
-        case 2:
-        case "CODE_SIZE":
-            return FileOptions_OptimizeMode.CODE_SIZE;
-        case 3:
-        case "LITE_RUNTIME":
-            return FileOptions_OptimizeMode.LITE_RUNTIME;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
-function fileOptions_OptimizeModeToJSON(object) {
-    switch (object) {
-        case FileOptions_OptimizeMode.SPEED:
-            return "SPEED";
-        case FileOptions_OptimizeMode.CODE_SIZE:
-            return "CODE_SIZE";
-        case FileOptions_OptimizeMode.LITE_RUNTIME:
-            return "LITE_RUNTIME";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
-var FieldOptions_CType;
-(function (FieldOptions_CType) {
-    /** STRING - Default mode. */
-    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
-    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
-    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
-})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {}));
-function fieldOptions_CTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "STRING":
-            return FieldOptions_CType.STRING;
-        case 1:
-        case "CORD":
-            return FieldOptions_CType.CORD;
-        case 2:
-        case "STRING_PIECE":
-            return FieldOptions_CType.STRING_PIECE;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
-function fieldOptions_CTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_CType.STRING:
-            return "STRING";
-        case FieldOptions_CType.CORD:
-            return "CORD";
-        case FieldOptions_CType.STRING_PIECE:
-            return "STRING_PIECE";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
-var FieldOptions_JSType;
-(function (FieldOptions_JSType) {
-    /** JS_NORMAL - Use the default type. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
-    /** JS_STRING - Use JavaScript strings. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
-    /** JS_NUMBER - Use JavaScript numbers. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
-})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {}));
-function fieldOptions_JSTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "JS_NORMAL":
-            return FieldOptions_JSType.JS_NORMAL;
-        case 1:
-        case "JS_STRING":
-            return FieldOptions_JSType.JS_STRING;
-        case 2:
-        case "JS_NUMBER":
-            return FieldOptions_JSType.JS_NUMBER;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
-function fieldOptions_JSTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_JSType.JS_NORMAL:
-            return "JS_NORMAL";
-        case FieldOptions_JSType.JS_STRING:
-            return "JS_STRING";
-        case FieldOptions_JSType.JS_NUMBER:
-            return "JS_NUMBER";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
-/**
- * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
- * or neither? HTTP based RPC implementation may choose GET verb for safe
- * methods, and PUT verb for idempotent methods instead of the default POST.
- */
-var MethodOptions_IdempotencyLevel;
-(function (MethodOptions_IdempotencyLevel) {
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
-    /** NO_SIDE_EFFECTS - implies idempotent */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
-    /** IDEMPOTENT - idempotent, but may have side effects */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
-})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {}));
-function methodOptions_IdempotencyLevelFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "IDEMPOTENCY_UNKNOWN":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
-        case 1:
-        case "NO_SIDE_EFFECTS":
-            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
-        case 2:
-        case "IDEMPOTENT":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
-function methodOptions_IdempotencyLevelToJSON(object) {
-    switch (object) {
-        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
-            return "IDEMPOTENCY_UNKNOWN";
-        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
-            return "NO_SIDE_EFFECTS";
-        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
-            return "IDEMPOTENT";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
-function createBaseFileDescriptorSet() {
-    return { file: [] };
-}
-exports.FileDescriptorSet = {
-    fromJSON(object) {
-        return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.file) {
-            obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.file = [];
-        }
-        return obj;
-    },
-};
-function createBaseFileDescriptorProto() {
-    return {
-        name: "",
-        package: "",
-        dependency: [],
-        publicDependency: [],
-        weakDependency: [],
-        messageType: [],
-        enumType: [],
-        service: [],
-        extension: [],
-        options: undefined,
-        sourceCodeInfo: undefined,
-        syntax: "",
-    };
-}
-exports.FileDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            package: isSet(object.package) ? String(object.package) : "",
-            dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [],
-            publicDependency: Array.isArray(object?.publicDependency)
-                ? object.publicDependency.map((e) => Number(e))
-                : [],
-            weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [],
-            messageType: Array.isArray(object?.messageType)
-                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
-            service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [],
-            extension: Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
-            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
-            syntax: isSet(object.syntax) ? String(object.syntax) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.package !== undefined && (obj.package = message.package);
-        if (message.dependency) {
-            obj.dependency = message.dependency.map((e) => e);
-        }
-        else {
-            obj.dependency = [];
-        }
-        if (message.publicDependency) {
-            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
-        }
-        else {
-            obj.publicDependency = [];
-        }
-        if (message.weakDependency) {
-            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
-        }
-        else {
-            obj.weakDependency = [];
-        }
-        if (message.messageType) {
-            obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.messageType = [];
-        }
-        if (message.enumType) {
-            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.enumType = [];
-        }
-        if (message.service) {
-            obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.service = [];
-        }
-        if (message.extension) {
-            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.extension = [];
-        }
-        message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined);
-        message.sourceCodeInfo !== undefined &&
-            (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined);
-        message.syntax !== undefined && (obj.syntax = message.syntax);
-        return obj;
-    },
-};
-function createBaseDescriptorProto() {
-    return {
-        name: "",
-        field: [],
-        extension: [],
-        nestedType: [],
-        enumType: [],
-        extensionRange: [],
-        oneofDecl: [],
-        options: undefined,
-        reservedRange: [],
-        reservedName: [],
-    };
-}
-exports.DescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [],
-            extension: Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            nestedType: Array.isArray(object?.nestedType)
-                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
-            extensionRange: Array.isArray(object?.extensionRange)
-                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
-                : [],
-            oneofDecl: Array.isArray(object?.oneofDecl)
-                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
-            reservedRange: Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
-                : [],
-            reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        if (message.field) {
-            obj.field = message.field.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.field = [];
-        }
-        if (message.extension) {
-            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.extension = [];
-        }
-        if (message.nestedType) {
-            obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.nestedType = [];
-        }
-        if (message.enumType) {
-            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.enumType = [];
-        }
-        if (message.extensionRange) {
-            obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined);
-        }
-        else {
-            obj.extensionRange = [];
-        }
-        if (message.oneofDecl) {
-            obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.oneofDecl = [];
-        }
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined);
-        if (message.reservedRange) {
-            obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined);
-        }
-        else {
-            obj.reservedRange = [];
-        }
-        if (message.reservedName) {
-            obj.reservedName = message.reservedName.map((e) => e);
-        }
-        else {
-            obj.reservedName = [];
-        }
-        return obj;
-    },
-};
-function createBaseDescriptorProto_ExtensionRange() {
-    return { start: 0, end: 0, options: undefined };
-}
-exports.DescriptorProto_ExtensionRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? Number(object.start) : 0,
-            end: isSet(object.end) ? Number(object.end) : 0,
-            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = Math.round(message.start));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseDescriptorProto_ReservedRange() {
-    return { start: 0, end: 0 };
-}
-exports.DescriptorProto_ReservedRange = {
-    fromJSON(object) {
-        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = Math.round(message.start));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        return obj;
-    },
-};
-function createBaseExtensionRangeOptions() {
-    return { uninterpretedOption: [] };
-}
-exports.ExtensionRangeOptions = {
-    fromJSON(object) {
-        return {
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseFieldDescriptorProto() {
-    return {
-        name: "",
-        number: 0,
-        label: 1,
-        type: 1,
-        typeName: "",
-        extendee: "",
-        defaultValue: "",
-        oneofIndex: 0,
-        jsonName: "",
-        options: undefined,
-        proto3Optional: false,
-    };
-}
-exports.FieldDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            number: isSet(object.number) ? Number(object.number) : 0,
-            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
-            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
-            typeName: isSet(object.typeName) ? String(object.typeName) : "",
-            extendee: isSet(object.extendee) ? String(object.extendee) : "",
-            defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "",
-            oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0,
-            jsonName: isSet(object.jsonName) ? String(object.jsonName) : "",
-            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
-            proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.number !== undefined && (obj.number = Math.round(message.number));
-        message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label));
-        message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type));
-        message.typeName !== undefined && (obj.typeName = message.typeName);
-        message.extendee !== undefined && (obj.extendee = message.extendee);
-        message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue);
-        message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex));
-        message.jsonName !== undefined && (obj.jsonName = message.jsonName);
-        message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined);
-        message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional);
-        return obj;
-    },
-};
-function createBaseOneofDescriptorProto() {
-    return { name: "", options: undefined };
-}
-exports.OneofDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseEnumDescriptorProto() {
-    return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] };
-}
-exports.EnumDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [],
-            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
-            reservedRange: Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
-                : [],
-            reservedName: Array.isArray(object?.reservedName)
-                ? object.reservedName.map((e) => String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        if (message.value) {
-            obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.value = [];
-        }
-        message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined);
-        if (message.reservedRange) {
-            obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined);
-        }
-        else {
-            obj.reservedRange = [];
-        }
-        if (message.reservedName) {
-            obj.reservedName = message.reservedName.map((e) => e);
-        }
-        else {
-            obj.reservedName = [];
-        }
-        return obj;
-    },
-};
-function createBaseEnumDescriptorProto_EnumReservedRange() {
-    return { start: 0, end: 0 };
-}
-exports.EnumDescriptorProto_EnumReservedRange = {
-    fromJSON(object) {
-        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = Math.round(message.start));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        return obj;
-    },
-};
-function createBaseEnumValueDescriptorProto() {
-    return { name: "", number: 0, options: undefined };
-}
-exports.EnumValueDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            number: isSet(object.number) ? Number(object.number) : 0,
-            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.number !== undefined && (obj.number = Math.round(message.number));
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseServiceDescriptorProto() {
-    return { name: "", method: [], options: undefined };
-}
-exports.ServiceDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [],
-            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        if (message.method) {
-            obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.method = [];
-        }
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseMethodDescriptorProto() {
-    return {
-        name: "",
-        inputType: "",
-        outputType: "",
-        options: undefined,
-        clientStreaming: false,
-        serverStreaming: false,
-    };
-}
-exports.MethodDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            inputType: isSet(object.inputType) ? String(object.inputType) : "",
-            outputType: isSet(object.outputType) ? String(object.outputType) : "",
-            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
-            clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false,
-            serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.inputType !== undefined && (obj.inputType = message.inputType);
-        message.outputType !== undefined && (obj.outputType = message.outputType);
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined);
-        message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming);
-        message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming);
-        return obj;
-    },
-};
-function createBaseFileOptions() {
-    return {
-        javaPackage: "",
-        javaOuterClassname: "",
-        javaMultipleFiles: false,
-        javaGenerateEqualsAndHash: false,
-        javaStringCheckUtf8: false,
-        optimizeFor: 1,
-        goPackage: "",
-        ccGenericServices: false,
-        javaGenericServices: false,
-        pyGenericServices: false,
-        phpGenericServices: false,
-        deprecated: false,
-        ccEnableArenas: false,
-        objcClassPrefix: "",
-        csharpNamespace: "",
-        swiftPrefix: "",
-        phpClassPrefix: "",
-        phpNamespace: "",
-        phpMetadataNamespace: "",
-        rubyPackage: "",
-        uninterpretedOption: [],
-    };
-}
-exports.FileOptions = {
-    fromJSON(object) {
-        return {
-            javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "",
-            javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "",
-            javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false,
-            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
-                ? Boolean(object.javaGenerateEqualsAndHash)
-                : false,
-            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false,
-            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
-            goPackage: isSet(object.goPackage) ? String(object.goPackage) : "",
-            ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false,
-            javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false,
-            pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false,
-            phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false,
-            objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "",
-            csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "",
-            swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "",
-            phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "",
-            phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "",
-            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "",
-            rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "",
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage);
-        message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname);
-        message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles);
-        message.javaGenerateEqualsAndHash !== undefined &&
-            (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash);
-        message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8);
-        message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor));
-        message.goPackage !== undefined && (obj.goPackage = message.goPackage);
-        message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices);
-        message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices);
-        message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices);
-        message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas);
-        message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix);
-        message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace);
-        message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix);
-        message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix);
-        message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace);
-        message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace);
-        message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseMessageOptions() {
-    return {
-        messageSetWireFormat: false,
-        noStandardDescriptorAccessor: false,
-        deprecated: false,
-        mapEntry: false,
-        uninterpretedOption: [],
-    };
-}
-exports.MessageOptions = {
-    fromJSON(object) {
-        return {
-            messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false,
-            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
-                ? Boolean(object.noStandardDescriptorAccessor)
-                : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat);
-        message.noStandardDescriptorAccessor !== undefined &&
-            (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseFieldOptions() {
-    return {
-        ctype: 0,
-        packed: false,
-        jstype: 0,
-        lazy: false,
-        unverifiedLazy: false,
-        deprecated: false,
-        weak: false,
-        uninterpretedOption: [],
-    };
-}
-exports.FieldOptions = {
-    fromJSON(object) {
-        return {
-            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
-            packed: isSet(object.packed) ? Boolean(object.packed) : false,
-            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
-            lazy: isSet(object.lazy) ? Boolean(object.lazy) : false,
-            unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            weak: isSet(object.weak) ? Boolean(object.weak) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype));
-        message.packed !== undefined && (obj.packed = message.packed);
-        message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype));
-        message.lazy !== undefined && (obj.lazy = message.lazy);
-        message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.weak !== undefined && (obj.weak = message.weak);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseOneofOptions() {
-    return { uninterpretedOption: [] };
-}
-exports.OneofOptions = {
-    fromJSON(object) {
-        return {
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseEnumOptions() {
-    return { allowAlias: false, deprecated: false, uninterpretedOption: [] };
-}
-exports.EnumOptions = {
-    fromJSON(object) {
-        return {
-            allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseEnumValueOptions() {
-    return { deprecated: false, uninterpretedOption: [] };
-}
-exports.EnumValueOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseServiceOptions() {
-    return { deprecated: false, uninterpretedOption: [] };
-}
-exports.ServiceOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseMethodOptions() {
-    return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] };
-}
-exports.MethodOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            idempotencyLevel: isSet(object.idempotencyLevel)
-                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
-                : 0,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.idempotencyLevel !== undefined &&
-            (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel));
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseUninterpretedOption() {
-    return {
-        name: [],
-        identifierValue: "",
-        positiveIntValue: "0",
-        negativeIntValue: "0",
-        doubleValue: 0,
-        stringValue: Buffer.alloc(0),
-        aggregateValue: "",
-    };
-}
-exports.UninterpretedOption = {
-    fromJSON(object) {
-        return {
-            name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [],
-            identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "",
-            positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0",
-            negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0",
-            doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0,
-            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
-            aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name) {
-            obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined);
-        }
-        else {
-            obj.name = [];
-        }
-        message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue);
-        message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue);
-        message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue);
-        message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue);
-        message.stringValue !== undefined &&
-            (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0)));
-        message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue);
-        return obj;
-    },
-};
-function createBaseUninterpretedOption_NamePart() {
-    return { namePart: "", isExtension: false };
-}
-exports.UninterpretedOption_NamePart = {
-    fromJSON(object) {
-        return {
-            namePart: isSet(object.namePart) ? String(object.namePart) : "",
-            isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.namePart !== undefined && (obj.namePart = message.namePart);
-        message.isExtension !== undefined && (obj.isExtension = message.isExtension);
-        return obj;
-    },
-};
-function createBaseSourceCodeInfo() {
-    return { location: [] };
-}
-exports.SourceCodeInfo = {
-    fromJSON(object) {
-        return {
-            location: Array.isArray(object?.location)
-                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.location) {
-            obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined);
-        }
-        else {
-            obj.location = [];
-        }
-        return obj;
-    },
-};
-function createBaseSourceCodeInfo_Location() {
-    return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] };
-}
-exports.SourceCodeInfo_Location = {
-    fromJSON(object) {
-        return {
-            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
-            span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [],
-            leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "",
-            trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "",
-            leadingDetachedComments: Array.isArray(object?.leadingDetachedComments)
-                ? object.leadingDetachedComments.map((e) => String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        else {
-            obj.path = [];
-        }
-        if (message.span) {
-            obj.span = message.span.map((e) => Math.round(e));
-        }
-        else {
-            obj.span = [];
-        }
-        message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments);
-        message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments);
-        if (message.leadingDetachedComments) {
-            obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e);
-        }
-        else {
-            obj.leadingDetachedComments = [];
-        }
-        return obj;
-    },
-};
-function createBaseGeneratedCodeInfo() {
-    return { annotation: [] };
-}
-exports.GeneratedCodeInfo = {
-    fromJSON(object) {
-        return {
-            annotation: Array.isArray(object?.annotation)
-                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.annotation) {
-            obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined);
-        }
-        else {
-            obj.annotation = [];
-        }
-        return obj;
-    },
-};
-function createBaseGeneratedCodeInfo_Annotation() {
-    return { path: [], sourceFile: "", begin: 0, end: 0 };
-}
-exports.GeneratedCodeInfo_Annotation = {
-    fromJSON(object) {
-        return {
-            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
-            sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "",
-            begin: isSet(object.begin) ? Number(object.begin) : 0,
-            end: isSet(object.end) ? Number(object.end) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        else {
-            obj.path = [];
-        }
-        message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile);
-        message.begin !== undefined && (obj.begin = Math.round(message.begin));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
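
Every codec deleted above follows the same ts-proto shape: fromJSON coerces each field from untrusted JSON and falls back to the proto3 default when a field is absent, while toJSON emits only the fields that are set, rounding integers and recursing into nested message codecs. A minimal standalone sketch of that pattern, mirroring the generated DescriptorProto_ReservedRange codec (illustrative only, not part of this patch):

const isSet = (value) => value !== null && value !== undefined;

const ReservedRange = {
  // Coerce untrusted JSON, falling back to proto3 defaults for unset fields.
  fromJSON(object) {
    return {
      start: isSet(object.start) ? Number(object.start) : 0,
      end: isSet(object.end) ? Number(object.end) : 0,
    };
  },
  // Emit only the fields that are set; integers are normalized with Math.round.
  toJSON(message) {
    const obj = {};
    message.start !== undefined && (obj.start = Math.round(message.start));
    message.end !== undefined && (obj.end = Math.round(message.end));
    return obj;
  },
};

console.log(ReservedRange.fromJSON({ start: "3" }));      // { start: 3, end: 0 }
console.log(ReservedRange.toJSON({ start: 3.2, end: 7 })); // { start: 3, end: 7 }
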
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
deleted file mode 100644
index 159135fe87172..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Timestamp = void 0;
-function createBaseTimestamp() {
-    return { seconds: "0", nanos: 0 };
-}
-exports.Timestamp = {
-    fromJSON(object) {
-        return {
-            seconds: isSet(object.seconds) ? String(object.seconds) : "0",
-            nanos: isSet(object.nanos) ? Number(object.nanos) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.seconds !== undefined && (obj.seconds = message.seconds);
-        message.nanos !== undefined && (obj.nanos = Math.round(message.nanos));
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
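
One behavior of the Timestamp codec above worth noting: proto3 JSON maps int64 to a decimal string, so seconds stays a string while nanos (an int32) is coerced to a number. A standalone sketch of that asymmetry, reusing the same isSet helper:

const isSet = (value) => value !== null && value !== undefined;

const Timestamp = {
  fromJSON(object) {
    return {
      // int64 travels as a decimal string in proto3 JSON.
      seconds: isSet(object.seconds) ? String(object.seconds) : "0",
      // int32 fits in a JS number, so it is coerced.
      nanos: isSet(object.nanos) ? Number(object.nanos) : 0,
    };
  },
};

console.log(Timestamp.fromJSON({ seconds: 1700000000, nanos: "250000000" }));
// { seconds: '1700000000', nanos: 250000000 }
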
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
deleted file mode 100644
index 1ef3e1b3356b7..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
+++ /dev/null
@@ -1,106 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
-/* eslint-disable */
-const envelope_1 = require("./envelope");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_rekor_1 = require("./sigstore_rekor");
-function createBaseTimestampVerificationData() {
-    return { rfc3161Timestamps: [] };
-}
-exports.TimestampVerificationData = {
-    fromJSON(object) {
-        return {
-            rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps)
-                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rfc3161Timestamps) {
-            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined);
-        }
-        else {
-            obj.rfc3161Timestamps = [];
-        }
-        return obj;
-    },
-};
-function createBaseVerificationMaterial() {
-    return { content: undefined, tlogEntries: [], timestampVerificationData: undefined };
-}
-exports.VerificationMaterial = {
-    fromJSON(object) {
-        return {
-            content: isSet(object.publicKey)
-                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
-                : isSet(object.x509CertificateChain)
-                    ? {
-                        $case: "x509CertificateChain",
-                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
-                    }
-                    : undefined,
-            tlogEntries: Array.isArray(object?.tlogEntries)
-                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
-                : [],
-            timestampVerificationData: isSet(object.timestampVerificationData)
-                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.content?.$case === "publicKey" &&
-            (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined);
-        message.content?.$case === "x509CertificateChain" &&
-            (obj.x509CertificateChain = message.content?.x509CertificateChain
-                ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain)
-                : undefined);
-        if (message.tlogEntries) {
-            obj.tlogEntries = message.tlogEntries.map((e) => e ? sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
-        }
-        else {
-            obj.tlogEntries = [];
-        }
-        message.timestampVerificationData !== undefined &&
-            (obj.timestampVerificationData = message.timestampVerificationData
-                ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
-                : undefined);
-        return obj;
-    },
-};
-function createBaseBundle() {
-    return { mediaType: "", verificationMaterial: undefined, content: undefined };
-}
-exports.Bundle = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
-            verificationMaterial: isSet(object.verificationMaterial)
-                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
-                : undefined,
-            content: isSet(object.messageSignature)
-                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
-                : isSet(object.dsseEnvelope)
-                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
-        message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
-            ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
-            : undefined);
-        message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
-            ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
-            : undefined);
-        message.content?.$case === "dsseEnvelope" &&
-            (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
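
The Bundle and VerificationMaterial codecs above encode each proto oneof as a tagged union with a $case discriminator: fromJSON probes which JSON key is present to pick the branch, and toJSON flattens the active branch back onto the plain object. A simplified sketch of just that dispatch for Bundle's content field (the real codec additionally runs each branch through the MessageSignature or Envelope codec):

function contentFromJSON(object) {
  if (object.messageSignature !== undefined && object.messageSignature !== null) {
    return { $case: "messageSignature", messageSignature: object.messageSignature };
  }
  if (object.dsseEnvelope !== undefined && object.dsseEnvelope !== null) {
    return { $case: "dsseEnvelope", dsseEnvelope: object.dsseEnvelope };
  }
  return undefined; // neither branch present
}

function contentToJSON(content) {
  const obj = {};
  // Only the active branch is written back to JSON.
  content?.$case === "messageSignature" && (obj.messageSignature = content.messageSignature);
  content?.$case === "dsseEnvelope" && (obj.dsseEnvelope = content.dsseEnvelope);
  return obj;
}

console.log(contentFromJSON({ dsseEnvelope: { payload: "aGk=" } }).$case); // 'dsseEnvelope'
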
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
deleted file mode 100644
index bcd654e9154b9..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
+++ /dev/null
@@ -1,457 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
-/* eslint-disable */
-const timestamp_1 = require("./google/protobuf/timestamp");
-/**
- * Only a subset of the secure hash standard algorithms are supported.
- * See https://csrc.nist.gov/projects/hash-functions for more
- * details.
- * UNSPECIFIED SHOULD NOT be used; the primary reason for its inclusion is
- * to force any proto JSON serialization to emit the used hash algorithm, as
- * the default option is to *omit* the default value of an enum (which is
- * the first value, represented by '0').
- */
-var HashAlgorithm;
-(function (HashAlgorithm) {
-    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
-    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
-})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {}));
-function hashAlgorithmFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "HASH_ALGORITHM_UNSPECIFIED":
-            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
-        case 1:
-        case "SHA2_256":
-            return HashAlgorithm.SHA2_256;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
-function hashAlgorithmToJSON(object) {
-    switch (object) {
-        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
-            return "HASH_ALGORITHM_UNSPECIFIED";
-        case HashAlgorithm.SHA2_256:
-            return "SHA2_256";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
-/**
- * Details of a specific public key, capturing the key encoding method
- * and signature algorithm.
- * To avoid the possibility of contradicting formats such as PKCS1 with
- * ED25519, the valid permutations are listed as a linear set instead of a
- * Cartesian set (i.e. one combined variable instead of two, one for encoding
- * and one for the signature algorithm).
- */
-var PublicKeyDetails;
-(function (PublicKeyDetails) {
-    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-    /** PKCS1_RSA_PKCS1V5 - RSA */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
-    /** PKCS1_RSA_PSS - See RFC8017 */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
-    /** PKIX_ECDSA_P256_SHA_256 - ECDSA */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
-    /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
-    /** PKIX_ED25519 - Ed25519 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
-})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {}));
-function publicKeyDetailsFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
-            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
-        case 1:
-        case "PKCS1_RSA_PKCS1V5":
-            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
-        case 2:
-        case "PKCS1_RSA_PSS":
-            return PublicKeyDetails.PKCS1_RSA_PSS;
-        case 3:
-        case "PKIX_RSA_PKCS1V5":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
-        case 4:
-        case "PKIX_RSA_PSS":
-            return PublicKeyDetails.PKIX_RSA_PSS;
-        case 5:
-        case "PKIX_ECDSA_P256_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
-        case 6:
-        case "PKIX_ECDSA_P256_HMAC_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
-        case 7:
-        case "PKIX_ED25519":
-            return PublicKeyDetails.PKIX_ED25519;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
-function publicKeyDetailsToJSON(object) {
-    switch (object) {
-        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
-            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
-            return "PKCS1_RSA_PKCS1V5";
-        case PublicKeyDetails.PKCS1_RSA_PSS:
-            return "PKCS1_RSA_PSS";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
-            return "PKIX_RSA_PKCS1V5";
-        case PublicKeyDetails.PKIX_RSA_PSS:
-            return "PKIX_RSA_PSS";
-        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
-            return "PKIX_ECDSA_P256_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
-            return "PKIX_ECDSA_P256_HMAC_SHA_256";
-        case PublicKeyDetails.PKIX_ED25519:
-            return "PKIX_ED25519";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
-var SubjectAlternativeNameType;
-(function (SubjectAlternativeNameType) {
-    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
-    /**
-     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
-     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
-     * for more details.
-     */
-    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
-})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {}));
-function subjectAlternativeNameTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
-            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
-        case 1:
-        case "EMAIL":
-            return SubjectAlternativeNameType.EMAIL;
-        case 2:
-        case "URI":
-            return SubjectAlternativeNameType.URI;
-        case 3:
-        case "OTHER_NAME":
-            return SubjectAlternativeNameType.OTHER_NAME;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
-function subjectAlternativeNameTypeToJSON(object) {
-    switch (object) {
-        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
-            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-        case SubjectAlternativeNameType.EMAIL:
-            return "EMAIL";
-        case SubjectAlternativeNameType.URI:
-            return "URI";
-        case SubjectAlternativeNameType.OTHER_NAME:
-            return "OTHER_NAME";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
-function createBaseHashOutput() {
-    return { algorithm: 0, digest: Buffer.alloc(0) };
-}
-exports.HashOutput = {
-    fromJSON(object) {
-        return {
-            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
-            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm));
-        message.digest !== undefined &&
-            (obj.digest = base64FromBytes(message.digest !== undefined ? message.digest : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseMessageSignature() {
-    return { messageDigest: undefined, signature: Buffer.alloc(0) };
-}
-exports.MessageSignature = {
-    fromJSON(object) {
-        return {
-            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
-            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.messageDigest !== undefined &&
-            (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined);
-        message.signature !== undefined &&
-            (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseLogId() {
-    return { keyId: Buffer.alloc(0) };
-}
-exports.LogId = {
-    fromJSON(object) {
-        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.keyId !== undefined &&
-            (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseRFC3161SignedTimestamp() {
-    return { signedTimestamp: Buffer.alloc(0) };
-}
-exports.RFC3161SignedTimestamp = {
-    fromJSON(object) {
-        return {
-            signedTimestamp: isSet(object.signedTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.signedTimestamp !== undefined &&
-            (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBasePublicKey() {
-    return { rawBytes: undefined, keyDetails: 0, validFor: undefined };
-}
-exports.PublicKey = {
-    fromJSON(object) {
-        return {
-            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
-            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
-            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.rawBytes !== undefined &&
-            (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined);
-        message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails));
-        message.validFor !== undefined &&
-            (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined);
-        return obj;
-    },
-};
-function createBasePublicKeyIdentifier() {
-    return { hint: "" };
-}
-exports.PublicKeyIdentifier = {
-    fromJSON(object) {
-        return { hint: isSet(object.hint) ? String(object.hint) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.hint !== undefined && (obj.hint = message.hint);
-        return obj;
-    },
-};
-function createBaseObjectIdentifier() {
-    return { id: [] };
-}
-exports.ObjectIdentifier = {
-    fromJSON(object) {
-        return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.id) {
-            obj.id = message.id.map((e) => Math.round(e));
-        }
-        else {
-            obj.id = [];
-        }
-        return obj;
-    },
-};
-function createBaseObjectIdentifierValuePair() {
-    return { oid: undefined, value: Buffer.alloc(0) };
-}
-exports.ObjectIdentifierValuePair = {
-    fromJSON(object) {
-        return {
-            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined);
-        message.value !== undefined &&
-            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseDistinguishedName() {
-    return { organization: "", commonName: "" };
-}
-exports.DistinguishedName = {
-    fromJSON(object) {
-        return {
-            organization: isSet(object.organization) ? String(object.organization) : "",
-            commonName: isSet(object.commonName) ? String(object.commonName) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.organization !== undefined && (obj.organization = message.organization);
-        message.commonName !== undefined && (obj.commonName = message.commonName);
-        return obj;
-    },
-};
-function createBaseX509Certificate() {
-    return { rawBytes: Buffer.alloc(0) };
-}
-exports.X509Certificate = {
-    fromJSON(object) {
-        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.rawBytes !== undefined &&
-            (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseSubjectAlternativeName() {
-    return { type: 0, identity: undefined };
-}
-exports.SubjectAlternativeName = {
-    fromJSON(object) {
-        return {
-            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
-            identity: isSet(object.regexp)
-                ? { $case: "regexp", regexp: String(object.regexp) }
-                : isSet(object.value)
-                    ? { $case: "value", value: String(object.value) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type));
-        message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp);
-        message.identity?.$case === "value" && (obj.value = message.identity?.value);
-        return obj;
-    },
-};
-function createBaseX509CertificateChain() {
-    return { certificates: [] };
-}
-exports.X509CertificateChain = {
-    fromJSON(object) {
-        return {
-            certificates: Array.isArray(object?.certificates)
-                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.certificates) {
-            obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined);
-        }
-        else {
-            obj.certificates = [];
-        }
-        return obj;
-    },
-};
-function createBaseTimeRange() {
-    return { start: undefined, end: undefined };
-}
-exports.TimeRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
-            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = message.start.toISOString());
-        message.end !== undefined && (obj.end = message.end.toISOString());
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function fromTimestamp(t) {
-    let millis = Number(t.seconds) * 1000;
-    millis += t.nanos / 1000000;
-    return new Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
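
The TimeRange codec above is the one place these files convert between wire timestamps and JS Date objects: fromJsonTimestamp accepts a Date, an ISO string, or a proto Timestamp, and fromTimestamp does the millisecond arithmetic on the int64-as-string seconds field. A standalone sketch of that conversion:

// {seconds: string, nanos: number} -> Date via millisecond arithmetic.
function fromTimestamp(t) {
  return new Date(Number(t.seconds) * 1000 + t.nanos / 1_000_000);
}

console.log(fromTimestamp({ seconds: "1700000000", nanos: 250000000 }).toISOString());
// 2023-11-14T22:13:20.250Z

Precision below one millisecond is discarded in the process, since a Date time value only resolves to whole milliseconds.
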
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
deleted file mode 100644
index 398193b2075a7..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
+++ /dev/null
@@ -1,167 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-function createBaseKindVersion() {
-    return { kind: "", version: "" };
-}
-exports.KindVersion = {
-    fromJSON(object) {
-        return {
-            kind: isSet(object.kind) ? String(object.kind) : "",
-            version: isSet(object.version) ? String(object.version) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.kind !== undefined && (obj.kind = message.kind);
-        message.version !== undefined && (obj.version = message.version);
-        return obj;
-    },
-};
-function createBaseCheckpoint() {
-    return { envelope: "" };
-}
-exports.Checkpoint = {
-    fromJSON(object) {
-        return { envelope: isSet(object.envelope) ? String(object.envelope) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.envelope !== undefined && (obj.envelope = message.envelope);
-        return obj;
-    },
-};
-function createBaseInclusionProof() {
-    return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined };
-}
-exports.InclusionProof = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
-            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
-            treeSize: isSet(object.treeSize) ? String(object.treeSize) : "0",
-            hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [],
-            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
-        message.rootHash !== undefined &&
-            (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0)));
-        message.treeSize !== undefined && (obj.treeSize = message.treeSize);
-        if (message.hashes) {
-            obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0)));
-        }
-        else {
-            obj.hashes = [];
-        }
-        message.checkpoint !== undefined &&
-            (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined);
-        return obj;
-    },
-};
-function createBaseInclusionPromise() {
-    return { signedEntryTimestamp: Buffer.alloc(0) };
-}
-exports.InclusionPromise = {
-    fromJSON(object) {
-        return {
-            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.signedEntryTimestamp !== undefined &&
-            (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseTransparencyLogEntry() {
-    return {
-        logIndex: "0",
-        logId: undefined,
-        kindVersion: undefined,
-        integratedTime: "0",
-        inclusionPromise: undefined,
-        inclusionProof: undefined,
-        canonicalizedBody: Buffer.alloc(0),
-    };
-}
-exports.TransparencyLogEntry = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
-            integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0",
-            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
-            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
-            canonicalizedBody: isSet(object.canonicalizedBody)
-                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
-        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
-        message.kindVersion !== undefined &&
-            (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined);
-        message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime);
-        message.inclusionPromise !== undefined &&
-            (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined);
-        message.inclusionProof !== undefined &&
-            (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined);
-        message.canonicalizedBody !== undefined &&
-            (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? message.canonicalizedBody : Buffer.alloc(0)));
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
deleted file mode 100644
index 05e566767cdb2..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
+++ /dev/null
@@ -1,103 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-function createBaseTransparencyLogInstance() {
-    return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined };
-}
-exports.TransparencyLogInstance = {
-    fromJSON(object) {
-        return {
-            baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "",
-            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
-            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl);
-        message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm));
-        message.publicKey !== undefined &&
-            (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined);
-        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
-        return obj;
-    },
-};
-function createBaseCertificateAuthority() {
-    return { subject: undefined, uri: "", certChain: undefined, validFor: undefined };
-}
-exports.CertificateAuthority = {
-    fromJSON(object) {
-        return {
-            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
-            uri: isSet(object.uri) ? String(object.uri) : "",
-            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
-            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.subject !== undefined &&
-            (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined);
-        message.uri !== undefined && (obj.uri = message.uri);
-        message.certChain !== undefined &&
-            (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined);
-        message.validFor !== undefined &&
-            (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined);
-        return obj;
-    },
-};
-function createBaseTrustedRoot() {
-    return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] };
-}
-exports.TrustedRoot = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
-            tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [],
-            certificateAuthorities: Array.isArray(object?.certificateAuthorities)
-                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-            ctlogs: Array.isArray(object?.ctlogs)
-                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
-                : [],
-            timestampAuthorities: Array.isArray(object?.timestampAuthorities)
-                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
-        if (message.tlogs) {
-            obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
-        }
-        else {
-            obj.tlogs = [];
-        }
-        if (message.certificateAuthorities) {
-            obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
-        }
-        else {
-            obj.certificateAuthorities = [];
-        }
-        if (message.ctlogs) {
-            obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
-        }
-        else {
-            obj.ctlogs = [];
-        }
-        if (message.timestampAuthorities) {
-            obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
-        }
-        else {
-            obj.timestampAuthorities = [];
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
deleted file mode 100644
index 8a72b89761869..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
+++ /dev/null
@@ -1,273 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
-/* eslint-disable */
-const sigstore_bundle_1 = require("./sigstore_bundle");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_trustroot_1 = require("./sigstore_trustroot");
-function createBaseCertificateIdentity() {
-    return { issuer: "", san: undefined, oids: [] };
-}
-exports.CertificateIdentity = {
-    fromJSON(object) {
-        return {
-            issuer: isSet(object.issuer) ? String(object.issuer) : "",
-            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
-            oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.issuer !== undefined && (obj.issuer = message.issuer);
-        message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined);
-        if (message.oids) {
-            obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined);
-        }
-        else {
-            obj.oids = [];
-        }
-        return obj;
-    },
-};
-function createBaseCertificateIdentities() {
-    return { identities: [] };
-}
-exports.CertificateIdentities = {
-    fromJSON(object) {
-        return {
-            identities: Array.isArray(object?.identities)
-                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.identities) {
-            obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined);
-        }
-        else {
-            obj.identities = [];
-        }
-        return obj;
-    },
-};
-function createBasePublicKeyIdentities() {
-    return { publicKeys: [] };
-}
-exports.PublicKeyIdentities = {
-    fromJSON(object) {
-        return {
-            publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.publicKeys) {
-            obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined);
-        }
-        else {
-            obj.publicKeys = [];
-        }
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions() {
-    return { signers: undefined, tlogOptions: undefined, ctlogOptions: undefined, tsaOptions: undefined };
-}
-exports.ArtifactVerificationOptions = {
-    fromJSON(object) {
-        return {
-            signers: isSet(object.certificateIdentities)
-                ? {
-                    $case: "certificateIdentities",
-                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
-                }
-                : isSet(object.publicKeys)
-                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
-                    : undefined,
-            tlogOptions: isSet(object.tlogOptions)
-                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
-                : undefined,
-            ctlogOptions: isSet(object.ctlogOptions)
-                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
-                : undefined,
-            tsaOptions: isSet(object.tsaOptions)
-                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.signers?.$case === "certificateIdentities" &&
-            (obj.certificateIdentities = message.signers?.certificateIdentities
-                ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities)
-                : undefined);
-        message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys
-            ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys)
-            : undefined);
-        message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions
-            ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions)
-            : undefined);
-        message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions
-            ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions)
-            : undefined);
-        message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions
-            ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions)
-            : undefined);
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions_TlogOptions() {
-    return { threshold: 0, performOnlineVerification: false, disable: false };
-}
-exports.ArtifactVerificationOptions_TlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
-            performOnlineVerification: isSet(object.performOnlineVerification)
-                ? Boolean(object.performOnlineVerification)
-                : false,
-            disable: isSet(object.disable) ? Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
-        message.performOnlineVerification !== undefined &&
-            (obj.performOnlineVerification = message.performOnlineVerification);
-        message.disable !== undefined && (obj.disable = message.disable);
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions_CtlogOptions() {
-    return { threshold: 0, detachedSct: false, disable: false };
-}
-exports.ArtifactVerificationOptions_CtlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
-            detachedSct: isSet(object.detachedSct) ? Boolean(object.detachedSct) : false,
-            disable: isSet(object.disable) ? Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
-        message.detachedSct !== undefined && (obj.detachedSct = message.detachedSct);
-        message.disable !== undefined && (obj.disable = message.disable);
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() {
-    return { threshold: 0, disable: false };
-}
-exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
-        message.disable !== undefined && (obj.disable = message.disable);
-        return obj;
-    },
-};
-function createBaseArtifact() {
-    return { data: undefined };
-}
-exports.Artifact = {
-    fromJSON(object) {
-        return {
-            data: isSet(object.artifactUri)
-                ? { $case: "artifactUri", artifactUri: String(object.artifactUri) }
-                : isSet(object.artifact)
-                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri);
-        message.data?.$case === "artifact" &&
-            (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined);
-        return obj;
-    },
-};
-function createBaseInput() {
-    return {
-        artifactTrustRoot: undefined,
-        artifactVerificationOptions: undefined,
-        bundle: undefined,
-        artifact: undefined,
-    };
-}
-exports.Input = {
-    fromJSON(object) {
-        return {
-            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
-            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
-                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
-                : undefined,
-            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
-            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.artifactTrustRoot !== undefined &&
-            (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined);
-        message.artifactVerificationOptions !== undefined &&
-            (obj.artifactVerificationOptions = message.artifactVerificationOptions
-                ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions)
-                : undefined);
-        message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined);
-        message.artifact !== undefined && (obj.artifact = message.artifact ? exports.Artifact.toJSON(message.artifact) : undefined);
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js
deleted file mode 100644
index eafb768c48fca..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js
+++ /dev/null
@@ -1,37 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-__exportStar(require("./__generated__/envelope"), exports);
-__exportStar(require("./__generated__/sigstore_bundle"), exports);
-__exportStar(require("./__generated__/sigstore_common"), exports);
-__exportStar(require("./__generated__/sigstore_rekor"), exports);
-__exportStar(require("./__generated__/sigstore_trustroot"), exports);
-__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json
deleted file mode 100644
index 450abb157f31a..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
-  "name": "@sigstore/protobuf-specs",
-  "version": "0.2.1",
-  "description": "code-signing for npm packages",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "scripts": {
-    "build": "tsc"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/protobuf-specs.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "bugs": {
-    "url": "https://github.com/sigstore/protobuf-specs/issues"
-  },
-  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
-  "devDependencies": {
-    "@tsconfig/node14": "^1.0.3",
-    "@types/node": "^18.14.0",
-    "typescript": "^4.9.5"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  }
-}
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE
deleted file mode 100644
index e9e7c1679a09d..0000000000000
--- a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright 2023 The Sigstore Authors
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
deleted file mode 100644
index 0c367a8384454..0000000000000
--- a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
+++ /dev/null
@@ -1,89 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signature = exports.Envelope = void 0;
-function createBaseEnvelope() {
-    return { payload: Buffer.alloc(0), payloadType: "", signatures: [] };
-}
-exports.Envelope = {
-    fromJSON(object) {
-        return {
-            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
-            payloadType: isSet(object.payloadType) ? String(object.payloadType) : "",
-            signatures: Array.isArray(object?.signatures) ? object.signatures.map((e) => exports.Signature.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.payload !== undefined &&
-            (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0)));
-        message.payloadType !== undefined && (obj.payloadType = message.payloadType);
-        if (message.signatures) {
-            obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined);
-        }
-        else {
-            obj.signatures = [];
-        }
-        return obj;
-    },
-};
-function createBaseSignature() {
-    return { sig: Buffer.alloc(0), keyid: "" };
-}
-exports.Signature = {
-    fromJSON(object) {
-        return {
-            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
-            keyid: isSet(object.keyid) ? String(object.keyid) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0)));
-        message.keyid !== undefined && (obj.keyid = message.keyid);
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
deleted file mode 100644
index 073093b8371a8..0000000000000
--- a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
+++ /dev/null
@@ -1,185 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
-/* eslint-disable */
-const any_1 = require("./google/protobuf/any");
-const timestamp_1 = require("./google/protobuf/timestamp");
-function createBaseCloudEvent() {
-    return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined };
-}
-exports.CloudEvent = {
-    fromJSON(object) {
-        return {
-            id: isSet(object.id) ? String(object.id) : "",
-            source: isSet(object.source) ? String(object.source) : "",
-            specVersion: isSet(object.specVersion) ? String(object.specVersion) : "",
-            type: isSet(object.type) ? String(object.type) : "",
-            attributes: isObject(object.attributes)
-                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
-                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
-                    return acc;
-                }, {})
-                : {},
-            data: isSet(object.binaryData)
-                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
-                : isSet(object.textData)
-                    ? { $case: "textData", textData: String(object.textData) }
-                    : isSet(object.protoData)
-                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
-                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.id !== undefined && (obj.id = message.id);
-        message.source !== undefined && (obj.source = message.source);
-        message.specVersion !== undefined && (obj.specVersion = message.specVersion);
-        message.type !== undefined && (obj.type = message.type);
-        obj.attributes = {};
-        if (message.attributes) {
-            Object.entries(message.attributes).forEach(([k, v]) => {
-                obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
-            });
-        }
-        message.data?.$case === "binaryData" &&
-            (obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined);
-        message.data?.$case === "textData" && (obj.textData = message.data?.textData);
-        message.data?.$case === "protoData" &&
-            (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined);
-        return obj;
-    },
-};
-function createBaseCloudEvent_AttributesEntry() {
-    return { key: "", value: undefined };
-}
-exports.CloudEvent_AttributesEntry = {
-    fromJSON(object) {
-        return {
-            key: isSet(object.key) ? String(object.key) : "",
-            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.key !== undefined && (obj.key = message.key);
-        message.value !== undefined &&
-            (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined);
-        return obj;
-    },
-};
-function createBaseCloudEvent_CloudEventAttributeValue() {
-    return { attr: undefined };
-}
-exports.CloudEvent_CloudEventAttributeValue = {
-    fromJSON(object) {
-        return {
-            attr: isSet(object.ceBoolean)
-                ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) }
-                : isSet(object.ceInteger)
-                    ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) }
-                    : isSet(object.ceString)
-                        ? { $case: "ceString", ceString: String(object.ceString) }
-                        : isSet(object.ceBytes)
-                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
-                            : isSet(object.ceUri)
-                                ? { $case: "ceUri", ceUri: String(object.ceUri) }
-                                : isSet(object.ceUriRef)
-                                    ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) }
-                                    : isSet(object.ceTimestamp)
-                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
-                                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean);
-        message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger));
-        message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString);
-        message.attr?.$case === "ceBytes" &&
-            (obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined);
-        message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri);
-        message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef);
-        message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString());
-        return obj;
-    },
-};
-function createBaseCloudEventBatch() {
-    return { events: [] };
-}
-exports.CloudEventBatch = {
-    fromJSON(object) {
-        return { events: Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.events) {
-            obj.events = message.events.map((e) => e ? exports.CloudEvent.toJSON(e) : undefined);
-        }
-        else {
-            obj.events = [];
-        }
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function fromTimestamp(t) {
-    let millis = Number(t.seconds) * 1000;
-    millis += t.nanos / 1000000;
-    return new Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isObject(value) {
-    return typeof value === "object" && value !== null;
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
deleted file mode 100644
index da627499ad765..0000000000000
--- a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
+++ /dev/null
@@ -1,119 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0;
-/**
- * An indicator of the behavior of a given field (for example, that a field
- * is required in requests, or given as output but ignored as input).
- * This **does not** change the behavior in protocol buffers itself; it only
- * denotes the behavior and may affect how API tooling handles the field.
- *
- * Note: This enum **may** receive new values in the future.
- */
-var FieldBehavior;
-(function (FieldBehavior) {
-    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
-    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
-    /**
-     * OPTIONAL - Specifically denotes a field as optional.
-     * While all fields in protocol buffers are optional, this may be specified
-     * for emphasis if appropriate.
-     */
-    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
-    /**
-     * REQUIRED - Denotes a field as required.
-     * This indicates that the field **must** be provided as part of the request,
-     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
-     */
-    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
-    /**
-     * OUTPUT_ONLY - Denotes a field as output only.
-     * This indicates that the field is provided in responses, but including the
-     * field in a request does nothing (the server *must* ignore it and
-     * *must not* throw an error as a result of the field's presence).
-     */
-    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
-    /**
-     * INPUT_ONLY - Denotes a field as input only.
-     * This indicates that the field is provided in requests, and the
-     * corresponding field is not included in output.
-     */
-    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
-    /**
-     * IMMUTABLE - Denotes a field as immutable.
-     * This indicates that the field may be set once in a request to create a
-     * resource, but may not be changed thereafter.
-     */
-    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
-    /**
-     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
-     * This indicates that the service may provide the elements of the list
-     * in any arbitrary order, rather than the order the user originally
-     * provided. Additionally, the list's order may or may not be stable.
-     */
-    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
-})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {}));
-function fieldBehaviorFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "FIELD_BEHAVIOR_UNSPECIFIED":
-            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
-        case 1:
-        case "OPTIONAL":
-            return FieldBehavior.OPTIONAL;
-        case 2:
-        case "REQUIRED":
-            return FieldBehavior.REQUIRED;
-        case 3:
-        case "OUTPUT_ONLY":
-            return FieldBehavior.OUTPUT_ONLY;
-        case 4:
-        case "INPUT_ONLY":
-            return FieldBehavior.INPUT_ONLY;
-        case 5:
-        case "IMMUTABLE":
-            return FieldBehavior.IMMUTABLE;
-        case 6:
-        case "UNORDERED_LIST":
-            return FieldBehavior.UNORDERED_LIST;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
-exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
-function fieldBehaviorToJSON(object) {
-    switch (object) {
-        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
-            return "FIELD_BEHAVIOR_UNSPECIFIED";
-        case FieldBehavior.OPTIONAL:
-            return "OPTIONAL";
-        case FieldBehavior.REQUIRED:
-            return "REQUIRED";
-        case FieldBehavior.OUTPUT_ONLY:
-            return "OUTPUT_ONLY";
-        case FieldBehavior.INPUT_ONLY:
-            return "INPUT_ONLY";
-        case FieldBehavior.IMMUTABLE:
-            return "IMMUTABLE";
-        case FieldBehavior.UNORDERED_LIST:
-            return "UNORDERED_LIST";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
-exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
deleted file mode 100644
index 6b3f3c97a6647..0000000000000
--- a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
+++ /dev/null
@@ -1,65 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Any = void 0;
-function createBaseAny() {
-    return { typeUrl: "", value: Buffer.alloc(0) };
-}
-exports.Any = {
-    fromJSON(object) {
-        return {
-            typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "",
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl);
-        message.value !== undefined &&
-            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
deleted file mode 100644
index d429aac846043..0000000000000
--- a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
+++ /dev/null
@@ -1,1308 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0;
-var FieldDescriptorProto_Type;
-(function (FieldDescriptorProto_Type) {
-    /**
-     * TYPE_DOUBLE - 0 is reserved for errors.
-     * Order is weird for historical reasons.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
-    /**
-     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
-    /**
-     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
-    /**
-     * TYPE_GROUP - Tag-delimited aggregate.
-     * Group type is deprecated and not supported in proto3. However, Proto3
-     * implementations should still be able to parse the group wire format and
-     * treat group fields as unknown fields.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
-    /** TYPE_MESSAGE - Length-delimited aggregate. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
-    /** TYPE_BYTES - New in version 2. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
-    /** TYPE_SINT32 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
-    /** TYPE_SINT64 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
-})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {}));
-function fieldDescriptorProto_TypeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "TYPE_DOUBLE":
-            return FieldDescriptorProto_Type.TYPE_DOUBLE;
-        case 2:
-        case "TYPE_FLOAT":
-            return FieldDescriptorProto_Type.TYPE_FLOAT;
-        case 3:
-        case "TYPE_INT64":
-            return FieldDescriptorProto_Type.TYPE_INT64;
-        case 4:
-        case "TYPE_UINT64":
-            return FieldDescriptorProto_Type.TYPE_UINT64;
-        case 5:
-        case "TYPE_INT32":
-            return FieldDescriptorProto_Type.TYPE_INT32;
-        case 6:
-        case "TYPE_FIXED64":
-            return FieldDescriptorProto_Type.TYPE_FIXED64;
-        case 7:
-        case "TYPE_FIXED32":
-            return FieldDescriptorProto_Type.TYPE_FIXED32;
-        case 8:
-        case "TYPE_BOOL":
-            return FieldDescriptorProto_Type.TYPE_BOOL;
-        case 9:
-        case "TYPE_STRING":
-            return FieldDescriptorProto_Type.TYPE_STRING;
-        case 10:
-        case "TYPE_GROUP":
-            return FieldDescriptorProto_Type.TYPE_GROUP;
-        case 11:
-        case "TYPE_MESSAGE":
-            return FieldDescriptorProto_Type.TYPE_MESSAGE;
-        case 12:
-        case "TYPE_BYTES":
-            return FieldDescriptorProto_Type.TYPE_BYTES;
-        case 13:
-        case "TYPE_UINT32":
-            return FieldDescriptorProto_Type.TYPE_UINT32;
-        case 14:
-        case "TYPE_ENUM":
-            return FieldDescriptorProto_Type.TYPE_ENUM;
-        case 15:
-        case "TYPE_SFIXED32":
-            return FieldDescriptorProto_Type.TYPE_SFIXED32;
-        case 16:
-        case "TYPE_SFIXED64":
-            return FieldDescriptorProto_Type.TYPE_SFIXED64;
-        case 17:
-        case "TYPE_SINT32":
-            return FieldDescriptorProto_Type.TYPE_SINT32;
-        case 18:
-        case "TYPE_SINT64":
-            return FieldDescriptorProto_Type.TYPE_SINT64;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
-function fieldDescriptorProto_TypeToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Type.TYPE_DOUBLE:
-            return "TYPE_DOUBLE";
-        case FieldDescriptorProto_Type.TYPE_FLOAT:
-            return "TYPE_FLOAT";
-        case FieldDescriptorProto_Type.TYPE_INT64:
-            return "TYPE_INT64";
-        case FieldDescriptorProto_Type.TYPE_UINT64:
-            return "TYPE_UINT64";
-        case FieldDescriptorProto_Type.TYPE_INT32:
-            return "TYPE_INT32";
-        case FieldDescriptorProto_Type.TYPE_FIXED64:
-            return "TYPE_FIXED64";
-        case FieldDescriptorProto_Type.TYPE_FIXED32:
-            return "TYPE_FIXED32";
-        case FieldDescriptorProto_Type.TYPE_BOOL:
-            return "TYPE_BOOL";
-        case FieldDescriptorProto_Type.TYPE_STRING:
-            return "TYPE_STRING";
-        case FieldDescriptorProto_Type.TYPE_GROUP:
-            return "TYPE_GROUP";
-        case FieldDescriptorProto_Type.TYPE_MESSAGE:
-            return "TYPE_MESSAGE";
-        case FieldDescriptorProto_Type.TYPE_BYTES:
-            return "TYPE_BYTES";
-        case FieldDescriptorProto_Type.TYPE_UINT32:
-            return "TYPE_UINT32";
-        case FieldDescriptorProto_Type.TYPE_ENUM:
-            return "TYPE_ENUM";
-        case FieldDescriptorProto_Type.TYPE_SFIXED32:
-            return "TYPE_SFIXED32";
-        case FieldDescriptorProto_Type.TYPE_SFIXED64:
-            return "TYPE_SFIXED64";
-        case FieldDescriptorProto_Type.TYPE_SINT32:
-            return "TYPE_SINT32";
-        case FieldDescriptorProto_Type.TYPE_SINT64:
-            return "TYPE_SINT64";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
-var FieldDescriptorProto_Label;
-(function (FieldDescriptorProto_Label) {
-    /** LABEL_OPTIONAL - 0 is reserved for errors */
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
-})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {}));
-function fieldDescriptorProto_LabelFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "LABEL_OPTIONAL":
-            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
-        case 2:
-        case "LABEL_REQUIRED":
-            return FieldDescriptorProto_Label.LABEL_REQUIRED;
-        case 3:
-        case "LABEL_REPEATED":
-            return FieldDescriptorProto_Label.LABEL_REPEATED;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
-function fieldDescriptorProto_LabelToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
-            return "LABEL_OPTIONAL";
-        case FieldDescriptorProto_Label.LABEL_REQUIRED:
-            return "LABEL_REQUIRED";
-        case FieldDescriptorProto_Label.LABEL_REPEATED:
-            return "LABEL_REPEATED";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
-/** Generated classes can be optimized for speed or code size. */
-var FileOptions_OptimizeMode;
-(function (FileOptions_OptimizeMode) {
-    /** SPEED - Generate complete code for parsing, serialization, */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
-    /** CODE_SIZE - etc. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
-    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
-})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {}));
-function fileOptions_OptimizeModeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "SPEED":
-            return FileOptions_OptimizeMode.SPEED;
-        case 2:
-        case "CODE_SIZE":
-            return FileOptions_OptimizeMode.CODE_SIZE;
-        case 3:
-        case "LITE_RUNTIME":
-            return FileOptions_OptimizeMode.LITE_RUNTIME;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
-function fileOptions_OptimizeModeToJSON(object) {
-    switch (object) {
-        case FileOptions_OptimizeMode.SPEED:
-            return "SPEED";
-        case FileOptions_OptimizeMode.CODE_SIZE:
-            return "CODE_SIZE";
-        case FileOptions_OptimizeMode.LITE_RUNTIME:
-            return "LITE_RUNTIME";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
-var FieldOptions_CType;
-(function (FieldOptions_CType) {
-    /** STRING - Default mode. */
-    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
-    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
-    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
-})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {}));
-function fieldOptions_CTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "STRING":
-            return FieldOptions_CType.STRING;
-        case 1:
-        case "CORD":
-            return FieldOptions_CType.CORD;
-        case 2:
-        case "STRING_PIECE":
-            return FieldOptions_CType.STRING_PIECE;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
-function fieldOptions_CTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_CType.STRING:
-            return "STRING";
-        case FieldOptions_CType.CORD:
-            return "CORD";
-        case FieldOptions_CType.STRING_PIECE:
-            return "STRING_PIECE";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
-var FieldOptions_JSType;
-(function (FieldOptions_JSType) {
-    /** JS_NORMAL - Use the default type. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
-    /** JS_STRING - Use JavaScript strings. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
-    /** JS_NUMBER - Use JavaScript numbers. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
-})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {}));
-function fieldOptions_JSTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "JS_NORMAL":
-            return FieldOptions_JSType.JS_NORMAL;
-        case 1:
-        case "JS_STRING":
-            return FieldOptions_JSType.JS_STRING;
-        case 2:
-        case "JS_NUMBER":
-            return FieldOptions_JSType.JS_NUMBER;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
-function fieldOptions_JSTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_JSType.JS_NORMAL:
-            return "JS_NORMAL";
-        case FieldOptions_JSType.JS_STRING:
-            return "JS_STRING";
-        case FieldOptions_JSType.JS_NUMBER:
-            return "JS_NUMBER";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
-/**
- * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
- * or neither? HTTP based RPC implementation may choose GET verb for safe
- * methods, and PUT verb for idempotent methods instead of the default POST.
- */
-var MethodOptions_IdempotencyLevel;
-(function (MethodOptions_IdempotencyLevel) {
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
-    /** NO_SIDE_EFFECTS - implies idempotent */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
-    /** IDEMPOTENT - idempotent, but may have side effects */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
-})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {}));
-function methodOptions_IdempotencyLevelFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "IDEMPOTENCY_UNKNOWN":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
-        case 1:
-        case "NO_SIDE_EFFECTS":
-            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
-        case 2:
-        case "IDEMPOTENT":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
-function methodOptions_IdempotencyLevelToJSON(object) {
-    switch (object) {
-        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
-            return "IDEMPOTENCY_UNKNOWN";
-        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
-            return "NO_SIDE_EFFECTS";
-        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
-            return "IDEMPOTENT";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
-function createBaseFileDescriptorSet() {
-    return { file: [] };
-}
-exports.FileDescriptorSet = {
-    fromJSON(object) {
-        return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.file) {
-            obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.file = [];
-        }
-        return obj;
-    },
-};
-function createBaseFileDescriptorProto() {
-    return {
-        name: "",
-        package: "",
-        dependency: [],
-        publicDependency: [],
-        weakDependency: [],
-        messageType: [],
-        enumType: [],
-        service: [],
-        extension: [],
-        options: undefined,
-        sourceCodeInfo: undefined,
-        syntax: "",
-    };
-}
-exports.FileDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            package: isSet(object.package) ? String(object.package) : "",
-            dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [],
-            publicDependency: Array.isArray(object?.publicDependency)
-                ? object.publicDependency.map((e) => Number(e))
-                : [],
-            weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [],
-            messageType: Array.isArray(object?.messageType)
-                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
-            service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [],
-            extension: Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
-            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
-            syntax: isSet(object.syntax) ? String(object.syntax) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.package !== undefined && (obj.package = message.package);
-        if (message.dependency) {
-            obj.dependency = message.dependency.map((e) => e);
-        }
-        else {
-            obj.dependency = [];
-        }
-        if (message.publicDependency) {
-            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
-        }
-        else {
-            obj.publicDependency = [];
-        }
-        if (message.weakDependency) {
-            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
-        }
-        else {
-            obj.weakDependency = [];
-        }
-        if (message.messageType) {
-            obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.messageType = [];
-        }
-        if (message.enumType) {
-            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.enumType = [];
-        }
-        if (message.service) {
-            obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.service = [];
-        }
-        if (message.extension) {
-            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.extension = [];
-        }
-        message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined);
-        message.sourceCodeInfo !== undefined &&
-            (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined);
-        message.syntax !== undefined && (obj.syntax = message.syntax);
-        return obj;
-    },
-};
-function createBaseDescriptorProto() {
-    return {
-        name: "",
-        field: [],
-        extension: [],
-        nestedType: [],
-        enumType: [],
-        extensionRange: [],
-        oneofDecl: [],
-        options: undefined,
-        reservedRange: [],
-        reservedName: [],
-    };
-}
-exports.DescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [],
-            extension: Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            nestedType: Array.isArray(object?.nestedType)
-                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
-            extensionRange: Array.isArray(object?.extensionRange)
-                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
-                : [],
-            oneofDecl: Array.isArray(object?.oneofDecl)
-                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
-            reservedRange: Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
-                : [],
-            reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        if (message.field) {
-            obj.field = message.field.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.field = [];
-        }
-        if (message.extension) {
-            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.extension = [];
-        }
-        if (message.nestedType) {
-            obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.nestedType = [];
-        }
-        if (message.enumType) {
-            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.enumType = [];
-        }
-        if (message.extensionRange) {
-            obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined);
-        }
-        else {
-            obj.extensionRange = [];
-        }
-        if (message.oneofDecl) {
-            obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.oneofDecl = [];
-        }
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined);
-        if (message.reservedRange) {
-            obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined);
-        }
-        else {
-            obj.reservedRange = [];
-        }
-        if (message.reservedName) {
-            obj.reservedName = message.reservedName.map((e) => e);
-        }
-        else {
-            obj.reservedName = [];
-        }
-        return obj;
-    },
-};
-function createBaseDescriptorProto_ExtensionRange() {
-    return { start: 0, end: 0, options: undefined };
-}
-exports.DescriptorProto_ExtensionRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? Number(object.start) : 0,
-            end: isSet(object.end) ? Number(object.end) : 0,
-            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = Math.round(message.start));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseDescriptorProto_ReservedRange() {
-    return { start: 0, end: 0 };
-}
-exports.DescriptorProto_ReservedRange = {
-    fromJSON(object) {
-        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = Math.round(message.start));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        return obj;
-    },
-};
-function createBaseExtensionRangeOptions() {
-    return { uninterpretedOption: [] };
-}
-exports.ExtensionRangeOptions = {
-    fromJSON(object) {
-        return {
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseFieldDescriptorProto() {
-    return {
-        name: "",
-        number: 0,
-        label: 1,
-        type: 1,
-        typeName: "",
-        extendee: "",
-        defaultValue: "",
-        oneofIndex: 0,
-        jsonName: "",
-        options: undefined,
-        proto3Optional: false,
-    };
-}
-exports.FieldDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            number: isSet(object.number) ? Number(object.number) : 0,
-            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
-            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
-            typeName: isSet(object.typeName) ? String(object.typeName) : "",
-            extendee: isSet(object.extendee) ? String(object.extendee) : "",
-            defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "",
-            oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0,
-            jsonName: isSet(object.jsonName) ? String(object.jsonName) : "",
-            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
-            proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.number !== undefined && (obj.number = Math.round(message.number));
-        message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label));
-        message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type));
-        message.typeName !== undefined && (obj.typeName = message.typeName);
-        message.extendee !== undefined && (obj.extendee = message.extendee);
-        message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue);
-        message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex));
-        message.jsonName !== undefined && (obj.jsonName = message.jsonName);
-        message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined);
-        message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional);
-        return obj;
-    },
-};
-function createBaseOneofDescriptorProto() {
-    return { name: "", options: undefined };
-}
-exports.OneofDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseEnumDescriptorProto() {
-    return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] };
-}
-exports.EnumDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [],
-            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
-            reservedRange: Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
-                : [],
-            reservedName: Array.isArray(object?.reservedName)
-                ? object.reservedName.map((e) => String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        if (message.value) {
-            obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.value = [];
-        }
-        message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined);
-        if (message.reservedRange) {
-            obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined);
-        }
-        else {
-            obj.reservedRange = [];
-        }
-        if (message.reservedName) {
-            obj.reservedName = message.reservedName.map((e) => e);
-        }
-        else {
-            obj.reservedName = [];
-        }
-        return obj;
-    },
-};
-function createBaseEnumDescriptorProto_EnumReservedRange() {
-    return { start: 0, end: 0 };
-}
-exports.EnumDescriptorProto_EnumReservedRange = {
-    fromJSON(object) {
-        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = Math.round(message.start));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        return obj;
-    },
-};
-function createBaseEnumValueDescriptorProto() {
-    return { name: "", number: 0, options: undefined };
-}
-exports.EnumValueDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            number: isSet(object.number) ? Number(object.number) : 0,
-            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.number !== undefined && (obj.number = Math.round(message.number));
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseServiceDescriptorProto() {
-    return { name: "", method: [], options: undefined };
-}
-exports.ServiceDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [],
-            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        if (message.method) {
-            obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.method = [];
-        }
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseMethodDescriptorProto() {
-    return {
-        name: "",
-        inputType: "",
-        outputType: "",
-        options: undefined,
-        clientStreaming: false,
-        serverStreaming: false,
-    };
-}
-exports.MethodDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            inputType: isSet(object.inputType) ? String(object.inputType) : "",
-            outputType: isSet(object.outputType) ? String(object.outputType) : "",
-            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
-            clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false,
-            serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.inputType !== undefined && (obj.inputType = message.inputType);
-        message.outputType !== undefined && (obj.outputType = message.outputType);
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined);
-        message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming);
-        message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming);
-        return obj;
-    },
-};
-function createBaseFileOptions() {
-    return {
-        javaPackage: "",
-        javaOuterClassname: "",
-        javaMultipleFiles: false,
-        javaGenerateEqualsAndHash: false,
-        javaStringCheckUtf8: false,
-        optimizeFor: 1,
-        goPackage: "",
-        ccGenericServices: false,
-        javaGenericServices: false,
-        pyGenericServices: false,
-        phpGenericServices: false,
-        deprecated: false,
-        ccEnableArenas: false,
-        objcClassPrefix: "",
-        csharpNamespace: "",
-        swiftPrefix: "",
-        phpClassPrefix: "",
-        phpNamespace: "",
-        phpMetadataNamespace: "",
-        rubyPackage: "",
-        uninterpretedOption: [],
-    };
-}
-exports.FileOptions = {
-    fromJSON(object) {
-        return {
-            javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "",
-            javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "",
-            javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false,
-            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
-                ? Boolean(object.javaGenerateEqualsAndHash)
-                : false,
-            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false,
-            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
-            goPackage: isSet(object.goPackage) ? String(object.goPackage) : "",
-            ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false,
-            javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false,
-            pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false,
-            phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false,
-            objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "",
-            csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "",
-            swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "",
-            phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "",
-            phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "",
-            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "",
-            rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "",
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage);
-        message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname);
-        message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles);
-        message.javaGenerateEqualsAndHash !== undefined &&
-            (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash);
-        message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8);
-        message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor));
-        message.goPackage !== undefined && (obj.goPackage = message.goPackage);
-        message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices);
-        message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices);
-        message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices);
-        message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas);
-        message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix);
-        message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace);
-        message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix);
-        message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix);
-        message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace);
-        message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace);
-        message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseMessageOptions() {
-    return {
-        messageSetWireFormat: false,
-        noStandardDescriptorAccessor: false,
-        deprecated: false,
-        mapEntry: false,
-        uninterpretedOption: [],
-    };
-}
-exports.MessageOptions = {
-    fromJSON(object) {
-        return {
-            messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false,
-            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
-                ? Boolean(object.noStandardDescriptorAccessor)
-                : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat);
-        message.noStandardDescriptorAccessor !== undefined &&
-            (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseFieldOptions() {
-    return {
-        ctype: 0,
-        packed: false,
-        jstype: 0,
-        lazy: false,
-        unverifiedLazy: false,
-        deprecated: false,
-        weak: false,
-        uninterpretedOption: [],
-    };
-}
-exports.FieldOptions = {
-    fromJSON(object) {
-        return {
-            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
-            packed: isSet(object.packed) ? Boolean(object.packed) : false,
-            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
-            lazy: isSet(object.lazy) ? Boolean(object.lazy) : false,
-            unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            weak: isSet(object.weak) ? Boolean(object.weak) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype));
-        message.packed !== undefined && (obj.packed = message.packed);
-        message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype));
-        message.lazy !== undefined && (obj.lazy = message.lazy);
-        message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.weak !== undefined && (obj.weak = message.weak);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseOneofOptions() {
-    return { uninterpretedOption: [] };
-}
-exports.OneofOptions = {
-    fromJSON(object) {
-        return {
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseEnumOptions() {
-    return { allowAlias: false, deprecated: false, uninterpretedOption: [] };
-}
-exports.EnumOptions = {
-    fromJSON(object) {
-        return {
-            allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseEnumValueOptions() {
-    return { deprecated: false, uninterpretedOption: [] };
-}
-exports.EnumValueOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseServiceOptions() {
-    return { deprecated: false, uninterpretedOption: [] };
-}
-exports.ServiceOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseMethodOptions() {
-    return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] };
-}
-exports.MethodOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            idempotencyLevel: isSet(object.idempotencyLevel)
-                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
-                : 0,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.idempotencyLevel !== undefined &&
-            (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel));
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseUninterpretedOption() {
-    return {
-        name: [],
-        identifierValue: "",
-        positiveIntValue: "0",
-        negativeIntValue: "0",
-        doubleValue: 0,
-        stringValue: Buffer.alloc(0),
-        aggregateValue: "",
-    };
-}
-exports.UninterpretedOption = {
-    fromJSON(object) {
-        return {
-            name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [],
-            identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "",
-            positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0",
-            negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0",
-            doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0,
-            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
-            aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name) {
-            obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined);
-        }
-        else {
-            obj.name = [];
-        }
-        message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue);
-        message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue);
-        message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue);
-        message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue);
-        message.stringValue !== undefined &&
-            (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0)));
-        message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue);
-        return obj;
-    },
-};
-function createBaseUninterpretedOption_NamePart() {
-    return { namePart: "", isExtension: false };
-}
-exports.UninterpretedOption_NamePart = {
-    fromJSON(object) {
-        return {
-            namePart: isSet(object.namePart) ? String(object.namePart) : "",
-            isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.namePart !== undefined && (obj.namePart = message.namePart);
-        message.isExtension !== undefined && (obj.isExtension = message.isExtension);
-        return obj;
-    },
-};
-function createBaseSourceCodeInfo() {
-    return { location: [] };
-}
-exports.SourceCodeInfo = {
-    fromJSON(object) {
-        return {
-            location: Array.isArray(object?.location)
-                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.location) {
-            obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined);
-        }
-        else {
-            obj.location = [];
-        }
-        return obj;
-    },
-};
-function createBaseSourceCodeInfo_Location() {
-    return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] };
-}
-exports.SourceCodeInfo_Location = {
-    fromJSON(object) {
-        return {
-            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
-            span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [],
-            leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "",
-            trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "",
-            leadingDetachedComments: Array.isArray(object?.leadingDetachedComments)
-                ? object.leadingDetachedComments.map((e) => String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        else {
-            obj.path = [];
-        }
-        if (message.span) {
-            obj.span = message.span.map((e) => Math.round(e));
-        }
-        else {
-            obj.span = [];
-        }
-        message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments);
-        message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments);
-        if (message.leadingDetachedComments) {
-            obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e);
-        }
-        else {
-            obj.leadingDetachedComments = [];
-        }
-        return obj;
-    },
-};
-function createBaseGeneratedCodeInfo() {
-    return { annotation: [] };
-}
-exports.GeneratedCodeInfo = {
-    fromJSON(object) {
-        return {
-            annotation: Array.isArray(object?.annotation)
-                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.annotation) {
-            obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined);
-        }
-        else {
-            obj.annotation = [];
-        }
-        return obj;
-    },
-};
-function createBaseGeneratedCodeInfo_Annotation() {
-    return { path: [], sourceFile: "", begin: 0, end: 0 };
-}
-exports.GeneratedCodeInfo_Annotation = {
-    fromJSON(object) {
-        return {
-            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
-            sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "",
-            begin: isSet(object.begin) ? Number(object.begin) : 0,
-            end: isSet(object.end) ? Number(object.end) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        else {
-            obj.path = [];
-        }
-        message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile);
-        message.begin !== undefined && (obj.begin = Math.round(message.begin));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
deleted file mode 100644
index 159135fe87172..0000000000000
--- a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Timestamp = void 0;
-function createBaseTimestamp() {
-    return { seconds: "0", nanos: 0 };
-}
-exports.Timestamp = {
-    fromJSON(object) {
-        return {
-            seconds: isSet(object.seconds) ? String(object.seconds) : "0",
-            nanos: isSet(object.nanos) ? Number(object.nanos) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.seconds !== undefined && (obj.seconds = message.seconds);
-        message.nanos !== undefined && (obj.nanos = Math.round(message.nanos));
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
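
For context on the Timestamp codec deleted above: it models a protobuf timestamp as { seconds: '<int64 as string>', nanos: <int32> }. A minimal sketch of turning that JSON shape into a JavaScript Date, mirroring the fromTimestamp helper removed from sigstore_common.js further below (the function name here is illustrative, not part of the package):

    // Convert a ts-proto style Timestamp ({ seconds: "<int64>", nanos: <int32> })
    // into a JavaScript Date. Precision below one millisecond is discarded.
    function timestampToDate (t) {
      let millis = Number(t.seconds) * 1000
      millis += t.nanos / 1e6
      return new Date(millis)
    }

    console.log(timestampToDate({ seconds: '1691712000', nanos: 500000000 }).toISOString())
    // -> 2023-08-11T00:00:00.500Z
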
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
deleted file mode 100644
index 1ef3e1b3356b7..0000000000000
--- a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
+++ /dev/null
@@ -1,106 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
-/* eslint-disable */
-const envelope_1 = require("./envelope");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_rekor_1 = require("./sigstore_rekor");
-function createBaseTimestampVerificationData() {
-    return { rfc3161Timestamps: [] };
-}
-exports.TimestampVerificationData = {
-    fromJSON(object) {
-        return {
-            rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps)
-                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rfc3161Timestamps) {
-            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined);
-        }
-        else {
-            obj.rfc3161Timestamps = [];
-        }
-        return obj;
-    },
-};
-function createBaseVerificationMaterial() {
-    return { content: undefined, tlogEntries: [], timestampVerificationData: undefined };
-}
-exports.VerificationMaterial = {
-    fromJSON(object) {
-        return {
-            content: isSet(object.publicKey)
-                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
-                : isSet(object.x509CertificateChain)
-                    ? {
-                        $case: "x509CertificateChain",
-                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
-                    }
-                    : undefined,
-            tlogEntries: Array.isArray(object?.tlogEntries)
-                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
-                : [],
-            timestampVerificationData: isSet(object.timestampVerificationData)
-                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.content?.$case === "publicKey" &&
-            (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined);
-        message.content?.$case === "x509CertificateChain" &&
-            (obj.x509CertificateChain = message.content?.x509CertificateChain
-                ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain)
-                : undefined);
-        if (message.tlogEntries) {
-            obj.tlogEntries = message.tlogEntries.map((e) => e ? sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
-        }
-        else {
-            obj.tlogEntries = [];
-        }
-        message.timestampVerificationData !== undefined &&
-            (obj.timestampVerificationData = message.timestampVerificationData
-                ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
-                : undefined);
-        return obj;
-    },
-};
-function createBaseBundle() {
-    return { mediaType: "", verificationMaterial: undefined, content: undefined };
-}
-exports.Bundle = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
-            verificationMaterial: isSet(object.verificationMaterial)
-                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
-                : undefined,
-            content: isSet(object.messageSignature)
-                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
-                : isSet(object.dsseEnvelope)
-                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
-        message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
-            ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
-            : undefined);
-        message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
-            ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
-            : undefined);
-        message.content?.$case === "dsseEnvelope" &&
-            (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
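
The Bundle and VerificationMaterial codecs deleted above encode protobuf oneof fields as a tagged union: a `$case` discriminator alongside a property of the same name. A small sketch of dispatching on that shape (the function is illustrative, not part of the package):

    // Dispatch on a ts-proto oneof: `content` carries a `$case` tag plus a
    // same-named property holding the value for that case.
    function describeBundleContent (bundle) {
      switch (bundle.content?.$case) {
        case 'messageSignature':
          return 'bundle carries a detached message signature'
        case 'dsseEnvelope':
          return 'bundle carries a DSSE envelope'
        default:
          return 'bundle content is unset'
      }
    }

    console.log(describeBundleContent({ content: { $case: 'dsseEnvelope', dsseEnvelope: {} } }))
    // -> bundle carries a DSSE envelope
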
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
deleted file mode 100644
index bcd654e9154b9..0000000000000
--- a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
+++ /dev/null
@@ -1,457 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
-/* eslint-disable */
-const timestamp_1 = require("./google/protobuf/timestamp");
-/**
- * Only a subset of the secure hash standard algorithms are supported.
- * See the NIST Secure Hash Standard for more
- * details.
- * UNSPECIFIED SHOULD not be used; the primary reason for its inclusion is to
- * force any proto JSON serialization to emit the hash algorithm used, as the
- * default behavior is to *omit* the default value of an enum (which is the
- * first value, represented by '0').
- */
-var HashAlgorithm;
-(function (HashAlgorithm) {
-    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
-    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
-})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {}));
-function hashAlgorithmFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "HASH_ALGORITHM_UNSPECIFIED":
-            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
-        case 1:
-        case "SHA2_256":
-            return HashAlgorithm.SHA2_256;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
-function hashAlgorithmToJSON(object) {
-    switch (object) {
-        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
-            return "HASH_ALGORITHM_UNSPECIFIED";
-        case HashAlgorithm.SHA2_256:
-            return "SHA2_256";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
-/**
- * Details of a specific public key, capturing the key encoding method
- * and signature algorithm.
- * To avoid the possibility of contradicting formats such as PKCS1 with
- * ED25519, the valid permutations are listed as a linear set instead of a
- * cartesian set (i.e. one combined variable instead of two, one for encoding
- * and one for the signature algorithm).
- */
-var PublicKeyDetails;
-(function (PublicKeyDetails) {
-    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-    /** PKCS1_RSA_PKCS1V5 - RSA */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
-    /** PKCS1_RSA_PSS - See RFC8017 */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
-    /** PKIX_ECDSA_P256_SHA_256 - ECDSA */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
-    /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
-    /** PKIX_ED25519 - Ed25519 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
-})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {}));
-function publicKeyDetailsFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
-            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
-        case 1:
-        case "PKCS1_RSA_PKCS1V5":
-            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
-        case 2:
-        case "PKCS1_RSA_PSS":
-            return PublicKeyDetails.PKCS1_RSA_PSS;
-        case 3:
-        case "PKIX_RSA_PKCS1V5":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
-        case 4:
-        case "PKIX_RSA_PSS":
-            return PublicKeyDetails.PKIX_RSA_PSS;
-        case 5:
-        case "PKIX_ECDSA_P256_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
-        case 6:
-        case "PKIX_ECDSA_P256_HMAC_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
-        case 7:
-        case "PKIX_ED25519":
-            return PublicKeyDetails.PKIX_ED25519;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
-function publicKeyDetailsToJSON(object) {
-    switch (object) {
-        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
-            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
-            return "PKCS1_RSA_PKCS1V5";
-        case PublicKeyDetails.PKCS1_RSA_PSS:
-            return "PKCS1_RSA_PSS";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
-            return "PKIX_RSA_PKCS1V5";
-        case PublicKeyDetails.PKIX_RSA_PSS:
-            return "PKIX_RSA_PSS";
-        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
-            return "PKIX_ECDSA_P256_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
-            return "PKIX_ECDSA_P256_HMAC_SHA_256";
-        case PublicKeyDetails.PKIX_ED25519:
-            return "PKIX_ED25519";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
-var SubjectAlternativeNameType;
-(function (SubjectAlternativeNameType) {
-    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
-    /**
-     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
-     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
-     * for more details.
-     */
-    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
-})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {}));
-function subjectAlternativeNameTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
-            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
-        case 1:
-        case "EMAIL":
-            return SubjectAlternativeNameType.EMAIL;
-        case 2:
-        case "URI":
-            return SubjectAlternativeNameType.URI;
-        case 3:
-        case "OTHER_NAME":
-            return SubjectAlternativeNameType.OTHER_NAME;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
-function subjectAlternativeNameTypeToJSON(object) {
-    switch (object) {
-        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
-            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-        case SubjectAlternativeNameType.EMAIL:
-            return "EMAIL";
-        case SubjectAlternativeNameType.URI:
-            return "URI";
-        case SubjectAlternativeNameType.OTHER_NAME:
-            return "OTHER_NAME";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
-function createBaseHashOutput() {
-    return { algorithm: 0, digest: Buffer.alloc(0) };
-}
-exports.HashOutput = {
-    fromJSON(object) {
-        return {
-            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
-            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm));
-        message.digest !== undefined &&
-            (obj.digest = base64FromBytes(message.digest !== undefined ? message.digest : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseMessageSignature() {
-    return { messageDigest: undefined, signature: Buffer.alloc(0) };
-}
-exports.MessageSignature = {
-    fromJSON(object) {
-        return {
-            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
-            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.messageDigest !== undefined &&
-            (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined);
-        message.signature !== undefined &&
-            (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseLogId() {
-    return { keyId: Buffer.alloc(0) };
-}
-exports.LogId = {
-    fromJSON(object) {
-        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.keyId !== undefined &&
-            (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseRFC3161SignedTimestamp() {
-    return { signedTimestamp: Buffer.alloc(0) };
-}
-exports.RFC3161SignedTimestamp = {
-    fromJSON(object) {
-        return {
-            signedTimestamp: isSet(object.signedTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.signedTimestamp !== undefined &&
-            (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBasePublicKey() {
-    return { rawBytes: undefined, keyDetails: 0, validFor: undefined };
-}
-exports.PublicKey = {
-    fromJSON(object) {
-        return {
-            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
-            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
-            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.rawBytes !== undefined &&
-            (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined);
-        message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails));
-        message.validFor !== undefined &&
-            (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined);
-        return obj;
-    },
-};
-function createBasePublicKeyIdentifier() {
-    return { hint: "" };
-}
-exports.PublicKeyIdentifier = {
-    fromJSON(object) {
-        return { hint: isSet(object.hint) ? String(object.hint) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.hint !== undefined && (obj.hint = message.hint);
-        return obj;
-    },
-};
-function createBaseObjectIdentifier() {
-    return { id: [] };
-}
-exports.ObjectIdentifier = {
-    fromJSON(object) {
-        return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.id) {
-            obj.id = message.id.map((e) => Math.round(e));
-        }
-        else {
-            obj.id = [];
-        }
-        return obj;
-    },
-};
-function createBaseObjectIdentifierValuePair() {
-    return { oid: undefined, value: Buffer.alloc(0) };
-}
-exports.ObjectIdentifierValuePair = {
-    fromJSON(object) {
-        return {
-            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined);
-        message.value !== undefined &&
-            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseDistinguishedName() {
-    return { organization: "", commonName: "" };
-}
-exports.DistinguishedName = {
-    fromJSON(object) {
-        return {
-            organization: isSet(object.organization) ? String(object.organization) : "",
-            commonName: isSet(object.commonName) ? String(object.commonName) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.organization !== undefined && (obj.organization = message.organization);
-        message.commonName !== undefined && (obj.commonName = message.commonName);
-        return obj;
-    },
-};
-function createBaseX509Certificate() {
-    return { rawBytes: Buffer.alloc(0) };
-}
-exports.X509Certificate = {
-    fromJSON(object) {
-        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.rawBytes !== undefined &&
-            (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseSubjectAlternativeName() {
-    return { type: 0, identity: undefined };
-}
-exports.SubjectAlternativeName = {
-    fromJSON(object) {
-        return {
-            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
-            identity: isSet(object.regexp)
-                ? { $case: "regexp", regexp: String(object.regexp) }
-                : isSet(object.value)
-                    ? { $case: "value", value: String(object.value) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type));
-        message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp);
-        message.identity?.$case === "value" && (obj.value = message.identity?.value);
-        return obj;
-    },
-};
-function createBaseX509CertificateChain() {
-    return { certificates: [] };
-}
-exports.X509CertificateChain = {
-    fromJSON(object) {
-        return {
-            certificates: Array.isArray(object?.certificates)
-                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.certificates) {
-            obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined);
-        }
-        else {
-            obj.certificates = [];
-        }
-        return obj;
-    },
-};
-function createBaseTimeRange() {
-    return { start: undefined, end: undefined };
-}
-exports.TimeRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
-            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = message.start.toISOString());
-        message.end !== undefined && (obj.end = message.end.toISOString());
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function fromTimestamp(t) {
-    let millis = Number(t.seconds) * 1000;
-    millis += t.nanos / 1000000;
-    return new Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
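
The bytesFromBase64/base64FromBytes helpers repeated across these generated files prefer Buffer when present and fall back to atob/btoa otherwise. A quick self-contained check that the two paths agree, assuming a runtime (for example Node 16+) where both Buffer and btoa are global:

    const bytes = Uint8Array.from([0xde, 0xad, 0xbe, 0xef])

    // Buffer path, taken when tsProtoGlobalThis.Buffer is set
    const viaBuffer = Buffer.from(bytes).toString('base64')

    // btoa fallback path, taken in browsers
    const viaBtoa = btoa(String.fromCharCode(...bytes))

    console.log(viaBuffer === viaBtoa, viaBuffer) // true '3q2+7w=='
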
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
deleted file mode 100644
index 398193b2075a7..0000000000000
--- a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
+++ /dev/null
@@ -1,167 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-function createBaseKindVersion() {
-    return { kind: "", version: "" };
-}
-exports.KindVersion = {
-    fromJSON(object) {
-        return {
-            kind: isSet(object.kind) ? String(object.kind) : "",
-            version: isSet(object.version) ? String(object.version) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.kind !== undefined && (obj.kind = message.kind);
-        message.version !== undefined && (obj.version = message.version);
-        return obj;
-    },
-};
-function createBaseCheckpoint() {
-    return { envelope: "" };
-}
-exports.Checkpoint = {
-    fromJSON(object) {
-        return { envelope: isSet(object.envelope) ? String(object.envelope) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.envelope !== undefined && (obj.envelope = message.envelope);
-        return obj;
-    },
-};
-function createBaseInclusionProof() {
-    return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined };
-}
-exports.InclusionProof = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
-            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
-            treeSize: isSet(object.treeSize) ? String(object.treeSize) : "0",
-            hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [],
-            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
-        message.rootHash !== undefined &&
-            (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0)));
-        message.treeSize !== undefined && (obj.treeSize = message.treeSize);
-        if (message.hashes) {
-            obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0)));
-        }
-        else {
-            obj.hashes = [];
-        }
-        message.checkpoint !== undefined &&
-            (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined);
-        return obj;
-    },
-};
-function createBaseInclusionPromise() {
-    return { signedEntryTimestamp: Buffer.alloc(0) };
-}
-exports.InclusionPromise = {
-    fromJSON(object) {
-        return {
-            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.signedEntryTimestamp !== undefined &&
-            (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseTransparencyLogEntry() {
-    return {
-        logIndex: "0",
-        logId: undefined,
-        kindVersion: undefined,
-        integratedTime: "0",
-        inclusionPromise: undefined,
-        inclusionProof: undefined,
-        canonicalizedBody: Buffer.alloc(0),
-    };
-}
-exports.TransparencyLogEntry = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
-            integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0",
-            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
-            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
-            canonicalizedBody: isSet(object.canonicalizedBody)
-                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
-        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
-        message.kindVersion !== undefined &&
-            (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined);
-        message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime);
-        message.inclusionPromise !== undefined &&
-            (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined);
-        message.inclusionProof !== undefined &&
-            (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined);
-        message.canonicalizedBody !== undefined &&
-            (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? message.canonicalizedBody : Buffer.alloc(0)));
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
deleted file mode 100644
index 05e566767cdb2..0000000000000
--- a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
+++ /dev/null
@@ -1,103 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-function createBaseTransparencyLogInstance() {
-    return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined };
-}
-exports.TransparencyLogInstance = {
-    fromJSON(object) {
-        return {
-            baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "",
-            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
-            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl);
-        message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm));
-        message.publicKey !== undefined &&
-            (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined);
-        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
-        return obj;
-    },
-};
-function createBaseCertificateAuthority() {
-    return { subject: undefined, uri: "", certChain: undefined, validFor: undefined };
-}
-exports.CertificateAuthority = {
-    fromJSON(object) {
-        return {
-            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
-            uri: isSet(object.uri) ? String(object.uri) : "",
-            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
-            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.subject !== undefined &&
-            (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined);
-        message.uri !== undefined && (obj.uri = message.uri);
-        message.certChain !== undefined &&
-            (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined);
-        message.validFor !== undefined &&
-            (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined);
-        return obj;
-    },
-};
-function createBaseTrustedRoot() {
-    return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] };
-}
-exports.TrustedRoot = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
-            tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [],
-            certificateAuthorities: Array.isArray(object?.certificateAuthorities)
-                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-            ctlogs: Array.isArray(object?.ctlogs)
-                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
-                : [],
-            timestampAuthorities: Array.isArray(object?.timestampAuthorities)
-                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
-        if (message.tlogs) {
-            obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
-        }
-        else {
-            obj.tlogs = [];
-        }
-        if (message.certificateAuthorities) {
-            obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
-        }
-        else {
-            obj.certificateAuthorities = [];
-        }
-        if (message.ctlogs) {
-            obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
-        }
-        else {
-            obj.ctlogs = [];
-        }
-        if (message.timestampAuthorities) {
-            obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
-        }
-        else {
-            obj.timestampAuthorities = [];
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
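
Every list-valued field in the TrustedRoot codec above is parsed with the same defensive pattern: an Array.isArray guard, a per-element fromJSON, and a [] default for anything else. A generic one-liner capturing that pattern (the helper name is illustrative):

    // Coerce an untrusted JSON field into an array of decoded items,
    // tolerating null/undefined/non-array input like the codecs above do.
    const listFromJSON = (value, decode) => Array.isArray(value) ? value.map(decode) : []

    console.log(listFromJSON(undefined, Number))  // []
    console.log(listFromJSON(['1', '2'], Number)) // [ 1, 2 ]
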
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
deleted file mode 100644
index 8a72b89761869..0000000000000
--- a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
+++ /dev/null
@@ -1,273 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
-/* eslint-disable */
-const sigstore_bundle_1 = require("./sigstore_bundle");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_trustroot_1 = require("./sigstore_trustroot");
-function createBaseCertificateIdentity() {
-    return { issuer: "", san: undefined, oids: [] };
-}
-exports.CertificateIdentity = {
-    fromJSON(object) {
-        return {
-            issuer: isSet(object.issuer) ? String(object.issuer) : "",
-            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
-            oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.issuer !== undefined && (obj.issuer = message.issuer);
-        message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined);
-        if (message.oids) {
-            obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined);
-        }
-        else {
-            obj.oids = [];
-        }
-        return obj;
-    },
-};
-function createBaseCertificateIdentities() {
-    return { identities: [] };
-}
-exports.CertificateIdentities = {
-    fromJSON(object) {
-        return {
-            identities: Array.isArray(object?.identities)
-                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.identities) {
-            obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined);
-        }
-        else {
-            obj.identities = [];
-        }
-        return obj;
-    },
-};
-function createBasePublicKeyIdentities() {
-    return { publicKeys: [] };
-}
-exports.PublicKeyIdentities = {
-    fromJSON(object) {
-        return {
-            publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.publicKeys) {
-            obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined);
-        }
-        else {
-            obj.publicKeys = [];
-        }
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions() {
-    return { signers: undefined, tlogOptions: undefined, ctlogOptions: undefined, tsaOptions: undefined };
-}
-exports.ArtifactVerificationOptions = {
-    fromJSON(object) {
-        return {
-            signers: isSet(object.certificateIdentities)
-                ? {
-                    $case: "certificateIdentities",
-                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
-                }
-                : isSet(object.publicKeys)
-                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
-                    : undefined,
-            tlogOptions: isSet(object.tlogOptions)
-                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
-                : undefined,
-            ctlogOptions: isSet(object.ctlogOptions)
-                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
-                : undefined,
-            tsaOptions: isSet(object.tsaOptions)
-                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.signers?.$case === "certificateIdentities" &&
-            (obj.certificateIdentities = message.signers?.certificateIdentities
-                ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities)
-                : undefined);
-        message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys
-            ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys)
-            : undefined);
-        message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions
-            ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions)
-            : undefined);
-        message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions
-            ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions)
-            : undefined);
-        message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions
-            ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions)
-            : undefined);
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions_TlogOptions() {
-    return { threshold: 0, performOnlineVerification: false, disable: false };
-}
-exports.ArtifactVerificationOptions_TlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
-            performOnlineVerification: isSet(object.performOnlineVerification)
-                ? Boolean(object.performOnlineVerification)
-                : false,
-            disable: isSet(object.disable) ? Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
-        message.performOnlineVerification !== undefined &&
-            (obj.performOnlineVerification = message.performOnlineVerification);
-        message.disable !== undefined && (obj.disable = message.disable);
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions_CtlogOptions() {
-    return { threshold: 0, detachedSct: false, disable: false };
-}
-exports.ArtifactVerificationOptions_CtlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
-            detachedSct: isSet(object.detachedSct) ? Boolean(object.detachedSct) : false,
-            disable: isSet(object.disable) ? Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
-        message.detachedSct !== undefined && (obj.detachedSct = message.detachedSct);
-        message.disable !== undefined && (obj.disable = message.disable);
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() {
-    return { threshold: 0, disable: false };
-}
-exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
-        message.disable !== undefined && (obj.disable = message.disable);
-        return obj;
-    },
-};
-function createBaseArtifact() {
-    return { data: undefined };
-}
-exports.Artifact = {
-    fromJSON(object) {
-        return {
-            data: isSet(object.artifactUri)
-                ? { $case: "artifactUri", artifactUri: String(object.artifactUri) }
-                : isSet(object.artifact)
-                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri);
-        message.data?.$case === "artifact" &&
-            (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined);
-        return obj;
-    },
-};
-function createBaseInput() {
-    return {
-        artifactTrustRoot: undefined,
-        artifactVerificationOptions: undefined,
-        bundle: undefined,
-        artifact: undefined,
-    };
-}
-exports.Input = {
-    fromJSON(object) {
-        return {
-            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
-            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
-                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
-                : undefined,
-            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
-            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.artifactTrustRoot !== undefined &&
-            (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined);
-        message.artifactVerificationOptions !== undefined &&
-            (obj.artifactVerificationOptions = message.artifactVerificationOptions
-                ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions)
-                : undefined);
-        message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined);
-        message.artifact !== undefined && (obj.artifact = message.artifact ? exports.Artifact.toJSON(message.artifact) : undefined);
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js
deleted file mode 100644
index eafb768c48fca..0000000000000
--- a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js
+++ /dev/null
@@ -1,37 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-__exportStar(require("./__generated__/envelope"), exports);
-__exportStar(require("./__generated__/sigstore_bundle"), exports);
-__exportStar(require("./__generated__/sigstore_common"), exports);
-__exportStar(require("./__generated__/sigstore_rekor"), exports);
-__exportStar(require("./__generated__/sigstore_trustroot"), exports);
-__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json
deleted file mode 100644
index 450abb157f31a..0000000000000
--- a/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
-  "name": "@sigstore/protobuf-specs",
-  "version": "0.2.1",
-  "description": "code-signing for npm packages",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "scripts": {
-    "build": "tsc"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/protobuf-specs.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "bugs": {
-    "url": "https://github.com/sigstore/protobuf-specs/issues"
-  },
-  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
-  "devDependencies": {
-    "@tsconfig/node14": "^1.0.3",
-    "@types/node": "^18.14.0",
-    "typescript": "^4.9.5"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  }
-}
diff --git a/node_modules/pacote/lib/registry.js b/node_modules/pacote/lib/registry.js
index 34d9b2b87f3f3..993fd3f08a6d9 100644
--- a/node_modules/pacote/lib/registry.js
+++ b/node_modules/pacote/lib/registry.js
@@ -8,7 +8,7 @@ const pickManifest = require('npm-pick-manifest')
 const ssri = require('ssri')
 const crypto = require('crypto')
 const npa = require('npm-package-arg')
-const { sigstore } = require('sigstore')
+const sigstore = require('sigstore')
 
 // Corgis are cute. 🐕🐶
 const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
@@ -299,7 +299,7 @@ class RegistryFetcher extends Fetcher {
                 tufCachePath: this.tufCache,
                 keySelector: publicKey ? () => publicKey.pemkey : undefined,
               }
-              await sigstore.verify(bundle, null, options)
+              await sigstore.verify(bundle, options)
             } catch (e) {
               throw Object.assign(new Error(
                 `${mani._id} failed to verify attestation: ${e.message}`
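
A note on the two pacote hunks above: sigstore v2 drops the `sigstore` namespace object and the positional payload argument to `verify`. A minimal sketch of the migrated call site, assuming v2's two-argument `verify`; the `bundle`, `publicKey`, and `tufCache` values are hypothetical stand-ins:

    const sigstore = require('sigstore') // v2: top-level exports, no `{ sigstore }` destructuring

    // Hypothetical inputs: a parsed attestation bundle, an object with a
    // `pemkey` field, and a TUF cache directory path.
    async function verifyAttestation (bundle, publicKey, tufCache) {
      const options = {
        tufCachePath: tufCache,
        keySelector: publicKey ? () => publicKey.pemkey : undefined,
      }
      // v1: await sigstore.verify(bundle, null, options)
      await sigstore.verify(bundle, options) // v2: detached-payload argument removed
    }
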
diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json
index 62864ae4500a2..dffd3aecf9d13 100644
--- a/node_modules/pacote/package.json
+++ b/node_modules/pacote/package.json
@@ -1,6 +1,6 @@
 {
   "name": "pacote",
-  "version": "17.0.1",
+  "version": "17.0.2",
   "description": "JavaScript package downloader",
   "author": "GitHub Inc.",
   "bin": {
@@ -59,7 +59,7 @@
     "promise-retry": "^2.0.1",
     "read-package-json": "^7.0.0",
     "read-package-json-fast": "^3.0.0",
-    "sigstore": "^1.3.0",
+    "sigstore": "^2.0.0",
     "ssri": "^10.0.0",
     "tar": "^6.1.11"
   },
diff --git a/node_modules/sigstore/bin/sigstore.js b/node_modules/sigstore/bin/sigstore.js
deleted file mode 100755
index a07b7bdc1af95..0000000000000
--- a/node_modules/sigstore/bin/sigstore.js
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env node
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-require('../dist/cli').processArgv();
diff --git a/node_modules/sigstore/dist/ca/format.js b/node_modules/sigstore/dist/ca/format.js
deleted file mode 100644
index 6374243e80e02..0000000000000
--- a/node_modules/sigstore/dist/ca/format.js
+++ /dev/null
@@ -1,20 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.toCertificateRequest = void 0;
-function toCertificateRequest(identityToken, publicKey, challenge) {
-    return {
-        credentials: {
-            oidcIdentityToken: identityToken,
-        },
-        publicKeyRequest: {
-            publicKey: {
-                algorithm: 'ECDSA',
-                content: publicKey
-                    .export({ format: 'pem', type: 'spki' })
-                    .toString('ascii'),
-            },
-            proofOfPossession: challenge.toString('base64'),
-        },
-    };
-}
-exports.toCertificateRequest = toCertificateRequest;
diff --git a/node_modules/sigstore/dist/ca/index.js b/node_modules/sigstore/dist/ca/index.js
deleted file mode 100644
index 340dd46609aad..0000000000000
--- a/node_modules/sigstore/dist/ca/index.js
+++ /dev/null
@@ -1,39 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CAClient = void 0;
-const error_1 = require("../error");
-const external_1 = require("../external");
-const format_1 = require("./format");
-class CAClient {
-    constructor(options) {
-        this.fulcio = new external_1.Fulcio({
-            baseURL: options.fulcioBaseURL,
-            retry: options.retry,
-            timeout: options.timeout,
-        });
-    }
-    async createSigningCertificate(identityToken, publicKey, challenge) {
-        const request = (0, format_1.toCertificateRequest)(identityToken, publicKey, challenge);
-        try {
-            const resp = await this.fulcio.createSigningCertificate(request);
-            // Account for the fact that the response may contain either a
-            // signedCertificateEmbeddedSct or a signedCertificateDetachedSct.
-            const cert = resp.signedCertificateEmbeddedSct
-                ? resp.signedCertificateEmbeddedSct
-                : resp.signedCertificateDetachedSct;
-            // Return the first certificate in the chain, which is the signing
-            // certificate. Specifically not returning the rest of the chain to
-            // mitigate the risk of errors when verifying the certificate chain.
-            // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
-            return cert.chain.certificates.slice(0, 1);
-        }
-        catch (err) {
-            throw new error_1.InternalError({
-                code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR',
-                message: 'error creating signing certificate',
-                cause: err,
-            });
-        }
-    }
-}
-exports.CAClient = CAClient;
diff --git a/node_modules/sigstore/dist/ca/verify/signer.js b/node_modules/sigstore/dist/ca/verify/signer.js
index 51d722d7631ee..6f47651b944c9 100644
--- a/node_modules/sigstore/dist/ca/verify/signer.js
+++ b/node_modules/sigstore/dist/ca/verify/signer.js
@@ -54,7 +54,10 @@ function verifySignerIdentity(signingCert, identities) {
     // specified identities
     const signerVerified = identities.identities.some((identity) => verifyIdentity(signingCert, identity));
     if (!signerVerified) {
-        throw new error_1.PolicyError('Certificate issued to untrusted signer');
+        throw new error_1.PolicyError({
+            code: 'UNTRUSTED_SIGNER_ERROR',
+            message: 'Certificate issued to untrusted signer',
+        });
     }
 }
 exports.verifySignerIdentity = verifySignerIdentity;
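
For context, a hedged sketch of what the new `{ code, message }` constructor shape means for callers; `PolicyError` is re-exported from the package root (see the index.js hunk further down):

    const { PolicyError } = require('sigstore')

    try {
      throw new PolicyError({
        code: 'UNTRUSTED_SIGNER_ERROR',
        message: 'Certificate issued to untrusted signer',
      })
    } catch (e) {
      // BaseError now copies `code` onto the instance, so policy failures
      // can be matched by code instead of by message string.
      console.error(e.name, e.code) // PolicyError UNTRUSTED_SIGNER_ERROR
    }
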
diff --git a/node_modules/sigstore/dist/cli/index.js b/node_modules/sigstore/dist/cli/index.js
deleted file mode 100644
index 6015cd9df74ea..0000000000000
--- a/node_modules/sigstore/dist/cli/index.js
+++ /dev/null
@@ -1,125 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.processArgv = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const fs_1 = __importDefault(require("fs"));
-const index_1 = require("../index");
-const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json';
-async function cli(args) {
-    switch (args[0]) {
-        case 'sign':
-            await sign(args[1]);
-            break;
-        case 'attest':
-            await attest(args[1], args[2]);
-            break;
-        case 'verify':
-            await verify(args[1], args[2]);
-            break;
-        case 'version':
-        case '-version':
-        case '--version':
-        case '-v':
-            // eslint-disable-next-line @typescript-eslint/no-var-requires
-            console.log(require('../../package.json').version);
-            break;
-        case 'help':
-        case '--help':
-        case '-h':
-        case '-?':
-            printUsage();
-            break;
-        default:
-            throw 'Unknown command';
-    }
-}
-function printUsage() {
-    console.log(`sigstore  
-
-  Usage:
-
-  sigstore sign         sign an artifact
-  sigstore attest       sign an artifact using dsse (Dead Simple Signing Envelope)
-  sigstore verify       verify an artifact
-  sigstore version      print version information
-  sigstore help         print help information
-  `);
-}
-function printRekorEntry(bundle, options) {
-    let url;
-    if (options.rekorURL === index_1.sigstore.DEFAULT_REKOR_URL) {
-        url = `https://search.sigstore.dev`;
-    }
-    else {
-        url = `${options.rekorURL}/api/v1/log/entries`;
-    }
-    const logIndex = bundle.verificationMaterial?.tlogEntries[0].logIndex;
-    console.error(`Created entry at index ${logIndex}, available at`);
-    console.error(`${url}?logIndex=${logIndex}`);
-}
-// TODO: Allow customizing these options
-const signOptions = {
-    oidcClientID: 'sigstore',
-    oidcIssuer: 'https://oauth2.sigstore.dev/auth',
-    oidcRedirectURL: process.env.OIDC_REDIRECT_URL,
-    rekorURL: index_1.sigstore.DEFAULT_REKOR_URL,
-};
-async function sign(artifactPath) {
-    const buffer = fs_1.default.readFileSync(artifactPath);
-    const bundle = await index_1.sigstore.sign(buffer, signOptions);
-    printRekorEntry(bundle, signOptions);
-    console.log(JSON.stringify(bundle));
-}
-async function attest(artifactPath, payloadType = INTOTO_PAYLOAD_TYPE) {
-    const buffer = fs_1.default.readFileSync(artifactPath);
-    const bundle = await index_1.sigstore.attest(buffer, payloadType, signOptions);
-    printRekorEntry(bundle, signOptions);
-    console.log(JSON.stringify(bundle));
-}
-async function verify(bundlePath, artifactPath) {
-    let payload = undefined;
-    if (artifactPath) {
-        payload = fs_1.default.readFileSync(artifactPath);
-    }
-    const bundleFile = fs_1.default.readFileSync(bundlePath);
-    const bundle = JSON.parse(bundleFile.toString('utf-8'));
-    try {
-        await index_1.sigstore.verify(bundle, payload, {});
-        console.error('Verified OK');
-    }
-    catch (e) {
-        console.error('Verification failed');
-        if (e instanceof Error) {
-            console.error('Error: ' + e.message);
-        }
-        process.exit(1);
-    }
-}
-async function processArgv() {
-    try {
-        await cli(process.argv.slice(2));
-        process.exit(0);
-    }
-    catch (e) {
-        console.error(e);
-        process.exit(1);
-    }
-}
-exports.processArgv = processArgv;
diff --git a/node_modules/sigstore/dist/config.js b/node_modules/sigstore/dist/config.js
index 1a22c5fef313b..65b20fbaa9829 100644
--- a/node_modules/sigstore/dist/config.js
+++ b/node_modules/sigstore/dist/config.js
@@ -22,11 +22,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
     __setModuleDefault(result, mod);
     return result;
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.identityProviders = exports.artifactVerificationOptions = exports.createTSAClient = exports.createTLogClient = exports.createCAClient = exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = void 0;
+exports.artifactVerificationOptions = exports.createBundleBuilder = exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = void 0;
 /*
 Copyright 2023 The Sigstore Authors.
 
@@ -42,41 +39,74 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
-const ca_1 = require("./ca");
-const identity_1 = __importDefault(require("./identity"));
-const tlog_1 = require("./tlog");
-const tsa_1 = require("./tsa");
+const sign_1 = require("@sigstore/sign");
 const sigstore = __importStar(require("./types/sigstore"));
 exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev';
 exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev';
 exports.DEFAULT_RETRY = { retries: 2 };
 exports.DEFAULT_TIMEOUT = 5000;
-function createCAClient(options) {
-    return new ca_1.CAClient({
+function createBundleBuilder(bundleType, options) {
+    const bundlerOptions = {
+        signer: initSigner(options),
+        witnesses: initWitnesses(options),
+    };
+    switch (bundleType) {
+        case 'messageSignature':
+            return new sign_1.MessageSignatureBundleBuilder(bundlerOptions);
+        case 'dsseEnvelope':
+            return new sign_1.DSSEBundleBuilder(bundlerOptions);
+    }
+}
+exports.createBundleBuilder = createBundleBuilder;
+// Instantiate the FulcioSigner based on the supplied options.
+function initSigner(options) {
+    return new sign_1.FulcioSigner({
         fulcioBaseURL: options.fulcioURL || exports.DEFAULT_FULCIO_URL,
+        identityProvider: options.identityProvider || initIdentityProvider(options),
         retry: options.retry ?? exports.DEFAULT_RETRY,
         timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
     });
 }
-exports.createCAClient = createCAClient;
-function createTLogClient(options) {
-    return new tlog_1.TLogClient({
-        rekorBaseURL: options.rekorURL || exports.DEFAULT_REKOR_URL,
-        retry: options.retry ?? exports.DEFAULT_RETRY,
-        timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
-    });
+// Instantiate an identity provider based on the supplied options. If an
+// explicit identity token is provided, use that. Otherwise, use the CI
+// context provider.
+function initIdentityProvider(options) {
+    const token = options.identityToken;
+    if (token) {
+        return { getToken: () => Promise.resolve(token) };
+    }
+    else {
+        return new sign_1.CIContextProvider('sigstore');
+    }
 }
-exports.createTLogClient = createTLogClient;
-function createTSAClient(options) {
-    return options.tsaServerURL
-        ? new tsa_1.TSAClient({
+// Instantiate a collection of witnesses based on the supplied options.
+function initWitnesses(options) {
+    const witnesses = [];
+    if (isRekorEnabled(options)) {
+        witnesses.push(new sign_1.RekorWitness({
+            rekorBaseURL: options.rekorURL || exports.DEFAULT_REKOR_URL,
+            fetchOnConflict: false,
+            retry: options.retry ?? exports.DEFAULT_RETRY,
+            timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
+        }));
+    }
+    if (isTSAEnabled(options)) {
+        witnesses.push(new sign_1.TSAWitness({
             tsaBaseURL: options.tsaServerURL,
             retry: options.retry ?? exports.DEFAULT_RETRY,
             timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
-        })
-        : undefined;
+        }));
+    }
+    return witnesses;
+}
+// Type assertion to ensure that Rekor is enabled
+function isRekorEnabled(options) {
+    return options.tlogUpload !== false;
+}
+// Type assertion to ensure that TSA is enabled
+function isTSAEnabled(options) {
+    return options.tsaServerURL !== undefined;
 }
-exports.createTSAClient = createTSAClient;
 // Assembles the ArtifactVerificationOptions from the supplied VerifyOptions.
 function artifactVerificationOptions(options) {
     // The trusted signers are only used if the options contain a certificate
@@ -102,7 +132,7 @@ function artifactVerificationOptions(options) {
                 },
             };
         }
-        const oids = Object.entries(options.certificateOIDs || {}).map(([oid, value]) => ({
+        const oids = Object.entries(options.certificateOIDs || /* istanbul ignore next */ {}).map(([oid, value]) => ({
             oid: { id: oid.split('.').map((s) => parseInt(s, 10)) },
             value: Buffer.from(value),
         }));
@@ -122,41 +152,16 @@ function artifactVerificationOptions(options) {
     // Construct the artifact verification options w/ defaults
     return {
         ctlogOptions: {
-            disable: false,
-            threshold: options.ctLogThreshold || 1,
+            disable: options.ctLogThreshold === 0,
+            threshold: options.ctLogThreshold ?? 1,
             detachedSct: false,
         },
         tlogOptions: {
-            disable: false,
-            threshold: options.tlogThreshold || 1,
+            disable: options.tlogThreshold === 0,
+            threshold: options.tlogThreshold ?? 1,
             performOnlineVerification: false,
         },
         signers,
     };
 }
 exports.artifactVerificationOptions = artifactVerificationOptions;
-// Translates the IdentityProviderOptions into a list of Providers which
-// should be queried to retrieve an identity token.
-function identityProviders(options) {
-    const idps = [];
-    const token = options.identityToken;
-// If an explicit identity token is provided, use that. Set up a dummy
-    // provider that just returns the token. Otherwise, setup the CI context
-    // provider and (optionally) the OAuth provider.
-    if (token) {
-        idps.push({ getToken: () => Promise.resolve(token) });
-    }
-    else {
-        idps.push(identity_1.default.ciContextProvider());
-        if (options.oidcIssuer && options.oidcClientID) {
-            idps.push(identity_1.default.oauthProvider({
-                issuer: options.oidcIssuer,
-                clientID: options.oidcClientID,
-                clientSecret: options.oidcClientSecret,
-                redirectURL: options.oidcRedirectURL,
-            }));
-        }
-    }
-    return idps;
-}
-exports.identityProviders = identityProviders;
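
Taken together, these config.js hunks swap the v1 CA/TLog/TSA client factories for the bundle-builder pattern from @sigstore/sign. A rough usage sketch under the option names handled above; note that `sigstore/dist/config` is an internal module path, reached into here purely for illustration:

    // Internal module, not public API; shown only to illustrate the wiring.
    const { createBundleBuilder } = require('sigstore/dist/config')

    const options = {
      identityToken: process.env.SIGSTORE_ID_TOKEN, // otherwise CIContextProvider('sigstore') is used
      tlogUpload: true,       // anything but `false` keeps the RekorWitness
      // tsaServerURL: '...', // setting this would add a TSAWitness as well
    }

    // 'messageSignature' -> MessageSignatureBundleBuilder
    // 'dsseEnvelope'     -> DSSEBundleBuilder
    const bundler = createBundleBuilder('messageSignature', options)
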
diff --git a/node_modules/sigstore/dist/error.js b/node_modules/sigstore/dist/error.js
index cee15dff90b61..b0a7dbc83f710 100644
--- a/node_modules/sigstore/dist/error.js
+++ b/node_modules/sigstore/dist/error.js
@@ -1,6 +1,4 @@
 "use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.InternalError = exports.PolicyError = exports.ValidationError = exports.VerificationError = void 0;
 /*
 Copyright 2023 The Sigstore Authors.
 
@@ -16,27 +14,22 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
-/* eslint-disable @typescript-eslint/no-explicit-any */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PolicyError = exports.VerificationError = void 0;
 class BaseError extends Error {
-    constructor(message, cause) {
+    constructor({ code, message, cause, }) {
         super(message);
         this.name = this.constructor.name;
+        this.code = code;
         this.cause = cause;
     }
 }
 class VerificationError extends BaseError {
+    constructor(message) {
+        super({ code: 'VERIFICATION_ERROR', message });
+    }
 }
 exports.VerificationError = VerificationError;
-class ValidationError extends BaseError {
-}
-exports.ValidationError = ValidationError;
 class PolicyError extends BaseError {
 }
 exports.PolicyError = PolicyError;
-class InternalError extends BaseError {
-    constructor({ code, message, cause, }) {
-        super(message, cause);
-        this.code = code;
-    }
-}
-exports.InternalError = InternalError;
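
Net effect of this hunk: error.js now defines only `PolicyError` and `VerificationError`, each carrying a machine-readable `code`, while `ValidationError` and `InternalError` move out to @sigstore/bundle and @sigstore/sign (both re-exported from the root, per the index.js hunk below). A small sketch of the new surface:

    const { VerificationError } = require('sigstore')

    const err = new VerificationError('signature verification failed')
    console.log(err.name) // 'VerificationError'
    console.log(err.code) // 'VERIFICATION_ERROR', fixed by the new constructor
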
diff --git a/node_modules/sigstore/dist/external/error.js b/node_modules/sigstore/dist/external/error.js
deleted file mode 100644
index d1e1c3df8a878..0000000000000
--- a/node_modules/sigstore/dist/external/error.js
+++ /dev/null
@@ -1,21 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.checkStatus = exports.HTTPError = void 0;
-class HTTPError extends Error {
-    constructor(response) {
-        super(`HTTP Error: ${response.status} ${response.statusText}`);
-        this.response = response;
-        this.statusCode = response.status;
-        this.location = response.headers?.get('Location') || undefined;
-    }
-}
-exports.HTTPError = HTTPError;
-const checkStatus = (response) => {
-    if (response.ok) {
-        return response;
-    }
-    else {
-        throw new HTTPError(response);
-    }
-};
-exports.checkStatus = checkStatus;
diff --git a/node_modules/sigstore/dist/external/fulcio.js b/node_modules/sigstore/dist/external/fulcio.js
deleted file mode 100644
index aeb48d58d8d83..0000000000000
--- a/node_modules/sigstore/dist/external/fulcio.js
+++ /dev/null
@@ -1,51 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Fulcio = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
-const util_1 = require("../util");
-const error_1 = require("./error");
-/**
- * Fulcio API client.
- */
-class Fulcio {
-    constructor(options) {
-        this.fetch = make_fetch_happen_1.default.defaults({
-            retry: options.retry,
-            timeout: options.timeout,
-            headers: {
-                'Content-Type': 'application/json',
-                'User-Agent': util_1.ua.getUserAgent(),
-            },
-        });
-        this.baseUrl = options.baseURL;
-    }
-    async createSigningCertificate(request) {
-        const url = `${this.baseUrl}/api/v2/signingCert`;
-        const response = await this.fetch(url, {
-            method: 'POST',
-            body: JSON.stringify(request),
-        });
-        (0, error_1.checkStatus)(response);
-        const data = await response.json();
-        return data;
-    }
-}
-exports.Fulcio = Fulcio;
diff --git a/node_modules/sigstore/dist/external/index.js b/node_modules/sigstore/dist/external/index.js
deleted file mode 100644
index f40816e9b7ca4..0000000000000
--- a/node_modules/sigstore/dist/external/index.js
+++ /dev/null
@@ -1,26 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TimestampAuthority = exports.Rekor = exports.Fulcio = exports.HTTPError = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-var error_1 = require("./error");
-Object.defineProperty(exports, "HTTPError", { enumerable: true, get: function () { return error_1.HTTPError; } });
-var fulcio_1 = require("./fulcio");
-Object.defineProperty(exports, "Fulcio", { enumerable: true, get: function () { return fulcio_1.Fulcio; } });
-var rekor_1 = require("./rekor");
-Object.defineProperty(exports, "Rekor", { enumerable: true, get: function () { return rekor_1.Rekor; } });
-var tsa_1 = require("./tsa");
-Object.defineProperty(exports, "TimestampAuthority", { enumerable: true, get: function () { return tsa_1.TimestampAuthority; } });
diff --git a/node_modules/sigstore/dist/external/rekor.js b/node_modules/sigstore/dist/external/rekor.js
deleted file mode 100644
index b6bbeb6f20793..0000000000000
--- a/node_modules/sigstore/dist/external/rekor.js
+++ /dev/null
@@ -1,115 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Rekor = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
-const util_1 = require("../util");
-const error_1 = require("./error");
-/**
- * Rekor API client.
- */
-class Rekor {
-    constructor(options) {
-        this.fetch = make_fetch_happen_1.default.defaults({
-            retry: options.retry,
-            timeout: options.timeout,
-            headers: {
-                Accept: 'application/json',
-                'User-Agent': util_1.ua.getUserAgent(),
-            },
-        });
-        this.baseUrl = options.baseURL;
-    }
-    /**
-     * Create a new entry in the Rekor log.
-     * @param proposedEntry {ProposedEntry} Data to create a new entry
-     * @returns {Promise} The created entry
-     */
-    async createEntry(proposedEntry) {
-        const url = `${this.baseUrl}/api/v1/log/entries`;
-        const response = await this.fetch(url, {
-            method: 'POST',
-            headers: { 'Content-Type': 'application/json' },
-            body: JSON.stringify(proposedEntry),
-        });
-        (0, error_1.checkStatus)(response);
-        const data = await response.json();
-        return entryFromResponse(data);
-    }
-    /**
-     * Get an entry from the Rekor log.
-     * @param uuid {string} The UUID of the entry to retrieve
-     * @returns {Promise} The retrieved entry
-     */
-    async getEntry(uuid) {
-        const url = `${this.baseUrl}/api/v1/log/entries/${uuid}`;
-        const response = await this.fetch(url);
-        (0, error_1.checkStatus)(response);
-        const data = await response.json();
-        return entryFromResponse(data);
-    }
-    /**
-     * Search the Rekor log index for entries matching the given query.
-     * @param opts {SearchIndex} Options to search the Rekor log
-     * @returns {Promise} UUIDs of matching entries
-     */
-    async searchIndex(opts) {
-        const url = `${this.baseUrl}/api/v1/index/retrieve`;
-        const response = await this.fetch(url, {
-            method: 'POST',
-            body: JSON.stringify(opts),
-            headers: { 'Content-Type': 'application/json' },
-        });
-        (0, error_1.checkStatus)(response);
-        const data = await response.json();
-        return data;
-    }
-    /**
-     * Search the Rekor logs for entries matching the given query.
-     * @param opts {SearchLogQuery} Query to search the Rekor log
-     * @returns {Promise} List of matching entries
-     */
-    async searchLog(opts) {
-        const url = `${this.baseUrl}/api/v1/log/entries/retrieve`;
-        const response = await this.fetch(url, {
-            method: 'POST',
-            body: JSON.stringify(opts),
-            headers: { 'Content-Type': 'application/json' },
-        });
-        (0, error_1.checkStatus)(response);
-        const rawData = await response.json();
-        const data = rawData.map((d) => entryFromResponse(d));
-        return data;
-    }
-}
-exports.Rekor = Rekor;
-// Unpack the response from the Rekor API into a more convenient format.
-function entryFromResponse(data) {
-    const entries = Object.entries(data);
-    if (entries.length != 1) {
-        throw new Error('Received multiple entries in Rekor response');
-    }
-    // Grab UUID and entry data from the response
-    const [uuid, entry] = entries[0];
-    return {
-        ...entry,
-        uuid,
-    };
-}
diff --git a/node_modules/sigstore/dist/external/tsa.js b/node_modules/sigstore/dist/external/tsa.js
deleted file mode 100644
index 5277d7d3f9707..0000000000000
--- a/node_modules/sigstore/dist/external/tsa.js
+++ /dev/null
@@ -1,47 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TimestampAuthority = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
-const util_1 = require("../util");
-const error_1 = require("./error");
-class TimestampAuthority {
-    constructor(options) {
-        this.fetch = make_fetch_happen_1.default.defaults({
-            retry: options.retry,
-            timeout: options.timeout,
-            headers: {
-                'Content-Type': 'application/json',
-                'User-Agent': util_1.ua.getUserAgent(),
-            },
-        });
-        this.baseUrl = options.baseURL;
-    }
-    async createTimestamp(request) {
-        const url = `${this.baseUrl}/api/v1/timestamp`;
-        const response = await this.fetch(url, {
-            method: 'POST',
-            body: JSON.stringify(request),
-        });
-        (0, error_1.checkStatus)(response);
-        return response.buffer();
-    }
-}
-exports.TimestampAuthority = TimestampAuthority;
diff --git a/node_modules/sigstore/dist/identity/ci.js b/node_modules/sigstore/dist/identity/ci.js
deleted file mode 100644
index 0f01e1baaec57..0000000000000
--- a/node_modules/sigstore/dist/identity/ci.js
+++ /dev/null
@@ -1,75 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CIContextProvider = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
-const util_1 = require("../util");
-// Collection of all the CI-specific providers we have implemented
-const providers = [getGHAToken, getEnv];
-/**
- * CIContextProvider is a composite identity provider which will iterate
- * over all of the CI-specific providers and return the token from the first
- * one that resolves.
- */
-class CIContextProvider {
-    constructor(audience) {
-        this.audience = audience;
-    }
-    // Invoke all registered ProviderFuncs and return the value of whichever one
-    // resolves first.
-    async getToken() {
-        return util_1.promise
-            .promiseAny(providers.map((getToken) => getToken(this.audience)))
-            .catch(() => Promise.reject('CI: no tokens available'));
-    }
-}
-exports.CIContextProvider = CIContextProvider;
-/**
- * getGHAToken can retrieve an OIDC token when running in a GitHub Actions
- * workflow
- */
-async function getGHAToken(audience) {
-    // Check to see if we're running in GitHub Actions
-    if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL ||
-        !process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN) {
-        return Promise.reject('no token available');
-    }
-    // Construct URL to request token w/ appropriate audience
-    const url = new URL(process.env.ACTIONS_ID_TOKEN_REQUEST_URL);
-    url.searchParams.append('audience', audience);
-    const response = await (0, make_fetch_happen_1.default)(url.href, {
-        retry: 2,
-        headers: {
-            Accept: 'application/json',
-            Authorization: `Bearer ${process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN}`,
-        },
-    });
-    return response.json().then((data) => data.value);
-}
-/**
- * getEnv can retrieve an OIDC token from an environment variable.
- * This matches the behavior of https://github.com/sigstore/cosign/tree/main/pkg/providers/envvar
- */
-async function getEnv() {
-    if (!process.env.SIGSTORE_ID_TOKEN) {
-        return Promise.reject('no token available');
-    }
-    return process.env.SIGSTORE_ID_TOKEN;
-}
diff --git a/node_modules/sigstore/dist/identity/index.js b/node_modules/sigstore/dist/identity/index.js
deleted file mode 100644
index 351d607106700..0000000000000
--- a/node_modules/sigstore/dist/identity/index.js
+++ /dev/null
@@ -1,51 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const ci_1 = require("./ci");
-const issuer_1 = require("./issuer");
-const oauth_1 = require("./oauth");
-/**
- * oauthProvider returns a new Provider instance which attempts to retrieve
- * an identity token from the configured OAuth2 issuer.
- *
- * @param issuer Base URL of the issuer
- * @param clientID Client ID for the issuer
- * @param clientSecret Client secret for the issuer (optional)
- * @returns {Provider}
- */
-function oauthProvider(options) {
-    return new oauth_1.OAuthProvider({
-        issuer: new issuer_1.Issuer(options.issuer),
-        clientID: options.clientID,
-        clientSecret: options.clientSecret,
-        redirectURL: options.redirectURL,
-    });
-}
-/**
- * ciContextProvider returns a new Provider instance which attempts to retrieve
- * an identity token from the CI context.
- *
- * @param audience audience claim for the generated token
- * @returns {Provider}
- */
-function ciContextProvider(audience = 'sigstore') {
-    return new ci_1.CIContextProvider(audience);
-}
-exports.default = {
-    ciContextProvider,
-    oauthProvider,
-};
diff --git a/node_modules/sigstore/dist/identity/issuer.js b/node_modules/sigstore/dist/identity/issuer.js
deleted file mode 100644
index 2bf6c20f34932..0000000000000
--- a/node_modules/sigstore/dist/identity/issuer.js
+++ /dev/null
@@ -1,53 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Issuer = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
-// Standard endpoint for retrieving OpenID configuration information
-const OPENID_CONFIG_PATH = '/.well-known/openid-configuration';
-/**
- * The Issuer represents a single OAuth2 provider.
- *
- * The Issuer is configured with a provider's base OAuth2 endpoint which is
- * used to retrieve the associated configuration information.
- */
-class Issuer {
-    constructor(baseURL) {
-        this.baseURL = baseURL;
-        this.fetch = make_fetch_happen_1.default.defaults({ retry: 2 });
-    }
-    async authEndpoint() {
-        if (!this.config) {
-            this.config = await this.loadOpenIDConfig();
-        }
-        return this.config.authorization_endpoint;
-    }
-    async tokenEndpoint() {
-        if (!this.config) {
-            this.config = await this.loadOpenIDConfig();
-        }
-        return this.config.token_endpoint;
-    }
-    async loadOpenIDConfig() {
-        const url = `${this.baseURL}${OPENID_CONFIG_PATH}`;
-        return this.fetch(url).then((res) => res.json());
-    }
-}
-exports.Issuer = Issuer;
diff --git a/node_modules/sigstore/dist/identity/oauth.js b/node_modules/sigstore/dist/identity/oauth.js
deleted file mode 100644
index 7cb5a00cdb694..0000000000000
--- a/node_modules/sigstore/dist/identity/oauth.js
+++ /dev/null
@@ -1,197 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.OAuthProvider = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const assert_1 = __importDefault(require("assert"));
-const child_process_1 = __importDefault(require("child_process"));
-const http_1 = __importDefault(require("http"));
-const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
-const url_1 = require("url");
-const util_1 = require("../util");
-class OAuthProvider {
-    constructor(options) {
-        this.clientID = options.clientID;
-        this.clientSecret = options.clientSecret || '';
-        this.issuer = options.issuer;
-        this.redirectURI = options.redirectURL;
-        this.codeVerifier = generateRandomString(32);
-        this.state = generateRandomString(16);
-    }
-    async getToken() {
-        const authCode = await this.initiateAuthRequest();
-        return this.getIDToken(authCode);
-    }
-    // Initiates the authorization request. This will start an HTTP server to
-    // receive the post-auth redirect and then open the user's default browser to
-    // the provider's authorization page.
-    async initiateAuthRequest() {
-        const server = http_1.default.createServer();
-        const sockets = new Set();
-        // Start server and wait till it is listening. If a redirect URL was
-        // provided, use that. Otherwise, use a random port and construct the
-        // redirect URL.
-        await new Promise((resolve) => {
-            if (this.redirectURI) {
-                const url = new url_1.URL(this.redirectURI);
-                server.listen(Number(url.port), url.hostname, resolve);
-            }
-            else {
-                server.listen(0, resolve);
-                // Get port the server is listening on and construct the server URL
-                const port = server.address().port;
-                this.redirectURI = `http://localhost:${port}`;
-            }
-        });
-        // Keep track of connections to the server so we can force a shutdown
-        server.on('connection', (socket) => {
-            sockets.add(socket);
-            socket.once('close', () => {
-                sockets.delete(socket);
-            });
-        });
-        const result = new Promise((resolve, reject) => {
-            // Set up handler for post-auth redirect
-            server.on('request', (req, res) => {
-                if (!req.url) {
-                    reject('invalid server request');
-                    return;
-                }
-                res.writeHead(200);
-                res.end('Auth Successful');
-                // Parse incoming request URL
-                const query = new url_1.URL(req.url, this.redirectURI).searchParams;
-                // Check to see if the state matches
-                if (query.get('state') !== this.state) {
-                    reject('invalid state value');
-                    return;
-                }
-                const authCode = query.get('code');
-                // Force-close any open connections to the server so we can get a
-                // clean shutdown
-                for (const socket of sockets) {
-                    socket.destroy();
-                    sockets.delete(socket);
-                }
-                // Return the auth code once we've shut down the server
-                server.close(() => {
-                    if (!authCode) {
-                        reject('authorization code not found');
-                    }
-                    else {
-                        resolve(authCode);
-                    }
-                });
-            });
-        });
-        try {
-            // Open browser to start authorization request
-            const authBaseURL = await this.issuer.authEndpoint();
-            const authURL = this.getAuthRequestURL(authBaseURL);
-            await this.openURL(authURL);
-        }
-        catch (err) {
-            // Prevent leaked server handler on error
-            server.close();
-            throw err;
-        }
-        return result;
-    }
-    // Uses the provided authorization code to retrieve the ID token from the
-    // provider
-    async getIDToken(authCode) {
-        (0, assert_1.default)(this.redirectURI);
-        const tokenEndpointURL = await this.issuer.tokenEndpoint();
-        const params = new url_1.URLSearchParams();
-        params.append('grant_type', 'authorization_code');
-        params.append('code', authCode);
-        params.append('redirect_uri', this.redirectURI);
-        params.append('code_verifier', this.codeVerifier);
-        const response = await (0, make_fetch_happen_1.default)(tokenEndpointURL, {
-            method: 'POST',
-            headers: { Authorization: `Basic ${this.getBasicAuthHeaderValue()}` },
-            body: params,
-        }).then((r) => r.json());
-        return response.id_token;
-    }
-    // Construct the basic auth header value from the client ID and secret
-    getBasicAuthHeaderValue() {
-        return util_1.encoding.base64Encode(`${this.clientID}:${this.clientSecret}`);
-    }
-    // Generate starting URL for authorization request
-    getAuthRequestURL(baseURL) {
-        const params = this.getAuthRequestParams();
-        return `${baseURL}?${params.toString()}`;
-    }
-    // Collect parameters for authorization request
-    getAuthRequestParams() {
-        (0, assert_1.default)(this.redirectURI);
-        const codeChallenge = this.getCodeChallenge();
-        return new url_1.URLSearchParams({
-            response_type: 'code',
-            client_id: this.clientID,
-            client_secret: this.clientSecret,
-            scope: 'openid email',
-            redirect_uri: this.redirectURI,
-            code_challenge: codeChallenge,
-            code_challenge_method: 'S256',
-            state: this.state,
-            nonce: generateRandomString(16),
-        });
-    }
-    // Generate code challenge for authorization request
-    getCodeChallenge() {
-        return util_1.encoding.base64URLEscape(util_1.crypto.hash(this.codeVerifier).toString('base64'));
-    }
-    // Open the supplied URL in the user's default browser
-    async openURL(url) {
-        return new Promise((resolve, reject) => {
-            let open = null;
-            let command = `"${url}"`;
-            switch (process.platform) {
-                case 'darwin':
-                    open = 'open';
-                    break;
-                case 'linux':
-                case 'freebsd':
-                case 'netbsd':
-                case 'openbsd':
-                    open = 'xdg-open';
-                    break;
-                case 'win32':
-                    open = 'start';
-                    command = `"" ${command}`;
-                    break;
-                default:
-                    return reject(`OAuth: unsupported platform: ${process.platform}`);
-            }
-            console.error(`Your browser will now be opened to: ${url}`);
-            child_process_1.default.exec(`${open} ${command}`, undefined, (err) => {
-                if (err) {
-                    reject(err);
-                }
-                else {
-                    resolve();
-                }
-            });
-        });
-    }
-}
-exports.OAuthProvider = OAuthProvider;
-// Generate random code verifier value
-function generateRandomString(len) {
-    return util_1.encoding.base64URLEscape(util_1.crypto.randomBytes(len).toString('base64'));
-}
diff --git a/node_modules/sigstore/dist/identity/provider.js b/node_modules/sigstore/dist/identity/provider.js
deleted file mode 100644
index c8ad2e549bdc6..0000000000000
--- a/node_modules/sigstore/dist/identity/provider.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/sigstore/dist/index.js b/node_modules/sigstore/dist/index.js
index 126fce58e45bd..d281e5b8d2ab7 100644
--- a/node_modules/sigstore/dist/index.js
+++ b/node_modules/sigstore/dist/index.js
@@ -1,27 +1,35 @@
 "use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.sigstore = void 0;
-exports.sigstore = __importStar(require("./sigstore"));
+exports.verify = exports.sign = exports.createVerifier = exports.attest = exports.VerificationError = exports.PolicyError = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.TUFError = exports.InternalError = exports.ValidationError = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var bundle_1 = require("@sigstore/bundle");
+Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return bundle_1.ValidationError; } });
+var sign_1 = require("@sigstore/sign");
+Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return sign_1.InternalError; } });
+var tuf_1 = require("@sigstore/tuf");
+Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return tuf_1.TUFError; } });
+var config_1 = require("./config");
+Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return config_1.DEFAULT_FULCIO_URL; } });
+Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return config_1.DEFAULT_REKOR_URL; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } });
+Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } });
+var sigstore_1 = require("./sigstore");
+Object.defineProperty(exports, "attest", { enumerable: true, get: function () { return sigstore_1.attest; } });
+Object.defineProperty(exports, "createVerifier", { enumerable: true, get: function () { return sigstore_1.createVerifier; } });
+Object.defineProperty(exports, "sign", { enumerable: true, get: function () { return sigstore_1.sign; } });
+Object.defineProperty(exports, "verify", { enumerable: true, get: function () { return sigstore_1.verify; } });
diff --git a/node_modules/sigstore/dist/sign.js b/node_modules/sigstore/dist/sign.js
deleted file mode 100644
index 96e6272750b49..0000000000000
--- a/node_modules/sigstore/dist/sign.js
+++ /dev/null
@@ -1,120 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signer = void 0;
-const sigstore = __importStar(require("./types/sigstore"));
-const util_1 = require("./util");
-class Signer {
-    constructor(options) {
-        this.identityProviders = [];
-        this.ca = options.ca;
-        this.tlog = options.tlog;
-        this.tsa = options.tsa;
-        this.identityProviders = options.identityProviders;
-        this.tlogUpload = options.tlogUpload ?? true;
-        this.signer = options.signer || this.signWithEphemeralKey.bind(this);
-    }
-    async signBlob(payload) {
-        // Get signature and verification material for payload
-        const sigMaterial = await this.signer(payload);
-        // Calculate artifact digest
-        const digest = util_1.crypto.hash(payload);
-        // Create a Rekor entry (if tlogUpload is enabled)
-        const entry = this.tlogUpload
-            ? await this.tlog.createMessageSignatureEntry(digest, sigMaterial)
-            : undefined;
-        return sigstore.toMessageSignatureBundle({
-            digest,
-            signature: sigMaterial,
-            tlogEntry: entry,
-            timestamp: this.tsa
-                ? await this.tsa.createTimestamp(sigMaterial.signature)
-                : undefined,
-        });
-    }
-    async signAttestation(payload, payloadType) {
-        // Pre-authentication encoding to be signed
-        const paeBuffer = util_1.dsse.preAuthEncoding(payloadType, payload);
-        // Get signature and verification material for pae
-        const sigMaterial = await this.signer(paeBuffer);
-        const envelope = {
-            payloadType,
-            payload: payload,
-            signatures: [
-                {
-                    keyid: sigMaterial.key?.id || '',
-                    sig: sigMaterial.signature,
-                },
-            ],
-        };
-        // Create a Rekor entry (if tlogUpload is enabled)
-        const entry = this.tlogUpload
-            ? await this.tlog.createDSSEEntry(envelope, sigMaterial)
-            : undefined;
-        return sigstore.toDSSEBundle({
-            envelope,
-            signature: sigMaterial,
-            tlogEntry: entry,
-            timestamp: this.tsa
-                ? await this.tsa.createTimestamp(sigMaterial.signature)
-                : undefined,
-        });
-    }
-    async signWithEphemeralKey(payload) {
-        // Create ephemeral key pair
-        const keypair = util_1.crypto.generateKeyPair();
-        // Retrieve identity token from one of the supplied identity providers
-        const identityToken = await this.getIdentityToken();
-        // Extract challenge claim from OIDC token
-        const subject = util_1.oidc.extractJWTSubject(identityToken);
-        // Construct challenge value by signing subject with private key
-        const challenge = util_1.crypto.signBlob(Buffer.from(subject), keypair.privateKey);
-        // Create signing certificate
-        const certificates = await this.ca.createSigningCertificate(identityToken, keypair.publicKey, challenge);
-        // Generate artifact signature
-        const signature = util_1.crypto.signBlob(payload, keypair.privateKey);
-        return {
-            signature,
-            certificates,
-            key: undefined,
-        };
-    }
-    async getIdentityToken() {
-        const aggErrs = [];
-        for (const provider of this.identityProviders) {
-            try {
-                const token = await provider.getToken();
-                if (token) {
-                    return token;
-                }
-            }
-            catch (err) {
-                aggErrs.push(err);
-            }
-        }
-        throw new Error(`Identity token providers failed: ${aggErrs}`);
-    }
-}
-exports.Signer = Signer;
diff --git a/node_modules/sigstore/dist/sigstore-utils.js b/node_modules/sigstore/dist/sigstore-utils.js
deleted file mode 100644
index dc75692f40bf0..0000000000000
--- a/node_modules/sigstore/dist/sigstore-utils.js
+++ /dev/null
@@ -1,80 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.createRekorEntry = exports.createDSSEEnvelope = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const config_1 = require("./config");
-const signature_1 = require("./types/signature");
-const sigstore = __importStar(require("./types/sigstore"));
-const util_1 = require("./util");
-async function createDSSEEnvelope(payload, payloadType, options) {
-    // Pre-authentication encoding to be signed
-    const paeBuffer = util_1.dsse.preAuthEncoding(payloadType, payload);
-    // Get signature and verification material for pae
-    const sigMaterial = await options.signer(paeBuffer);
-    const envelope = {
-        payloadType,
-        payload,
-        signatures: [
-            {
-                keyid: sigMaterial.key?.id || '',
-                sig: sigMaterial.signature,
-            },
-        ],
-    };
-    return sigstore.Envelope.toJSON(envelope);
-}
-exports.createDSSEEnvelope = createDSSEEnvelope;
-// Accepts a signed DSSE envelope and a PEM-encoded public key to be added to the
-// transparency log. Returns a Sigstore bundle suitable for offline verification.
-async function createRekorEntry(dsseEnvelope, publicKey, options = {}) {
-    const envelope = sigstore.Envelope.fromJSON(dsseEnvelope);
-    const tlog = (0, config_1.createTLogClient)(options);
-    const sigMaterial = (0, signature_1.extractSignatureMaterial)(envelope, publicKey);
-    const entry = await tlog.createDSSEEntry(envelope, sigMaterial, {
-        fetchOnConflict: true,
-    });
-    const bundle = sigstore.toDSSEBundle({
-        envelope,
-        signature: sigMaterial,
-        tlogEntry: entry,
-    });
-    return sigstore.bundleToJSON(bundle);
-}
-exports.createRekorEntry = createRekorEntry;
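For context on the preAuthEncoding call in the removed createDSSEEnvelope: DSSE signs a pre-authentication encoding of the payload rather than the payload itself. A minimal sketch of that encoding per the DSSE spec, standing in for the vendored util.dsse.preAuthEncoding (the function name and the in-toto payload type below are illustrative):

// DSSE PAE: "DSSEv1" SP LEN(type) SP type SP LEN(payload) SP payload,
// where LEN is the byte count rendered as ASCII decimal.
function preAuthEncoding(payloadType, payload) {
  const prefix = `DSSEv1 ${Buffer.byteLength(payloadType)} ${payloadType} ${payload.length} `;
  return Buffer.concat([Buffer.from(prefix, 'ascii'), payload]);
}

// e.g. the buffer handed to options.signer above:
const pae = preAuthEncoding('application/vnd.in-toto+json', Buffer.from('{}'));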
diff --git a/node_modules/sigstore/dist/sigstore.js b/node_modules/sigstore/dist/sigstore.js
index dca476dd29203..72770e4359f9e 100644
--- a/node_modules/sigstore/dist/sigstore.js
+++ b/node_modules/sigstore/dist/sigstore.js
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.tuf = exports.utils = exports.VerificationError = exports.ValidationError = exports.PolicyError = exports.InternalError = exports.createVerifier = exports.verify = exports.attest = exports.sign = void 0;
+exports.createVerifier = exports.verify = exports.attest = exports.sign = void 0;
 /*
 Copyright 2023 The Sigstore Authors.
 
@@ -39,60 +39,40 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
+const bundle_1 = require("@sigstore/bundle");
 const tuf = __importStar(require("@sigstore/tuf"));
 const config = __importStar(require("./config"));
-const sign_1 = require("./sign");
-const sigstore = __importStar(require("./types/sigstore"));
 const verify_1 = require("./verify");
-async function sign(payload, options = {}) {
-    const ca = config.createCAClient(options);
-    const tlog = config.createTLogClient(options);
-    const idps = config.identityProviders(options);
-    const signer = new sign_1.Signer({
-        ca,
-        tlog,
-        identityProviders: options.identityProvider
-            ? [options.identityProvider]
-            : idps,
-        tlogUpload: options.tlogUpload,
-    });
-    const bundle = await signer.signBlob(payload);
-    return sigstore.bundleToJSON(bundle);
+async function sign(payload, 
+/* istanbul ignore next */
+options = {}) {
+    const bundler = config.createBundleBuilder('messageSignature', options);
+    const bundle = await bundler.create({ data: payload });
+    return (0, bundle_1.bundleToJSON)(bundle);
 }
 exports.sign = sign;
-async function attest(payload, payloadType, options = {}) {
-    const ca = config.createCAClient(options);
-    const tlog = config.createTLogClient(options);
-    const tsa = config.createTSAClient(options);
-    const idps = config.identityProviders(options);
-    const signer = new sign_1.Signer({
-        ca,
-        tlog,
-        tsa,
-        identityProviders: options.identityProvider
-            ? [options.identityProvider]
-            : idps,
-        tlogUpload: options.tlogUpload,
-    });
-    const bundle = await signer.signAttestation(payload, payloadType);
-    return sigstore.bundleToJSON(bundle);
+async function attest(payload, payloadType, 
+/* istanbul ignore next */
+options = {}) {
+    const bundler = config.createBundleBuilder('dsseEnvelope', options);
+    const bundle = await bundler.create({ data: payload, type: payloadType });
+    return (0, bundle_1.bundleToJSON)(bundle);
 }
 exports.attest = attest;
-async function verify(bundle, payload, options = {}) {
-    const trustedRoot = await tuf.getTrustedRoot({
-        mirrorURL: options.tufMirrorURL,
-        rootPath: options.tufRootPath,
-        cachePath: options.tufCachePath,
-        retry: options.retry ?? config.DEFAULT_RETRY,
-        timeout: options.timeout ?? config.DEFAULT_TIMEOUT,
-    });
-    const verifier = new verify_1.Verifier(trustedRoot, options.keySelector);
-    const deserializedBundle = sigstore.bundleFromJSON(bundle);
-    const opts = config.artifactVerificationOptions(options);
-    return verifier.verify(deserializedBundle, opts, payload);
+async function verify(bundle, dataOrOptions, options) {
+    let data;
+    if (Buffer.isBuffer(dataOrOptions)) {
+        data = dataOrOptions;
+    }
+    else {
+        options = dataOrOptions;
+    }
+    return createVerifier(options).then((verifier) => verifier.verify(bundle, data));
 }
 exports.verify = verify;
-async function createVerifier(options) {
+async function createVerifier(
+/* istanbul ignore next */
+options = {}) {
     const trustedRoot = await tuf.getTrustedRoot({
         mirrorURL: options.tufMirrorURL,
         rootPath: options.tufRootPath,
@@ -103,44 +83,10 @@ async function createVerifier(options) {
     const verifier = new verify_1.Verifier(trustedRoot, options.keySelector);
     const verifyOpts = config.artifactVerificationOptions(options);
     return {
-        verify: (bundle) => {
-            const deserializedBundle = sigstore.bundleFromJSON(bundle);
-            return verifier.verify(deserializedBundle, verifyOpts);
+        verify: (bundle, payload) => {
+            const deserializedBundle = (0, bundle_1.bundleFromJSON)(bundle);
+            return verifier.verify(deserializedBundle, verifyOpts, payload);
         },
     };
 }
 exports.createVerifier = createVerifier;
-const tufUtils = {
-    client: (options = {}) => {
-        return tuf.initTUF({
-            mirrorURL: options.tufMirrorURL,
-            rootPath: options.tufRootPath,
-            cachePath: options.tufCachePath,
-            retry: options.retry,
-            timeout: options.timeout,
-        });
-    },
-    /*
-     * @deprecated Use tufUtils.client instead.
-     */
-    getTarget: (path, options = {}) => {
-        return tuf
-            .initTUF({
-            mirrorURL: options.tufMirrorURL,
-            rootPath: options.tufRootPath,
-            cachePath: options.tufCachePath,
-            retry: options.retry,
-            timeout: options.timeout,
-        })
-            .then((t) => t.getTarget(path));
-    },
-};
-exports.tuf = tufUtils;
-var error_1 = require("./error");
-Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } });
-Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } });
-Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } });
-Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } });
-exports.utils = __importStar(require("./sigstore-utils"));
-exports.DEFAULT_FULCIO_URL = config.DEFAULT_FULCIO_URL;
-exports.DEFAULT_REKOR_URL = config.DEFAULT_REKOR_URL;
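The reworked verify is now overloaded on its second argument: a Buffer is treated as the artifact to check (needed for message-signature bundles), anything else as options. A hedged usage sketch; the bundle JSON and artifact path are placeholders, not values from this patch:

const sigstore = require('sigstore');
const fs = require('fs');

async function checkBundle(bundleJSON, artifactPath) {
  // Message-signature bundles need the original artifact bytes...
  const artifact = fs.readFileSync(artifactPath);
  await sigstore.verify(bundleJSON, artifact);
  // ...while DSSE bundles carry their payload, so options (or nothing)
  // may be passed as the second argument instead.
  await sigstore.verify(bundleJSON, { tufCachePath: '/tmp/tuf-cache' });
}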
diff --git a/node_modules/sigstore/dist/tlog/format.js b/node_modules/sigstore/dist/tlog/format.js
deleted file mode 100644
index b0eae95098af0..0000000000000
--- a/node_modules/sigstore/dist/tlog/format.js
+++ /dev/null
@@ -1,134 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.toProposedIntotoEntry = exports.toProposedHashedRekordEntry = exports.toProposedDSSEEntry = void 0;
-const sigstore_1 = require("../types/sigstore");
-const util_1 = require("../util");
-const DEFAULT_DSSE_API_VERSION = '0.0.1';
-const DEFAULT_HASHEDREKORD_API_VERSION = '0.0.1';
-const DEFAULT_INTOTO_API_VERSION = '0.0.2';
-// Returns a properly formatted Rekor "dsse" entry for the given DSSE
-// envelope and signature
-function toProposedDSSEEntry(envelope, signature, apiVersion = DEFAULT_DSSE_API_VERSION) {
-    switch (apiVersion) {
-        case '0.0.1':
-            return toProposedDSSEV001Entry(envelope, signature);
-        default:
-            throw new Error(`Unsupported dsse kind API version: ${apiVersion}`);
-    }
-}
-exports.toProposedDSSEEntry = toProposedDSSEEntry;
-// Returns a properly formatted Rekor "hashedrekord" entry for the given digest
-// and signature
-function toProposedHashedRekordEntry(digest, signature) {
-    const hexDigest = digest.toString('hex');
-    const b64Signature = signature.signature.toString('base64');
-    const b64Key = util_1.encoding.base64Encode(toPublicKey(signature));
-    return {
-        apiVersion: DEFAULT_HASHEDREKORD_API_VERSION,
-        kind: 'hashedrekord',
-        spec: {
-            data: {
-                hash: {
-                    algorithm: 'sha256',
-                    value: hexDigest,
-                },
-            },
-            signature: {
-                content: b64Signature,
-                publicKey: {
-                    content: b64Key,
-                },
-            },
-        },
-    };
-}
-exports.toProposedHashedRekordEntry = toProposedHashedRekordEntry;
-// Returns a properly formatted Rekor "intoto" entry for the given DSSE
-// envelope and signature
-function toProposedIntotoEntry(envelope, signature, apiVersion = DEFAULT_INTOTO_API_VERSION) {
-    switch (apiVersion) {
-        case '0.0.2':
-            return toProposedIntotoV002Entry(envelope, signature);
-        default:
-            throw new Error(`Unsupported intoto kind API version: ${apiVersion}`);
-    }
-}
-exports.toProposedIntotoEntry = toProposedIntotoEntry;
-function toProposedDSSEV001Entry(envelope, signature) {
-    return {
-        apiVersion: '0.0.1',
-        kind: 'dsse',
-        spec: {
-            proposedContent: {
-                envelope: JSON.stringify(sigstore_1.Envelope.toJSON(envelope)),
-                verifiers: [util_1.encoding.base64Encode(toPublicKey(signature))],
-            },
-        },
-    };
-}
-function toProposedIntotoV002Entry(envelope, signature) {
-    // Calculate the value for the payloadHash field in the Rekor entry
-    const payloadHash = util_1.crypto.hash(envelope.payload).toString('hex');
-    // Calculate the value for the hash field in the Rekor entry
-    const envelopeHash = calculateDSSEHash(envelope, signature);
-    // Collect values for re-creating the DSSE envelope.
-    // Double-encode payload and signature because that's what Rekor expects
-    const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64'));
-    const sig = util_1.encoding.base64Encode(envelope.signatures[0].sig.toString('base64'));
-    const keyid = envelope.signatures[0].keyid;
-    const publicKey = util_1.encoding.base64Encode(toPublicKey(signature));
-    // Create the envelope portion of the entry. Note the inclusion of the
-    // publicKey in the signature struct is not a standard part of a DSSE
-    // envelope, but is required by Rekor.
-    const dsseEnv = {
-        payloadType: envelope.payloadType,
-        payload: payload,
-        signatures: [{ sig, publicKey }],
-    };
-    // If the keyid is an empty string, Rekor seems to remove it altogether. We
-    // need to do the same here so that we can properly recreate the entry for
-    // verification.
-    if (keyid.length > 0) {
-        dsseEnv.signatures[0].keyid = keyid;
-    }
-    return {
-        apiVersion: '0.0.2',
-        kind: 'intoto',
-        spec: {
-            content: {
-                envelope: dsseEnv,
-                hash: { algorithm: 'sha256', value: envelopeHash },
-                payloadHash: { algorithm: 'sha256', value: payloadHash },
-            },
-        },
-    };
-}
-// Calculates the hash of a DSSE envelope for inclusion in a Rekor entry.
-// There is no standard way to do this, so the scheme we're using is as
-// follows:
-//  * payload is base64 encoded
-//  * signature is base64 encoded (only the first signature is used)
-//  * keyid is included ONLY if it is NOT an empty string
-//  * The resulting JSON is canonicalized and hashed to a hex string
-function calculateDSSEHash(envelope, signature) {
-    const dsseEnv = {
-        payloadType: envelope.payloadType,
-        payload: envelope.payload.toString('base64'),
-        signatures: [
-            {
-                sig: envelope.signatures[0].sig.toString('base64'),
-                publicKey: toPublicKey(signature),
-            },
-        ],
-    };
-    // If the keyid is an empty string, Rekor seems to remove it altogether.
-    if (envelope.signatures[0].keyid.length > 0) {
-        dsseEnv.signatures[0].keyid = envelope.signatures[0].keyid;
-    }
-    return util_1.crypto.hash(util_1.json.canonicalize(dsseEnv)).toString('hex');
-}
-function toPublicKey(signature) {
-    return signature.certificates
-        ? signature.certificates[0]
-        : signature.key.value;
-}
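Of the helpers removed above, calculateDSSEHash is the least obvious: there is no standard DSSE envelope hash, so the scheme in the comments is ad hoc. Restated as a self-contained sketch, with canonicalize standing in for the vendored util.json.canonicalize (canonical JSON serialization is assumed):

const crypto = require('crypto');

function envelopeHash(envelope, publicKeyPem, canonicalize) {
  const dsseEnv = {
    payloadType: envelope.payloadType,
    payload: envelope.payload.toString('base64'),
    signatures: [
      {
        sig: envelope.signatures[0].sig.toString('base64'),
        publicKey: publicKeyPem,
      },
    ],
  };
  // Rekor drops an empty keyid entirely; include it only when non-empty so
  // the entry recreated at verification time hashes to the same value.
  if (envelope.signatures[0].keyid.length > 0) {
    dsseEnv.signatures[0].keyid = envelope.signatures[0].keyid;
  }
  return crypto.createHash('sha256').update(canonicalize(dsseEnv)).digest('hex');
}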
diff --git a/node_modules/sigstore/dist/tlog/index.js b/node_modules/sigstore/dist/tlog/index.js
deleted file mode 100644
index 7f5f531983b37..0000000000000
--- a/node_modules/sigstore/dist/tlog/index.js
+++ /dev/null
@@ -1,75 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TLogClient = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const error_1 = require("../error");
-const external_1 = require("../external");
-const format_1 = require("./format");
-class TLogClient {
-    constructor(options) {
-        this.rekor = new external_1.Rekor({
-            baseURL: options.rekorBaseURL,
-            retry: options.retry,
-            timeout: options.timeout,
-        });
-    }
-    async createMessageSignatureEntry(digest, sigMaterial, options = {}) {
-        const proposedEntry = (0, format_1.toProposedHashedRekordEntry)(digest, sigMaterial);
-        return this.createEntry(proposedEntry, options.fetchOnConflict);
-    }
-    async createDSSEEntry(envelope, sigMaterial, options = {}) {
-        const proposedEntry = (0, format_1.toProposedIntotoEntry)(envelope, sigMaterial);
-        return this.createEntry(proposedEntry, options.fetchOnConflict);
-    }
-    async createEntry(proposedEntry, fetchOnConflict = false) {
-        let entry;
-        try {
-            entry = await this.rekor.createEntry(proposedEntry);
-        }
-        catch (err) {
-            // If the entry already exists, fetch it (if enabled)
-            if (entryExistsError(err) && fetchOnConflict) {
-                // Grab the UUID of the existing entry from the location header
-                const uuid = err.location.split('/').pop() || '';
-                try {
-                    entry = await this.rekor.getEntry(uuid);
-                }
-                catch (err) {
-                    throw new error_1.InternalError({
-                        code: 'TLOG_FETCH_ENTRY_ERROR',
-                        message: 'error fetching tlog entry',
-                        cause: err,
-                    });
-                }
-            }
-            else {
-                throw new error_1.InternalError({
-                    code: 'TLOG_CREATE_ENTRY_ERROR',
-                    message: 'error creating tlog entry',
-                    cause: err,
-                });
-            }
-        }
-        return entry;
-    }
-}
-exports.TLogClient = TLogClient;
-function entryExistsError(value) {
-    return (value instanceof external_1.HTTPError &&
-        value.statusCode === 409 &&
-        value.location !== undefined);
-}
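The createEntry flow above handles duplicates by design: Rekor answers a repeated proposal with HTTP 409 and a Location header whose final path segment is the existing entry's UUID. The same pattern in isolation (rekor is a stand-in client exposing the createEntry/getEntry methods used above):

async function createOrFetchEntry(rekor, proposedEntry) {
  try {
    return await rekor.createEntry(proposedEntry);
  } catch (err) {
    // A 409 with a Location header means the entry already exists upstream.
    if (err.statusCode === 409 && err.location) {
      const uuid = err.location.split('/').pop() || '';
      return rekor.getEntry(uuid);
    }
    throw err;
  }
}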
diff --git a/node_modules/sigstore/dist/tlog/verify/checkpoint.js b/node_modules/sigstore/dist/tlog/verify/checkpoint.js
new file mode 100644
index 0000000000000..f6f35a5cad64d
--- /dev/null
+++ b/node_modules/sigstore/dist/tlog/verify/checkpoint.js
@@ -0,0 +1,148 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyCheckpoint = void 0;
+const error_1 = require("../../error");
+const util_1 = require("../../util");
+// Separator between the note and the signatures in a checkpoint
+const CHECKPOINT_SEPARATOR = '\n\n';
+// Checkpoint signatures are of the following form:
+// "–  \n"
+// where:
+// - the prefix is an emdash (U+2014).
+// -  gives a human-readable representation of the signing ID.
+// -  is the first 4 bytes of the SHA256 hash of the
+//   associated public key followed by the signature bytes.
+const SIGNATURE_REGEX = /\u2014 (\S+) (\S+)\n/g;
+// Verifies the checkpoint value in the given tlog entry. There are two steps
+// to the verification:
+// 1. Verify that all signatures in the checkpoint can be verified against a
+//    trusted public key
+// 2. Verify that the root hash in the checkpoint matches the root hash in the
+//    inclusion proof
+// See: https://github.com/transparency-dev/formats/blob/main/log/README.md
+function verifyCheckpoint(entry, tlogs) {
+    // Filter tlog instances to just those which were valid at the time of the
+    // entry
+    const validTLogs = filterTLogInstances(tlogs, entry.integratedTime);
+    const inclusionProof = entry.inclusionProof;
+    const signedNote = SignedNote.fromString(inclusionProof.checkpoint.envelope);
+    const checkpoint = LogCheckpoint.fromString(signedNote.note);
+    // Verify that the signatures in the checkpoint are all valid, also check
+    // that the root hash from the checkpoint matches the root hash in the
+    // inclusion proof
+    return (signedNote.verify(validTLogs) &&
+        util_1.crypto.bufferEqual(checkpoint.logHash, inclusionProof.rootHash));
+}
+exports.verifyCheckpoint = verifyCheckpoint;
+// SignedNote represents a signed note from a transparency log checkpoint. Consists
+// of a body (or note) and one or more signatures calculated over the body. See
+// https://github.com/transparency-dev/formats/blob/main/log/README.md#signed-envelope
+class SignedNote {
+    constructor(note, signatures) {
+        this.note = note;
+        this.signatures = signatures;
+    }
+    // Deserialize a SignedNote from a string
+    static fromString(envelope) {
+        if (!envelope.includes(CHECKPOINT_SEPARATOR)) {
+            throw new error_1.VerificationError('malformed checkpoint: no separator');
+        }
+        // Split the note into the header and the data portions at the separator
+        const split = envelope.indexOf(CHECKPOINT_SEPARATOR);
+        const header = envelope.slice(0, split + 1);
+        const data = envelope.slice(split + CHECKPOINT_SEPARATOR.length);
+        // Find all the signature lines in the data portion
+        const matches = data.matchAll(SIGNATURE_REGEX);
+        // Parse each of the matched signature lines into the name and signature.
+        // The first four bytes of the signature are the key hint (should match the
+        // first four bytes of the log ID), and the rest is the signature itself.
+        const signatures = Array.from(matches, (match) => {
+            const [, name, signature] = match;
+            const sigBytes = Buffer.from(signature, 'base64');
+            if (sigBytes.length < 5) {
+                throw new error_1.VerificationError('malformed checkpoint: invalid signature');
+            }
+            return {
+                name,
+                keyHint: sigBytes.subarray(0, 4),
+                signature: sigBytes.subarray(4),
+            };
+        });
+        if (signatures.length === 0) {
+            throw new error_1.VerificationError('malformed checkpoint: no signatures');
+        }
+        return new SignedNote(header, signatures);
+    }
+    // Verifies the signatures in the SignedNote. For each signature, the
+    // corresponding transparency log is looked up by the key hint and the
+    // signature is verified against the public key in the transparency log.
+    // Returns false if any of the signatures are invalid.
+    verify(tlogs) {
+        const data = Buffer.from(this.note, 'utf-8');
+        return this.signatures.every((signature) => {
+            // Find the transparency log instance with the matching key hint
+            const tlog = tlogs.find((tlog) => util_1.crypto.bufferEqual(tlog.logId.keyId.subarray(0, 4), signature.keyHint));
+            if (!tlog) {
+                return false;
+            }
+            const publicKey = util_1.crypto.createPublicKey(tlog.publicKey.rawBytes);
+            return util_1.crypto.verifyBlob(data, publicKey, signature.signature);
+        });
+    }
+}
+// LogCheckpoint represents a transparency log checkpoint. Consists of the
+// following:
+//  - origin: the name of the transparency log
+//  - logSize: the size of the log at the time of the checkpoint
+//  - logHash: the root hash of the log at the time of the checkpoint
+//  - rest: the rest of the checkpoint body, which is a list of log entries
+// See:
+// https://github.com/transparency-dev/formats/blob/main/log/README.md#checkpoint-body
+class LogCheckpoint {
+    constructor(origin, logSize, logHash, rest) {
+        this.origin = origin;
+        this.logSize = logSize;
+        this.logHash = logHash;
+        this.rest = rest;
+    }
+    static fromString(note) {
+        const lines = note.trim().split('\n');
+        if (lines.length < 4) {
+            throw new error_1.VerificationError('malformed checkpoint: too few lines in header');
+        }
+        const origin = lines[0];
+        const logSize = BigInt(lines[1]);
+        const rootHash = Buffer.from(lines[2], 'base64');
+        const rest = lines.slice(3);
+        return new LogCheckpoint(origin, logSize, rootHash, rest);
+    }
+}
+// Filter the list of tlog instances to only those which have usable public
+// keys and were valid at the given time.
+function filterTLogInstances(tlogInstances, integratedTime) {
+    const targetDate = new Date(Number(integratedTime) * 1000);
+    return tlogInstances.filter((tlog) => {
+        // Must have a log ID
+        if (!tlog.logId) {
+            return false;
+        }
+        // If the tlog doesn't have a public key, we can't use it
+        const publicKey = tlog.publicKey;
+        if (publicKey === undefined) {
+            return false;
+        }
+        // If the tlog doesn't have a rawBytes field, we can't use it
+        if (publicKey.rawBytes === undefined) {
+            return false;
+        }
+        // If the tlog doesn't have a validFor field, we don't need to check it
+        const validFor = publicKey.validFor;
+        if (validFor === undefined) {
+            return true;
+        }
+        // Check that the integrated time is within the validFor range
+        return (validFor.start !== undefined &&
+            validFor.start <= targetDate &&
+            (validFor.end === undefined || targetDate <= validFor.end));
+    });
+}
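To make the note format added above concrete: a checkpoint envelope is a header note, a blank line, then one dash-prefixed signature line per signer. A stripped-down sketch of the parse step performed by SignedNote.fromString, reusing the same separator and signature regex:

const SEPARATOR = '\n\n';
const SIG_LINE = /\u2014 (\S+) (\S+)\n/g;

function parseCheckpoint(envelope) {
  const split = envelope.indexOf(SEPARATOR);
  if (split === -1) {
    throw new Error('malformed checkpoint: no separator');
  }
  const note = envelope.slice(0, split + 1); // header, trailing "\n" kept
  const sigBlock = envelope.slice(split + SEPARATOR.length);
  // Each signature decodes to a 4-byte key hint plus the signature bytes.
  const signatures = Array.from(sigBlock.matchAll(SIG_LINE), ([, name, b64]) => {
    const bytes = Buffer.from(b64, 'base64');
    return { name, keyHint: bytes.subarray(0, 4), signature: bytes.subarray(4) };
  });
  return { note, signatures };
}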
diff --git a/node_modules/sigstore/dist/tlog/verify/index.js b/node_modules/sigstore/dist/tlog/verify/index.js
index cbb93133c2685..9224feffde00b 100644
--- a/node_modules/sigstore/dist/tlog/verify/index.js
+++ b/node_modules/sigstore/dist/tlog/verify/index.js
@@ -1,27 +1,4 @@
 "use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.verifyTLogEntries = void 0;
 /*
@@ -39,31 +16,51 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
+const bundle_1 = require("@sigstore/bundle");
 const error_1 = require("../../error");
-const sigstore = __importStar(require("../../types/sigstore"));
 const cert_1 = require("../../x509/cert");
 const body_1 = require("./body");
+const checkpoint_1 = require("./checkpoint");
+const merkle_1 = require("./merkle");
 const set_1 = require("./set");
 // Verifies that the number of tlog entries that pass offline verification
 // is greater than or equal to the threshold specified in the options.
 function verifyTLogEntries(bundle, trustedRoot, options) {
+    if (bundle.mediaType === bundle_1.BUNDLE_V01_MEDIA_TYPE) {
+        (0, bundle_1.assertBundleV01)(bundle);
+        verifyTLogEntriesForBundleV01(bundle, trustedRoot, options);
+    }
+    else {
+        (0, bundle_1.assertBundleLatest)(bundle);
+        verifyTLogEntriesForBundleLatest(bundle, trustedRoot, options);
+    }
+}
+exports.verifyTLogEntries = verifyTLogEntries;
+function verifyTLogEntriesForBundleV01(bundle, trustedRoot, options) {
     if (options.performOnlineVerification) {
         throw new error_1.VerificationError('Online verification not implemented');
     }
     // Extract the signing cert, if available
     const signingCert = signingCertificate(bundle);
     // Iterate over the tlog entries and verify each one
-    const verifiedEntries = bundle.verificationMaterial.tlogEntries.filter((entry) => verifyTLogEntryOffline(entry, bundle.content, trustedRoot.tlogs, signingCert));
+    const verifiedEntries = bundle.verificationMaterial.tlogEntries.filter((entry) => verifyTLogEntryWithInclusionPromise(entry, bundle.content, trustedRoot.tlogs, signingCert));
     if (verifiedEntries.length < options.threshold) {
         throw new error_1.VerificationError('tlog verification failed');
     }
 }
-exports.verifyTLogEntries = verifyTLogEntries;
-function verifyTLogEntryOffline(entry, bundleContent, tlogs, signingCert) {
-    // Check that the TLog entry has the fields necessary for verification
-    if (!sigstore.isVerifiableTransparencyLogEntry(entry)) {
-        return false;
+function verifyTLogEntriesForBundleLatest(bundle, trustedRoot, options) {
+    if (options.performOnlineVerification) {
+        throw new error_1.VerificationError('Online verification not implemented');
+    }
+    // Extract the signing cert, if available
+    const signingCert = signingCertificate(bundle);
+    // Iterate over the tlog entries and verify each one
+    const verifiedEntries = bundle.verificationMaterial.tlogEntries.filter((entry) => verifyTLogEntryWithInclusionProof(entry, bundle.content, trustedRoot.tlogs, signingCert));
+    if (verifiedEntries.length < options.threshold) {
+        throw new error_1.VerificationError('tlog verification failed');
     }
+}
+function verifyTLogEntryWithInclusionPromise(entry, bundleContent, tlogs, signingCert) {
     // If there is a signing certificate available, check that the tlog integrated
     // time is within the certificate's validity period; otherwise, skip this
     // check.
@@ -74,8 +71,20 @@ function verifyTLogEntryOffline(entry, bundleContent, tlogs, signingCert) {
         (0, set_1.verifyTLogSET)(entry, tlogs) &&
         verifyTLogIntegrationTime());
 }
+function verifyTLogEntryWithInclusionProof(entry, bundleContent, tlogs, signingCert) {
+    // If there is a signing certificate available, check that the tlog integrated
+    // time is within the certificate's validity period; otherwise, skip this
+    // check.
+    const verifyTLogIntegrationTime = signingCert
+        ? () => signingCert.validForDate(new Date(Number(entry.integratedTime) * 1000))
+        : () => true;
+    return ((0, body_1.verifyTLogBody)(entry, bundleContent) &&
+        (0, merkle_1.verifyMerkleInclusion)(entry) &&
+        (0, checkpoint_1.verifyCheckpoint)(entry, tlogs) &&
+        verifyTLogIntegrationTime());
+}
 function signingCertificate(bundle) {
-    if (!sigstore.isBundleWithCertificateChain(bundle)) {
+    if (!(0, bundle_1.isBundleWithCertificateChain)(bundle)) {
         return undefined;
     }
     const signingCert = bundle.verificationMaterial.content.x509CertificateChain.certificates[0];
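The net effect of the restructuring above is a per-bundle dispatch: v0.1 bundles are verified via the signed entry timestamp (inclusion promise), newer bundles via the Merkle inclusion proof plus checkpoint. A compact sketch of that choice; pickEntryVerifier is an illustrative name, and the referenced functions and constant are those defined or imported above:

function pickEntryVerifier(bundle) {
  return bundle.mediaType === BUNDLE_V01_MEDIA_TYPE
    ? verifyTLogEntryWithInclusionPromise // SET check (bundle v0.1)
    : verifyTLogEntryWithInclusionProof; // Merkle proof + checkpoint
}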
diff --git a/node_modules/sigstore/dist/tlog/verify/merkle.js b/node_modules/sigstore/dist/tlog/verify/merkle.js
index 90609cb73576f..0f246af4a28a3 100644
--- a/node_modules/sigstore/dist/tlog/verify/merkle.js
+++ b/node_modules/sigstore/dist/tlog/verify/merkle.js
@@ -25,9 +25,6 @@ const RFC6962_LEAF_HASH_PREFIX = Buffer.from([0x00]);
 const RFC6962_NODE_HASH_PREFIX = Buffer.from([0x01]);
 function verifyMerkleInclusion(entry) {
     const inclusionProof = entry.inclusionProof;
-    if (!inclusionProof) {
-        throw new error_1.VerificationError('tlog entry has no inclusion proof');
-    }
     const logIndex = BigInt(inclusionProof.logIndex);
     const treeSize = BigInt(inclusionProof.treeSize);
     if (logIndex < 0n || logIndex >= treeSize) {
@@ -76,13 +73,20 @@ function chainBorderRight(seed, hashes) {
     return hashes.reduce((acc, h) => hashChildren(h, acc), seed);
 }
 function innerProofSize(index, size) {
-    return (index ^ (size - BigInt(1))).toString(2).length;
+    return bitLength(index ^ (size - BigInt(1)));
 }
 // Counts the number of ones in the binary representation of the given number.
 // https://en.wikipedia.org/wiki/Hamming_weight
 function onesCount(x) {
     return x.toString(2).split('1').length - 1;
 }
+// Returns the number of bits necessary to represent an integer in binary.
+function bitLength(n) {
+    if (n === 0n) {
+        return 0;
+    }
+    return n.toString(2).length;
+}
 // Hashing logic according to RFC6962.
 // https://datatracker.ietf.org/doc/html/rfc6962#section-2
 function hashChildren(left, right) {
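The bitLength helper introduced above exists for a single edge case: BigInt's toString(2) renders 0n as "0", so the old innerProofSize expression reported one bit where zero is correct. For instance:

console.log((0n).toString(2).length); // 1 — "0" still has one character
console.log((5n).toString(2).length); // 3 — unchanged for non-zero values
// bitLength(0n) returns 0, so innerProofSize(index, size) is now exact
// when index ^ (size - 1n) is zero.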
diff --git a/node_modules/sigstore/dist/tlog/verify/set.js b/node_modules/sigstore/dist/tlog/verify/set.js
index 89a544283d73d..959cd5883f1ca 100644
--- a/node_modules/sigstore/dist/tlog/verify/set.js
+++ b/node_modules/sigstore/dist/tlog/verify/set.js
@@ -11,9 +11,6 @@ function verifyTLogSET(entry, tlogs) {
     const validTLogs = filterTLogInstances(tlogs, entry.logId.keyId, entry.integratedTime);
     // Check to see if we can verify the SET against any of the valid tlogs
     return validTLogs.some((tlog) => {
-        if (!tlog.publicKey?.rawBytes) {
-            return false;
-        }
         const publicKey = util_1.crypto.createPublicKey(tlog.publicKey.rawBytes);
         // Re-create the original Rekor verification payload
         const payload = toVerificationPayload(entry);
@@ -60,7 +57,7 @@ function filterTLogInstances(tlogInstances, logID, integratedTime) {
             return true;
         }
         // Check that the integrated time is within the validFor range
-        return (publicKey.validFor.start &&
+        return (publicKey.validFor.start !== undefined &&
             publicKey.validFor.start <= targetDate &&
             (!publicKey.validFor.end || targetDate <= publicKey.validFor.end));
     });
diff --git a/node_modules/sigstore/dist/tsa/index.js b/node_modules/sigstore/dist/tsa/index.js
deleted file mode 100644
index 4951b24a93f4f..0000000000000
--- a/node_modules/sigstore/dist/tsa/index.js
+++ /dev/null
@@ -1,47 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TSAClient = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const error_1 = require("../error");
-const external_1 = require("../external");
-const util_1 = require("../util");
-class TSAClient {
-    constructor(options) {
-        this.tsa = new external_1.TimestampAuthority({
-            baseURL: options.tsaBaseURL,
-            retry: options.retry,
-            timeout: options.timeout,
-        });
-    }
-    async createTimestamp(signature) {
-        const request = {
-            artifactHash: util_1.crypto.hash(signature).toString('base64'),
-            hashAlgorithm: 'sha256',
-        };
-        try {
-            return await this.tsa.createTimestamp(request);
-        }
-        catch (err) {
-            throw new error_1.InternalError({
-                code: 'TSA_CREATE_TIMESTAMP_ERROR',
-                message: 'error creating timestamp',
-                cause: err,
-            });
-        }
-    }
-}
-exports.TSAClient = TSAClient;
diff --git a/node_modules/sigstore/dist/types/signature.js b/node_modules/sigstore/dist/types/signature.js
deleted file mode 100644
index 339e2a2731b41..0000000000000
--- a/node_modules/sigstore/dist/types/signature.js
+++ /dev/null
@@ -1,15 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.extractSignatureMaterial = void 0;
-function extractSignatureMaterial(dsseEnvelope, publicKey) {
-    const signature = dsseEnvelope.signatures[0];
-    return {
-        signature: signature.sig,
-        key: {
-            id: signature.keyid,
-            value: publicKey,
-        },
-        certificates: undefined,
-    };
-}
-exports.extractSignatureMaterial = extractSignatureMaterial;
diff --git a/node_modules/sigstore/dist/types/sigstore.js b/node_modules/sigstore/dist/types/sigstore.js
new file mode 100644
index 0000000000000..36efb67e38a5e
--- /dev/null
+++ b/node_modules/sigstore/dist/types/sigstore.js
@@ -0,0 +1,27 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isCAVerificationOptions = exports.SubjectAlternativeNameType = void 0;
+// Enums from protobuf-specs
+var protobuf_specs_1 = require("@sigstore/protobuf-specs");
+Object.defineProperty(exports, "SubjectAlternativeNameType", { enumerable: true, get: function () { return protobuf_specs_1.SubjectAlternativeNameType; } });
+function isCAVerificationOptions(options) {
+    return (options.ctlogOptions !== undefined &&
+        (options.signers === undefined ||
+            options.signers.$case === 'certificateIdentities'));
+}
+exports.isCAVerificationOptions = isCAVerificationOptions;
diff --git a/node_modules/sigstore/dist/types/sigstore/index.js b/node_modules/sigstore/dist/types/sigstore/index.js
deleted file mode 100644
index 2c240c865cf37..0000000000000
--- a/node_modules/sigstore/dist/types/sigstore/index.js
+++ /dev/null
@@ -1,162 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.toMessageSignatureBundle = exports.toDSSEBundle = exports.isVerifiableTransparencyLogEntry = exports.isCAVerificationOptions = exports.isBundleWithCertificateChain = exports.bundleToJSON = exports.bundleFromJSON = exports.SubjectAlternativeNameType = exports.PublicKeyDetails = exports.HashAlgorithm = exports.Envelope = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const protobuf_specs_1 = require("@sigstore/protobuf-specs");
-const util_1 = require("../../util");
-const validate_1 = require("./validate");
-// Enums from protobuf-specs
-// TODO: Move Envelope to "type" export once @sigstore/sign is a thing
-var protobuf_specs_2 = require("@sigstore/protobuf-specs");
-Object.defineProperty(exports, "Envelope", { enumerable: true, get: function () { return protobuf_specs_2.Envelope; } });
-Object.defineProperty(exports, "HashAlgorithm", { enumerable: true, get: function () { return protobuf_specs_2.HashAlgorithm; } });
-Object.defineProperty(exports, "PublicKeyDetails", { enumerable: true, get: function () { return protobuf_specs_2.PublicKeyDetails; } });
-Object.defineProperty(exports, "SubjectAlternativeNameType", { enumerable: true, get: function () { return protobuf_specs_2.SubjectAlternativeNameType; } });
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-const bundleFromJSON = (obj) => {
-    const bundle = protobuf_specs_1.Bundle.fromJSON(obj);
-    (0, validate_1.assertValidBundle)(bundle);
-    return bundle;
-};
-exports.bundleFromJSON = bundleFromJSON;
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-const bundleToJSON = (bundle) => {
-    return protobuf_specs_1.Bundle.toJSON(bundle);
-};
-exports.bundleToJSON = bundleToJSON;
-const BUNDLE_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1';
-// Type guard for narrowing a Bundle to a BundleWithCertificateChain
-function isBundleWithCertificateChain(bundle) {
-    return (bundle.verificationMaterial.content !== undefined &&
-        bundle.verificationMaterial.content.$case === 'x509CertificateChain');
-}
-exports.isBundleWithCertificateChain = isBundleWithCertificateChain;
-function isCAVerificationOptions(options) {
-    return (options.ctlogOptions !== undefined &&
-        (options.signers === undefined ||
-            options.signers.$case === 'certificateIdentities'));
-}
-exports.isCAVerificationOptions = isCAVerificationOptions;
-function isVerifiableTransparencyLogEntry(entry) {
-    return (entry.logId !== undefined &&
-        entry.inclusionPromise !== undefined &&
-        entry.kindVersion !== undefined);
-}
-exports.isVerifiableTransparencyLogEntry = isVerifiableTransparencyLogEntry;
-// All of the following functions are used to construct a ValidBundle
-// from various types of input. When this code moves into the
-// @sigstore/sign package, these functions will be exported from there.
-function toDSSEBundle({ envelope, signature, tlogEntry, timestamp, }) {
-    return {
-        mediaType: BUNDLE_MEDIA_TYPE,
-        content: { $case: 'dsseEnvelope', dsseEnvelope: envelope },
-        verificationMaterial: toVerificationMaterial({
-            signature,
-            tlogEntry,
-            timestamp,
-        }),
-    };
-}
-exports.toDSSEBundle = toDSSEBundle;
-function toMessageSignatureBundle({ digest, signature, tlogEntry, timestamp, }) {
-    return {
-        mediaType: BUNDLE_MEDIA_TYPE,
-        content: {
-            $case: 'messageSignature',
-            messageSignature: {
-                messageDigest: {
-                    algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256,
-                    digest: digest,
-                },
-                signature: signature.signature,
-            },
-        },
-        verificationMaterial: toVerificationMaterial({
-            signature,
-            tlogEntry,
-            timestamp,
-        }),
-    };
-}
-exports.toMessageSignatureBundle = toMessageSignatureBundle;
-function toTransparencyLogEntry(entry) {
-    const b64SET = entry.verification?.signedEntryTimestamp || '';
-    const set = Buffer.from(b64SET, 'base64');
-    const logID = Buffer.from(entry.logID, 'hex');
-    const proof = entry.verification?.inclusionProof
-        ? toInclusionProof(entry.verification.inclusionProof)
-        : undefined;
-    // Parse entry body so we can extract the kind and version.
-    const bodyJSON = util_1.encoding.base64Decode(entry.body);
-    const entryBody = JSON.parse(bodyJSON);
-    return {
-        inclusionPromise: {
-            signedEntryTimestamp: set,
-        },
-        logIndex: entry.logIndex.toString(),
-        logId: {
-            keyId: logID,
-        },
-        integratedTime: entry.integratedTime.toString(),
-        kindVersion: {
-            kind: entryBody.kind,
-            version: entryBody.apiVersion,
-        },
-        inclusionProof: proof,
-        canonicalizedBody: Buffer.from(entry.body, 'base64'),
-    };
-}
-function toInclusionProof(proof) {
-    return {
-        logIndex: proof.logIndex.toString(),
-        rootHash: Buffer.from(proof.rootHash, 'hex'),
-        treeSize: proof.treeSize.toString(),
-        checkpoint: {
-            envelope: proof.checkpoint,
-        },
-        hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')),
-    };
-}
-function toVerificationMaterial({ signature, tlogEntry, timestamp, }) {
-    return {
-        content: signature.certificates
-            ? toVerificationMaterialx509CertificateChain(signature.certificates)
-            : toVerificationMaterialPublicKey(signature.key.id || ''),
-        tlogEntries: tlogEntry ? [toTransparencyLogEntry(tlogEntry)] : [],
-        timestampVerificationData: timestamp
-            ? toTimestampVerificationData(timestamp)
-            : undefined,
-    };
-}
-function toVerificationMaterialx509CertificateChain(certificates) {
-    return {
-        $case: 'x509CertificateChain',
-        x509CertificateChain: {
-            certificates: certificates.map((c) => ({
-                rawBytes: util_1.pem.toDER(c),
-            })),
-        },
-    };
-}
-function toVerificationMaterialPublicKey(hint) {
-    return { $case: 'publicKey', publicKey: { hint } };
-}
-function toTimestampVerificationData(timestamp) {
-    return {
-        rfc3161Timestamps: [{ signedTimestamp: timestamp }],
-    };
-}
diff --git a/node_modules/sigstore/dist/types/sigstore/serialized.js b/node_modules/sigstore/dist/types/sigstore/serialized.js
deleted file mode 100644
index c8ad2e549bdc6..0000000000000
--- a/node_modules/sigstore/dist/types/sigstore/serialized.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/sigstore/dist/types/sigstore/validate.js b/node_modules/sigstore/dist/types/sigstore/validate.js
deleted file mode 100644
index a19d8ad3ec702..0000000000000
--- a/node_modules/sigstore/dist/types/sigstore/validate.js
+++ /dev/null
@@ -1,88 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.assertValidBundle = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const error_1 = require("../../error");
-// Performs basic validation of a Sigstore bundle to ensure that all required
-// fields are populated. This is not a complete validation of the bundle, but
-// rather a check that the bundle is in a valid state to be processed by the
-// rest of the code.
-function assertValidBundle(b) {
-    const invalidValues = [];
-    // Content-related validation
-    if (b.content === undefined) {
-        invalidValues.push('content');
-    }
-    else {
-        switch (b.content.$case) {
-            case 'messageSignature':
-                if (b.content.messageSignature.messageDigest === undefined) {
-                    invalidValues.push('content.messageSignature.messageDigest');
-                }
-                else {
-                    if (b.content.messageSignature.messageDigest.digest.length === 0) {
-                        invalidValues.push('content.messageSignature.messageDigest.digest');
-                    }
-                }
-                if (b.content.messageSignature.signature.length === 0) {
-                    invalidValues.push('content.messageSignature.signature');
-                }
-                break;
-            case 'dsseEnvelope':
-                if (b.content.dsseEnvelope.payload.length === 0) {
-                    invalidValues.push('content.dsseEnvelope.payload');
-                }
-                if (b.content.dsseEnvelope.signatures.length !== 1) {
-                    invalidValues.push('content.dsseEnvelope.signatures');
-                }
-                else {
-                    if (b.content.dsseEnvelope.signatures[0].sig.length === 0) {
-                        invalidValues.push('content.dsseEnvelope.signatures[0].sig');
-                    }
-                }
-                break;
-        }
-    }
-    // Verification material-related validation
-    if (b.verificationMaterial === undefined) {
-        invalidValues.push('verificationMaterial');
-    }
-    else {
-        if (b.verificationMaterial.content === undefined) {
-            invalidValues.push('verificationMaterial.content');
-        }
-        else {
-            switch (b.verificationMaterial.content.$case) {
-                case 'x509CertificateChain':
-                    if (b.verificationMaterial.content.x509CertificateChain.certificates
-                        .length === 0) {
-                        invalidValues.push('verificationMaterial.content.x509CertificateChain.certificates');
-                    }
-                    b.verificationMaterial.content.x509CertificateChain.certificates.forEach((cert, i) => {
-                        if (cert.rawBytes.length === 0) {
-                            invalidValues.push(`verificationMaterial.content.x509CertificateChain.certificates[${i}].rawBytes`);
-                        }
-                    });
-                    break;
-            }
-        }
-    }
-    if (invalidValues.length > 0) {
-        throw new error_1.ValidationError(`invalid/missing bundle values: ${invalidValues.join(', ')}`);
-    }
-}
-exports.assertValidBundle = assertValidBundle;
diff --git a/node_modules/sigstore/dist/types/utility.js b/node_modules/sigstore/dist/types/utility.js
index 132848cd7587e..77c91b1923ca0 100644
--- a/node_modules/sigstore/dist/types/utility.js
+++ b/node_modules/sigstore/dist/types/utility.js
@@ -14,5 +14,4 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
-// https://dev.to/maxime1992/implement-a-generic-oneof-type-with-typescript-22em
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/sigstore/dist/util/crypto.js b/node_modules/sigstore/dist/util/crypto.js
index 0b1e0bc62d8ab..c26de091ecdb6 100644
--- a/node_modules/sigstore/dist/util/crypto.js
+++ b/node_modules/sigstore/dist/util/crypto.js
@@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.randomBytes = exports.hash = exports.verifyBlob = exports.signBlob = exports.createPublicKey = exports.generateKeyPair = void 0;
+exports.bufferEqual = exports.randomBytes = exports.hash = exports.verifyBlob = exports.createPublicKey = void 0;
 /*
 Copyright 2022 The Sigstore Authors.
 
@@ -20,15 +20,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 const crypto_1 = __importDefault(require("crypto"));
-const EC_KEYPAIR_TYPE = 'ec';
-const P256_CURVE = 'P-256';
 const SHA256_ALGORITHM = 'sha256';
-function generateKeyPair() {
-    return crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, {
-        namedCurve: P256_CURVE,
-    });
-}
-exports.generateKeyPair = generateKeyPair;
 function createPublicKey(key) {
     if (typeof key === 'string') {
         return crypto_1.default.createPublicKey(key);
@@ -38,10 +30,6 @@ function createPublicKey(key) {
     }
 }
 exports.createPublicKey = createPublicKey;
-function signBlob(data, privateKey) {
-    return crypto_1.default.sign(null, data, privateKey);
-}
-exports.signBlob = signBlob;
 function verifyBlob(data, key, signature, algorithm) {
     // The try/catch is to work around an issue in Node 14.x where verify throws
     // an error in some scenarios if the signature is invalid.
@@ -49,6 +37,7 @@ function verifyBlob(data, key, signature, algorithm) {
         return crypto_1.default.verify(algorithm, data, key, signature);
     }
     catch (e) {
+        /* istanbul ignore next */
         return false;
     }
 }
@@ -62,3 +51,13 @@ function randomBytes(count) {
     return crypto_1.default.randomBytes(count);
 }
 exports.randomBytes = randomBytes;
+function bufferEqual(a, b) {
+    try {
+        return crypto_1.default.timingSafeEqual(a, b);
+    }
+    catch {
+        /* istanbul ignore next */
+        return false;
+    }
+}
+exports.bufferEqual = bufferEqual;
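The try/catch inside the new bufferEqual is not defensive boilerplate: crypto.timingSafeEqual throws when its arguments differ in byte length, and the wrapper converts that into a plain false. A quick demonstration:

const crypto = require('crypto');

const a = Buffer.from('abcd');
console.log(crypto.timingSafeEqual(a, Buffer.from('abce'))); // false (constant time)
try {
  crypto.timingSafeEqual(a, Buffer.from('abc'));
} catch {
  console.log('length mismatch throws'); // bufferEqual maps this to false
}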
diff --git a/node_modules/sigstore/dist/util/index.js b/node_modules/sigstore/dist/util/index.js
index b7d6ce21aafd3..ff4cec375af8f 100644
--- a/node_modules/sigstore/dist/util/index.js
+++ b/node_modules/sigstore/dist/util/index.js
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.ua = exports.promise = exports.pem = exports.oidc = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.asn1 = void 0;
+exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.asn1 = void 0;
 /*
 Copyright 2022 The Sigstore Authors.
 
@@ -44,7 +44,4 @@ exports.crypto = __importStar(require("./crypto"));
 exports.dsse = __importStar(require("./dsse"));
 exports.encoding = __importStar(require("./encoding"));
 exports.json = __importStar(require("./json"));
-exports.oidc = __importStar(require("./oidc"));
 exports.pem = __importStar(require("./pem"));
-exports.promise = __importStar(require("./promise"));
-exports.ua = __importStar(require("./ua"));
diff --git a/node_modules/sigstore/dist/util/oidc.js b/node_modules/sigstore/dist/util/oidc.js
deleted file mode 100644
index 05af90d09ae68..0000000000000
--- a/node_modules/sigstore/dist/util/oidc.js
+++ /dev/null
@@ -1,54 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.extractJWTSubject = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const enc = __importStar(require("./encoding"));
-function extractJWTSubject(jwt) {
-    const parts = jwt.split('.', 3);
-    const payload = JSON.parse(enc.base64Decode(parts[1]));
-    switch (payload.iss) {
-        case 'https://accounts.google.com':
-        case 'https://oauth2.sigstore.dev/auth':
-            return payload.email;
-        default:
-            return payload.sub;
-    }
-}
-exports.extractJWTSubject = extractJWTSubject;
diff --git a/node_modules/sigstore/dist/util/promise.js b/node_modules/sigstore/dist/util/promise.js
deleted file mode 100644
index 8101dd47afe02..0000000000000
--- a/node_modules/sigstore/dist/util/promise.js
+++ /dev/null
@@ -1,27 +0,0 @@
-"use strict";
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.promiseAny = void 0;
-// Implementation of Promise.any (not available until Node v15).
-// We're basically inverting the logic of Promise.all and taking advantage
-// of the fact that Promise.all will return early on the first rejection.
-// By reversing the resolve/reject logic we can use this to return early
-// on the first resolved promise.
-const promiseAny = async (values) => {
-    return Promise.all([...values].map((promise) => new Promise((resolve, reject) => promise.then(reject, resolve)))).then((errors) => Promise.reject(errors), (value) => Promise.resolve(value));
-};
-exports.promiseAny = promiseAny;
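A quick usage sketch of the inversion trick implemented above (illustrative only; assumes promiseAny as defined in the deleted file, running on a Node version without a native Promise.any):

    promiseAny([
      Promise.reject(new Error('mirror A down')),
      Promise.resolve('mirror B ok'),
    ]).then(console.log); // logs 'mirror B ok'
    // Each inner promise swaps resolve/reject, so Promise.all's early exit on
    // the first "rejection" is really an early exit on the first fulfillment.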
diff --git a/node_modules/sigstore/dist/util/ua.js b/node_modules/sigstore/dist/util/ua.js
deleted file mode 100644
index 6db6b5a2723db..0000000000000
--- a/node_modules/sigstore/dist/util/ua.js
+++ /dev/null
@@ -1,33 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getUserAgent = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const os_1 = __importDefault(require("os"));
-// Format User-Agent: <product> / <product-version> (<system-information>)
-// source: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
-const getUserAgent = () => {
-    // eslint-disable-next-line @typescript-eslint/no-var-requires
-    const packageVersion = require('../../package.json').version;
-    const nodeVersion = process.version;
-    const platformName = os_1.default.platform();
-    const archName = os_1.default.arch();
-    return `sigstore-js/${packageVersion} (Node ${nodeVersion}) (${platformName}/${archName})`;
-};
-exports.getUserAgent = getUserAgent;
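For a sense of the output shape only (version numbers here are invented, not taken from the patch): on a 64-bit Linux host running Node 18, getUserAgent() would return something like 'sigstore-js/1.7.0 (Node v18.17.0) (linux/x64)', with the first segment read from the package's own package.json at runtime.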
diff --git a/node_modules/sigstore/dist/verify.js b/node_modules/sigstore/dist/verify.js
index 49f63d93abb26..a3dc4b307e495 100644
--- a/node_modules/sigstore/dist/verify.js
+++ b/node_modules/sigstore/dist/verify.js
@@ -24,6 +24,22 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Verifier = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const bundle_1 = require("@sigstore/bundle");
 const ca = __importStar(require("./ca/verify"));
 const error_1 = require("./error");
 const tlog = __importStar(require("./tlog/verify"));
@@ -38,7 +54,7 @@ class Verifier {
     // and the bundle's transparency log entries.
     verify(bundle, options, data) {
         this.verifyArtifactSignature(bundle, data);
-        if (sigstore.isBundleWithCertificateChain(bundle)) {
+        if ((0, bundle_1.isBundleWithCertificateChain)(bundle)) {
             this.verifySigningCertificate(bundle, options);
         }
         if (options.tlogOptions.disable === false) {
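The hunk above tracks the type guard's move out of the sigstore-internal namespace and into the standalone @sigstore/bundle package; the compiled (0, bundle_1.isBundleWithCertificateChain)(bundle) form is plain CommonJS interop for:

    const { isBundleWithCertificateChain } = require('@sigstore/bundle');
    if (isBundleWithCertificateChain(bundle)) {
      // only certificate-chain bundles get their signing certificate verified
    }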
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE b/node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE
deleted file mode 100644
index e9e7c1679a09d..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright 2023 The Sigstore Authors
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js
deleted file mode 100644
index c9a8ee92b531e..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js
+++ /dev/null
@@ -1,44 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.appDataPath = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const os_1 = __importDefault(require("os"));
-const path_1 = __importDefault(require("path"));
-function appDataPath(name) {
-    const homedir = os_1.default.homedir();
-    switch (process.platform) {
-        /* istanbul ignore next */
-        case 'darwin': {
-            const appSupport = path_1.default.join(homedir, 'Library', 'Application Support');
-            return path_1.default.join(appSupport, name);
-        }
-        /* istanbul ignore next */
-        case 'win32': {
-            const localAppData = process.env.LOCALAPPDATA || path_1.default.join(homedir, 'AppData', 'Local');
-            return path_1.default.join(localAppData, name, 'Data');
-        }
-        /* istanbul ignore next */
-        default: {
-            const localData = process.env.XDG_DATA_HOME || path_1.default.join(homedir, '.local', 'share');
-            return path_1.default.join(localData, name);
-        }
-    }
-}
-exports.appDataPath = appDataPath;
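Concretely, for a hypothetical user 'casey' with no environment overrides, appDataPath('sigstore-js') resolves per platform as:

    // darwin -> /Users/casey/Library/Application Support/sigstore-js
    // win32  -> C:\Users\casey\AppData\Local\sigstore-js\Data
    // other  -> /home/casey/.local/share/sigstore-js
    // LOCALAPPDATA (win32) and XDG_DATA_HOME (elsewhere) take precedence when set.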
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js
deleted file mode 100644
index 08d6b61840909..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js
+++ /dev/null
@@ -1,101 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TUFClient = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const fs_1 = __importDefault(require("fs"));
-const path_1 = __importDefault(require("path"));
-const tuf_js_1 = require("tuf-js");
-const target_1 = require("./target");
-class TUFClient {
-    constructor(options) {
-        initTufCache(options.cachePath, options.rootPath);
-        const remote = initRemoteConfig(options.cachePath, options.mirrorURL);
-        this.updater = initClient(options.cachePath, remote, options);
-    }
-    async refresh() {
-        return this.updater.refresh();
-    }
-    getTarget(targetName) {
-        return (0, target_1.readTarget)(this.updater, targetName);
-    }
-}
-exports.TUFClient = TUFClient;
-// Initializes the TUF cache directory structure including the initial
-// root.json file. If the cache directory does not exist, it will be
-// created. If the targets directory does not exist, it will be created.
-// If the root.json file does not exist, it will be copied from the
-// rootPath argument.
-function initTufCache(cachePath, tufRootPath) {
-    const targetsPath = path_1.default.join(cachePath, 'targets');
-    const cachedRootPath = path_1.default.join(cachePath, 'root.json');
-    if (!fs_1.default.existsSync(cachePath)) {
-        fs_1.default.mkdirSync(cachePath, { recursive: true });
-    }
-    if (!fs_1.default.existsSync(targetsPath)) {
-        fs_1.default.mkdirSync(targetsPath);
-    }
-    if (!fs_1.default.existsSync(cachedRootPath)) {
-        fs_1.default.copyFileSync(tufRootPath, cachedRootPath);
-    }
-    return cachePath;
-}
-// Initializes the remote.json file, which contains the URL of the TUF
-// repository. If the file does not exist, it will be created. If the file
-// exists, it will be parsed and returned.
-function initRemoteConfig(rootDir, mirrorURL) {
-    let remoteConfig;
-    const remoteConfigPath = path_1.default.join(rootDir, 'remote.json');
-    if (fs_1.default.existsSync(remoteConfigPath)) {
-        const data = fs_1.default.readFileSync(remoteConfigPath, 'utf-8');
-        remoteConfig = JSON.parse(data);
-    }
-    if (!remoteConfig) {
-        remoteConfig = { mirror: mirrorURL };
-        fs_1.default.writeFileSync(remoteConfigPath, JSON.stringify(remoteConfig));
-    }
-    return remoteConfig;
-}
-function initClient(cachePath, remote, options) {
-    const baseURL = remote.mirror;
-    const config = {
-        fetchTimeout: options.timeout,
-    };
-    // tuf-js only supports a number for fetchRetries so we have to
-    // convert the boolean and object options to a number.
-    /* istanbul ignore if */
-    if (typeof options.retry !== 'undefined') {
-        if (typeof options.retry === 'number') {
-            config.fetchRetries = options.retry;
-        }
-        else if (typeof options.retry === 'object') {
-            config.fetchRetries = options.retry.retries;
-        }
-        else if (options.retry === true) {
-            config.fetchRetries = 1;
-        }
-    }
-    return new tuf_js_1.Updater({
-        metadataBaseUrl: baseURL,
-        targetBaseUrl: `${baseURL}/targets`,
-        metadataDir: cachePath,
-        targetDir: path_1.default.join(cachePath, 'targets'),
-        config,
-    });
-}
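The retry normalization at the end of initClient flattens the three accepted option shapes into tuf-js's single numeric fetchRetries knob; under assumed inputs:

    // retry: 3              -> config.fetchRetries = 3
    // retry: { retries: 2 } -> config.fetchRetries = 2
    // retry: true           -> config.fetchRetries = 1
    // retry: undefined      -> fetchRetries left unset (tuf-js default applies)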
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js
deleted file mode 100644
index e13971b289ff2..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js
+++ /dev/null
@@ -1,12 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TUFError = void 0;
-class TUFError extends Error {
-    constructor({ code, message, cause, }) {
-        super(message);
-        this.code = code;
-        this.cause = cause;
-        this.name = this.constructor.name;
-    }
-}
-exports.TUFError = TUFError;
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js
deleted file mode 100644
index 0d201c356dffc..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js
+++ /dev/null
@@ -1,55 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TUFError = exports.initTUF = exports.getTrustedRoot = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const protobuf_specs_1 = require("@sigstore/protobuf-specs");
-const appdata_1 = require("./appdata");
-const client_1 = require("./client");
-const DEFAULT_CACHE_DIR = 'sigstore-js';
-const DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev';
-const DEFAULT_TUF_ROOT_PATH = '../store/public-good-instance-root.json';
-const DEFAULT_RETRY = { retries: 2 };
-const DEFAULT_TIMEOUT = 5000;
-const TRUSTED_ROOT_TARGET = 'trusted_root.json';
-async function getTrustedRoot(
-/* istanbul ignore next */
-options = {}) {
-    const client = createClient(options);
-    const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET);
-    return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot));
-}
-exports.getTrustedRoot = getTrustedRoot;
-async function initTUF(
-/* istanbul ignore next */
-options = {}) {
-    const client = createClient(options);
-    return client.refresh().then(() => client);
-}
-exports.initTUF = initTUF;
-// Create a TUF client with default options
-function createClient(options) {
-    /* istanbul ignore next */
-    return new client_1.TUFClient({
-        cachePath: options.cachePath || (0, appdata_1.appDataPath)(DEFAULT_CACHE_DIR),
-        rootPath: options.rootPath || require.resolve(DEFAULT_TUF_ROOT_PATH),
-        mirrorURL: options.mirrorURL || DEFAULT_MIRROR_URL,
-        retry: options.retry ?? DEFAULT_RETRY,
-        timeout: options.timeout ?? DEFAULT_TIMEOUT,
-    });
-}
-var error_1 = require("./error");
-Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return error_1.TUFError; } });
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js
deleted file mode 100644
index 29eaf99a7e721..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js
+++ /dev/null
@@ -1,80 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.readTarget = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const fs_1 = __importDefault(require("fs"));
-const error_1 = require("./error");
-// Downloads and returns the specified target from the provided TUF Updater.
-async function readTarget(tuf, targetPath) {
-    const path = await getTargetPath(tuf, targetPath);
-    return new Promise((resolve, reject) => {
-        fs_1.default.readFile(path, 'utf-8', (err, data) => {
-            if (err) {
-                reject(new error_1.TUFError({
-                    code: 'TUF_READ_TARGET_ERROR',
-                    message: `error reading target ${path}`,
-                    cause: err,
-                }));
-            }
-            else {
-                resolve(data);
-            }
-        });
-    });
-}
-exports.readTarget = readTarget;
-// Returns the local path to the specified target. If the target is not yet
-// cached locally, the provided TUF Updater will be used to download and
-// cache the target.
-async function getTargetPath(tuf, target) {
-    let targetInfo;
-    try {
-        targetInfo = await tuf.getTargetInfo(target);
-    }
-    catch (err) {
-        throw new error_1.TUFError({
-            code: 'TUF_REFRESH_METADATA_ERROR',
-            message: 'error refreshing TUF metadata',
-            cause: err,
-        });
-    }
-    if (!targetInfo) {
-        throw new error_1.TUFError({
-            code: 'TUF_FIND_TARGET_ERROR',
-            message: `target ${target} not found`,
-        });
-    }
-    let path = await tuf.findCachedTarget(targetInfo);
-    // An empty path here means the target has not been cached locally, or is
-    // out of date. In either case, we need to download it.
-    if (!path) {
-        try {
-            path = await tuf.downloadTarget(targetInfo);
-        }
-        catch (err) {
-            throw new error_1.TUFError({
-                code: 'TUF_DOWNLOAD_TARGET_ERROR',
-                message: `error downloading target ${path}`,
-                cause: err,
-            });
-        }
-    }
-    return path;
-}
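A usage sketch for the deleted reader (hypothetical; updater is an already-constructed tuf-js Updater):

    readTarget(updater, 'trusted_root.json')
      .then((raw) => JSON.parse(raw))
      .catch((err) => console.error(err.code));
    // Per the guards above, err.code is one of TUF_REFRESH_METADATA_ERROR,
    // TUF_FIND_TARGET_ERROR, TUF_DOWNLOAD_TARGET_ERROR, or TUF_READ_TARGET_ERROR.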
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE
deleted file mode 100644
index e9e7c1679a09d..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright 2023 The Sigstore Authors
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
deleted file mode 100644
index 0c367a8384454..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
+++ /dev/null
@@ -1,89 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signature = exports.Envelope = void 0;
-function createBaseEnvelope() {
-    return { payload: Buffer.alloc(0), payloadType: "", signatures: [] };
-}
-exports.Envelope = {
-    fromJSON(object) {
-        return {
-            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
-            payloadType: isSet(object.payloadType) ? String(object.payloadType) : "",
-            signatures: Array.isArray(object?.signatures) ? object.signatures.map((e) => exports.Signature.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.payload !== undefined &&
-            (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0)));
-        message.payloadType !== undefined && (obj.payloadType = message.payloadType);
-        if (message.signatures) {
-            obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined);
-        }
-        else {
-            obj.signatures = [];
-        }
-        return obj;
-    },
-};
-function createBaseSignature() {
-    return { sig: Buffer.alloc(0), keyid: "" };
-}
-exports.Signature = {
-    fromJSON(object) {
-        return {
-            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
-            keyid: isSet(object.keyid) ? String(object.keyid) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0)));
-        message.keyid !== undefined && (obj.keyid = message.keyid);
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
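The two helpers at the bottom are a straight base64 round trip, with an atob/btoa fallback for non-Node globals; a sanity check under assumed inputs:

    const bytes = bytesFromBase64('aGVsbG8='); // Uint8Array for 'hello'
    base64FromBytes(bytes);                    // -> 'aGVsbG8='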
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
deleted file mode 100644
index 073093b8371a8..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
+++ /dev/null
@@ -1,185 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
-/* eslint-disable */
-const any_1 = require("./google/protobuf/any");
-const timestamp_1 = require("./google/protobuf/timestamp");
-function createBaseCloudEvent() {
-    return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined };
-}
-exports.CloudEvent = {
-    fromJSON(object) {
-        return {
-            id: isSet(object.id) ? String(object.id) : "",
-            source: isSet(object.source) ? String(object.source) : "",
-            specVersion: isSet(object.specVersion) ? String(object.specVersion) : "",
-            type: isSet(object.type) ? String(object.type) : "",
-            attributes: isObject(object.attributes)
-                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
-                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
-                    return acc;
-                }, {})
-                : {},
-            data: isSet(object.binaryData)
-                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
-                : isSet(object.textData)
-                    ? { $case: "textData", textData: String(object.textData) }
-                    : isSet(object.protoData)
-                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
-                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.id !== undefined && (obj.id = message.id);
-        message.source !== undefined && (obj.source = message.source);
-        message.specVersion !== undefined && (obj.specVersion = message.specVersion);
-        message.type !== undefined && (obj.type = message.type);
-        obj.attributes = {};
-        if (message.attributes) {
-            Object.entries(message.attributes).forEach(([k, v]) => {
-                obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
-            });
-        }
-        message.data?.$case === "binaryData" &&
-            (obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined);
-        message.data?.$case === "textData" && (obj.textData = message.data?.textData);
-        message.data?.$case === "protoData" &&
-            (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined);
-        return obj;
-    },
-};
-function createBaseCloudEvent_AttributesEntry() {
-    return { key: "", value: undefined };
-}
-exports.CloudEvent_AttributesEntry = {
-    fromJSON(object) {
-        return {
-            key: isSet(object.key) ? String(object.key) : "",
-            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.key !== undefined && (obj.key = message.key);
-        message.value !== undefined &&
-            (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined);
-        return obj;
-    },
-};
-function createBaseCloudEvent_CloudEventAttributeValue() {
-    return { attr: undefined };
-}
-exports.CloudEvent_CloudEventAttributeValue = {
-    fromJSON(object) {
-        return {
-            attr: isSet(object.ceBoolean)
-                ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) }
-                : isSet(object.ceInteger)
-                    ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) }
-                    : isSet(object.ceString)
-                        ? { $case: "ceString", ceString: String(object.ceString) }
-                        : isSet(object.ceBytes)
-                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
-                            : isSet(object.ceUri)
-                                ? { $case: "ceUri", ceUri: String(object.ceUri) }
-                                : isSet(object.ceUriRef)
-                                    ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) }
-                                    : isSet(object.ceTimestamp)
-                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
-                                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean);
-        message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger));
-        message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString);
-        message.attr?.$case === "ceBytes" &&
-            (obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined);
-        message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri);
-        message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef);
-        message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString());
-        return obj;
-    },
-};
-function createBaseCloudEventBatch() {
-    return { events: [] };
-}
-exports.CloudEventBatch = {
-    fromJSON(object) {
-        return { events: Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.events) {
-            obj.events = message.events.map((e) => e ? exports.CloudEvent.toJSON(e) : undefined);
-        }
-        else {
-            obj.events = [];
-        }
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function fromTimestamp(t) {
-    let millis = Number(t.seconds) * 1000;
-    millis += t.nanos / 1000000;
-    return new Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isObject(value) {
-    return typeof value === "object" && value !== null;
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
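fromJsonTimestamp accepts a Date, an ISO string, or a protobuf Timestamp object; the last path goes through fromTimestamp, which folds seconds and nanos into milliseconds:

    fromTimestamp({ seconds: 1, nanos: 500000000 });
    // -> new Date(1500), i.e. 1.5 seconds after the Unix epoch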
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
deleted file mode 100644
index da627499ad765..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
+++ /dev/null
@@ -1,119 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0;
-/**
- * An indicator of the behavior of a given field (for example, that a field
- * is required in requests, or given as output but ignored as input).
- * This **does not** change the behavior in protocol buffers itself; it only
- * denotes the behavior and may affect how API tooling handles the field.
- *
- * Note: This enum **may** receive new values in the future.
- */
-var FieldBehavior;
-(function (FieldBehavior) {
-    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
-    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
-    /**
-     * OPTIONAL - Specifically denotes a field as optional.
-     * While all fields in protocol buffers are optional, this may be specified
-     * for emphasis if appropriate.
-     */
-    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
-    /**
-     * REQUIRED - Denotes a field as required.
-     * This indicates that the field **must** be provided as part of the request,
-     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
-     */
-    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
-    /**
-     * OUTPUT_ONLY - Denotes a field as output only.
-     * This indicates that the field is provided in responses, but including the
-     * field in a request does nothing (the server *must* ignore it and
-     * *must not* throw an error as a result of the field's presence).
-     */
-    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
-    /**
-     * INPUT_ONLY - Denotes a field as input only.
-     * This indicates that the field is provided in requests, and the
-     * corresponding field is not included in output.
-     */
-    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
-    /**
-     * IMMUTABLE - Denotes a field as immutable.
-     * This indicates that the field may be set once in a request to create a
-     * resource, but may not be changed thereafter.
-     */
-    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
-    /**
-     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
-     * This indicates that the service may provide the elements of the list
-     * in any arbitrary order, rather than the order the user originally
-     * provided. Additionally, the list's order may or may not be stable.
-     */
-    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
-})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {}));
-function fieldBehaviorFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "FIELD_BEHAVIOR_UNSPECIFIED":
-            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
-        case 1:
-        case "OPTIONAL":
-            return FieldBehavior.OPTIONAL;
-        case 2:
-        case "REQUIRED":
-            return FieldBehavior.REQUIRED;
-        case 3:
-        case "OUTPUT_ONLY":
-            return FieldBehavior.OUTPUT_ONLY;
-        case 4:
-        case "INPUT_ONLY":
-            return FieldBehavior.INPUT_ONLY;
-        case 5:
-        case "IMMUTABLE":
-            return FieldBehavior.IMMUTABLE;
-        case 6:
-        case "UNORDERED_LIST":
-            return FieldBehavior.UNORDERED_LIST;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
-exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
-function fieldBehaviorToJSON(object) {
-    switch (object) {
-        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
-            return "FIELD_BEHAVIOR_UNSPECIFIED";
-        case FieldBehavior.OPTIONAL:
-            return "OPTIONAL";
-        case FieldBehavior.REQUIRED:
-            return "REQUIRED";
-        case FieldBehavior.OUTPUT_ONLY:
-            return "OUTPUT_ONLY";
-        case FieldBehavior.INPUT_ONLY:
-            return "INPUT_ONLY";
-        case FieldBehavior.IMMUTABLE:
-            return "IMMUTABLE";
-        case FieldBehavior.UNORDERED_LIST:
-            return "UNORDERED_LIST";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
-exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
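
For context on the pattern being removed above: every generated enum pairs a numeric/name reverse mapping with strict fromJSON/toJSON converters that accept either the wire number or the string name and hard-fail on anything else. A minimal sketch of that pattern (hypothetical Color enum, not part of this patch):

// Reverse-mapped enum: Color.RED === 0 and Color[0] === "RED".
const Color = {};
Color[Color["RED"] = 0] = "RED";
Color[Color["BLUE"] = 1] = "BLUE";

function colorFromJSON(object) {
  switch (object) {
    case 0:
    case "RED":
      return Color.RED;
    case 1:
    case "BLUE":
      return Color.BLUE;
    default:
      // Mirrors the generated code: unrecognized values are a hard error.
      throw new Error("Unrecognized enum value " + object + " for enum Color");
  }
}

function colorToJSON(object) {
  switch (object) {
    case Color.RED:
      return "RED";
    case Color.BLUE:
      return "BLUE";
    default:
      throw new Error("Unrecognized enum value " + object + " for enum Color");
  }
}

// colorFromJSON("BLUE") === 1; colorToJSON(0) === "RED"
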
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
deleted file mode 100644
index 6b3f3c97a6647..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
+++ /dev/null
@@ -1,65 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Any = void 0;
-function createBaseAny() {
-    return { typeUrl: "", value: Buffer.alloc(0) };
-}
-exports.Any = {
-    fromJSON(object) {
-        return {
-            typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "",
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl);
-        message.value !== undefined &&
-            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
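
The file deleted above also shows the runtime-detection boilerplate that each of these generated modules carries: resolve a global object, then prefer Buffer (Node) over atob/btoa (browsers) for the base64 helpers. A condensed sketch, assuming only that one of the standard globals exists (globalRef is a hypothetical name, not from the patch):

const globalRef = (() => {
  if (typeof globalThis !== "undefined") return globalThis;
  if (typeof self !== "undefined") return self;
  if (typeof window !== "undefined") return window;
  if (typeof global !== "undefined") return global;
  throw new Error("Unable to locate global object");
})();

function bytesFromBase64(b64) {
  if (globalRef.Buffer) {
    // Node path: decode via Buffer, return a plain Uint8Array.
    return Uint8Array.from(globalRef.Buffer.from(b64, "base64"));
  }
  // Browser path: atob yields a binary string; copy it byte by byte.
  const bin = globalRef.atob(b64);
  const arr = new Uint8Array(bin.length);
  for (let i = 0; i < bin.length; ++i) {
    arr[i] = bin.charCodeAt(i);
  }
  return arr;
}

// bytesFromBase64("aGk=") -> Uint8Array [104, 105] ("hi")
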
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
deleted file mode 100644
index d429aac846043..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
+++ /dev/null
@@ -1,1308 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0;
-var FieldDescriptorProto_Type;
-(function (FieldDescriptorProto_Type) {
-    /**
-     * TYPE_DOUBLE - 0 is reserved for errors.
-     * Order is weird for historical reasons.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
-    /**
-     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
-    /**
-     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
-    /**
-     * TYPE_GROUP - Tag-delimited aggregate.
-     * Group type is deprecated and not supported in proto3. However, Proto3
-     * implementations should still be able to parse the group wire format and
-     * treat group fields as unknown fields.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
-    /** TYPE_MESSAGE - Length-delimited aggregate. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
-    /** TYPE_BYTES - New in version 2. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
-    /** TYPE_SINT32 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
-    /** TYPE_SINT64 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
-})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {}));
-function fieldDescriptorProto_TypeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "TYPE_DOUBLE":
-            return FieldDescriptorProto_Type.TYPE_DOUBLE;
-        case 2:
-        case "TYPE_FLOAT":
-            return FieldDescriptorProto_Type.TYPE_FLOAT;
-        case 3:
-        case "TYPE_INT64":
-            return FieldDescriptorProto_Type.TYPE_INT64;
-        case 4:
-        case "TYPE_UINT64":
-            return FieldDescriptorProto_Type.TYPE_UINT64;
-        case 5:
-        case "TYPE_INT32":
-            return FieldDescriptorProto_Type.TYPE_INT32;
-        case 6:
-        case "TYPE_FIXED64":
-            return FieldDescriptorProto_Type.TYPE_FIXED64;
-        case 7:
-        case "TYPE_FIXED32":
-            return FieldDescriptorProto_Type.TYPE_FIXED32;
-        case 8:
-        case "TYPE_BOOL":
-            return FieldDescriptorProto_Type.TYPE_BOOL;
-        case 9:
-        case "TYPE_STRING":
-            return FieldDescriptorProto_Type.TYPE_STRING;
-        case 10:
-        case "TYPE_GROUP":
-            return FieldDescriptorProto_Type.TYPE_GROUP;
-        case 11:
-        case "TYPE_MESSAGE":
-            return FieldDescriptorProto_Type.TYPE_MESSAGE;
-        case 12:
-        case "TYPE_BYTES":
-            return FieldDescriptorProto_Type.TYPE_BYTES;
-        case 13:
-        case "TYPE_UINT32":
-            return FieldDescriptorProto_Type.TYPE_UINT32;
-        case 14:
-        case "TYPE_ENUM":
-            return FieldDescriptorProto_Type.TYPE_ENUM;
-        case 15:
-        case "TYPE_SFIXED32":
-            return FieldDescriptorProto_Type.TYPE_SFIXED32;
-        case 16:
-        case "TYPE_SFIXED64":
-            return FieldDescriptorProto_Type.TYPE_SFIXED64;
-        case 17:
-        case "TYPE_SINT32":
-            return FieldDescriptorProto_Type.TYPE_SINT32;
-        case 18:
-        case "TYPE_SINT64":
-            return FieldDescriptorProto_Type.TYPE_SINT64;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
-function fieldDescriptorProto_TypeToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Type.TYPE_DOUBLE:
-            return "TYPE_DOUBLE";
-        case FieldDescriptorProto_Type.TYPE_FLOAT:
-            return "TYPE_FLOAT";
-        case FieldDescriptorProto_Type.TYPE_INT64:
-            return "TYPE_INT64";
-        case FieldDescriptorProto_Type.TYPE_UINT64:
-            return "TYPE_UINT64";
-        case FieldDescriptorProto_Type.TYPE_INT32:
-            return "TYPE_INT32";
-        case FieldDescriptorProto_Type.TYPE_FIXED64:
-            return "TYPE_FIXED64";
-        case FieldDescriptorProto_Type.TYPE_FIXED32:
-            return "TYPE_FIXED32";
-        case FieldDescriptorProto_Type.TYPE_BOOL:
-            return "TYPE_BOOL";
-        case FieldDescriptorProto_Type.TYPE_STRING:
-            return "TYPE_STRING";
-        case FieldDescriptorProto_Type.TYPE_GROUP:
-            return "TYPE_GROUP";
-        case FieldDescriptorProto_Type.TYPE_MESSAGE:
-            return "TYPE_MESSAGE";
-        case FieldDescriptorProto_Type.TYPE_BYTES:
-            return "TYPE_BYTES";
-        case FieldDescriptorProto_Type.TYPE_UINT32:
-            return "TYPE_UINT32";
-        case FieldDescriptorProto_Type.TYPE_ENUM:
-            return "TYPE_ENUM";
-        case FieldDescriptorProto_Type.TYPE_SFIXED32:
-            return "TYPE_SFIXED32";
-        case FieldDescriptorProto_Type.TYPE_SFIXED64:
-            return "TYPE_SFIXED64";
-        case FieldDescriptorProto_Type.TYPE_SINT32:
-            return "TYPE_SINT32";
-        case FieldDescriptorProto_Type.TYPE_SINT64:
-            return "TYPE_SINT64";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
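// Aside (not from this patch): the TYPE_SINT32/TYPE_SINT64 comments above
// refer to ZigZag encoding, which maps signed integers to unsigned ones so
// small negative numbers stay small on the wire; plain TYPE_INT32/TYPE_INT64
// varints spend 10 bytes on any negative value. A 32-bit sketch:
function zigzagEncode32(n) {
  return (n << 1) ^ (n >> 31); // -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4, ...
}
function zigzagDecode32(z) {
  return (z >>> 1) ^ -(z & 1);
}
// zigzagEncode32(-1) === 1; zigzagDecode32(1) === -1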
-var FieldDescriptorProto_Label;
-(function (FieldDescriptorProto_Label) {
-    /** LABEL_OPTIONAL - 0 is reserved for errors */
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
-})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {}));
-function fieldDescriptorProto_LabelFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "LABEL_OPTIONAL":
-            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
-        case 2:
-        case "LABEL_REQUIRED":
-            return FieldDescriptorProto_Label.LABEL_REQUIRED;
-        case 3:
-        case "LABEL_REPEATED":
-            return FieldDescriptorProto_Label.LABEL_REPEATED;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
-function fieldDescriptorProto_LabelToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
-            return "LABEL_OPTIONAL";
-        case FieldDescriptorProto_Label.LABEL_REQUIRED:
-            return "LABEL_REQUIRED";
-        case FieldDescriptorProto_Label.LABEL_REPEATED:
-            return "LABEL_REPEATED";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
-/** Generated classes can be optimized for speed or code size. */
-var FileOptions_OptimizeMode;
-(function (FileOptions_OptimizeMode) {
-    /** SPEED - Generate complete code for parsing, serialization, */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
-    /** CODE_SIZE - etc. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
-    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
-})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {}));
-function fileOptions_OptimizeModeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "SPEED":
-            return FileOptions_OptimizeMode.SPEED;
-        case 2:
-        case "CODE_SIZE":
-            return FileOptions_OptimizeMode.CODE_SIZE;
-        case 3:
-        case "LITE_RUNTIME":
-            return FileOptions_OptimizeMode.LITE_RUNTIME;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
-function fileOptions_OptimizeModeToJSON(object) {
-    switch (object) {
-        case FileOptions_OptimizeMode.SPEED:
-            return "SPEED";
-        case FileOptions_OptimizeMode.CODE_SIZE:
-            return "CODE_SIZE";
-        case FileOptions_OptimizeMode.LITE_RUNTIME:
-            return "LITE_RUNTIME";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
-var FieldOptions_CType;
-(function (FieldOptions_CType) {
-    /** STRING - Default mode. */
-    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
-    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
-    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
-})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {}));
-function fieldOptions_CTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "STRING":
-            return FieldOptions_CType.STRING;
-        case 1:
-        case "CORD":
-            return FieldOptions_CType.CORD;
-        case 2:
-        case "STRING_PIECE":
-            return FieldOptions_CType.STRING_PIECE;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
-function fieldOptions_CTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_CType.STRING:
-            return "STRING";
-        case FieldOptions_CType.CORD:
-            return "CORD";
-        case FieldOptions_CType.STRING_PIECE:
-            return "STRING_PIECE";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
-var FieldOptions_JSType;
-(function (FieldOptions_JSType) {
-    /** JS_NORMAL - Use the default type. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
-    /** JS_STRING - Use JavaScript strings. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
-    /** JS_NUMBER - Use JavaScript numbers. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
-})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {}));
-function fieldOptions_JSTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "JS_NORMAL":
-            return FieldOptions_JSType.JS_NORMAL;
-        case 1:
-        case "JS_STRING":
-            return FieldOptions_JSType.JS_STRING;
-        case 2:
-        case "JS_NUMBER":
-            return FieldOptions_JSType.JS_NUMBER;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
-function fieldOptions_JSTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_JSType.JS_NORMAL:
-            return "JS_NORMAL";
-        case FieldOptions_JSType.JS_STRING:
-            return "JS_STRING";
-        case FieldOptions_JSType.JS_NUMBER:
-            return "JS_NUMBER";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
-/**
- * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
- * or neither? HTTP based RPC implementation may choose GET verb for safe
- * methods, and PUT verb for idempotent methods instead of the default POST.
- */
-var MethodOptions_IdempotencyLevel;
-(function (MethodOptions_IdempotencyLevel) {
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
-    /** NO_SIDE_EFFECTS - implies idempotent */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
-    /** IDEMPOTENT - idempotent, but may have side effects */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
-})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {}));
-function methodOptions_IdempotencyLevelFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "IDEMPOTENCY_UNKNOWN":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
-        case 1:
-        case "NO_SIDE_EFFECTS":
-            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
-        case 2:
-        case "IDEMPOTENT":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
-function methodOptions_IdempotencyLevelToJSON(object) {
-    switch (object) {
-        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
-            return "IDEMPOTENCY_UNKNOWN";
-        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
-            return "NO_SIDE_EFFECTS";
-        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
-            return "IDEMPOTENT";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
-function createBaseFileDescriptorSet() {
-    return { file: [] };
-}
-exports.FileDescriptorSet = {
-    fromJSON(object) {
-        return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.file) {
-            obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.file = [];
-        }
-        return obj;
-    },
-};
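// Aside (not from this patch): FileDescriptorSet above is the simplest
// instance of the codec shape repeated for every descriptor type below --
// fromJSON fills proto3 defaults for absent fields, toJSON emits only fields
// that are set, and integers are normalized with Math.round. A sketch with a
// hypothetical Point message and an inline isSet helper:
const isSet = (value) => value !== null && value !== undefined;

function createBasePoint() {
  return { x: 0, y: 0 };
}
const Point = {
  fromJSON(object) {
    return {
      x: isSet(object.x) ? Number(object.x) : 0,
      y: isSet(object.y) ? Number(object.y) : 0,
    };
  },
  toJSON(message) {
    const obj = {};
    message.x !== undefined && (obj.x = Math.round(message.x));
    message.y !== undefined && (obj.y = Math.round(message.y));
    return obj;
  },
};
// Point.fromJSON({ x: "3" }) -> { x: 3, y: 0 }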
-function createBaseFileDescriptorProto() {
-    return {
-        name: "",
-        package: "",
-        dependency: [],
-        publicDependency: [],
-        weakDependency: [],
-        messageType: [],
-        enumType: [],
-        service: [],
-        extension: [],
-        options: undefined,
-        sourceCodeInfo: undefined,
-        syntax: "",
-    };
-}
-exports.FileDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            package: isSet(object.package) ? String(object.package) : "",
-            dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [],
-            publicDependency: Array.isArray(object?.publicDependency)
-                ? object.publicDependency.map((e) => Number(e))
-                : [],
-            weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [],
-            messageType: Array.isArray(object?.messageType)
-                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
-            service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [],
-            extension: Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
-            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
-            syntax: isSet(object.syntax) ? String(object.syntax) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.package !== undefined && (obj.package = message.package);
-        if (message.dependency) {
-            obj.dependency = message.dependency.map((e) => e);
-        }
-        else {
-            obj.dependency = [];
-        }
-        if (message.publicDependency) {
-            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
-        }
-        else {
-            obj.publicDependency = [];
-        }
-        if (message.weakDependency) {
-            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
-        }
-        else {
-            obj.weakDependency = [];
-        }
-        if (message.messageType) {
-            obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.messageType = [];
-        }
-        if (message.enumType) {
-            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.enumType = [];
-        }
-        if (message.service) {
-            obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.service = [];
-        }
-        if (message.extension) {
-            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.extension = [];
-        }
-        message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined);
-        message.sourceCodeInfo !== undefined &&
-            (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined);
-        message.syntax !== undefined && (obj.syntax = message.syntax);
-        return obj;
-    },
-};
-function createBaseDescriptorProto() {
-    return {
-        name: "",
-        field: [],
-        extension: [],
-        nestedType: [],
-        enumType: [],
-        extensionRange: [],
-        oneofDecl: [],
-        options: undefined,
-        reservedRange: [],
-        reservedName: [],
-    };
-}
-exports.DescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [],
-            extension: Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            nestedType: Array.isArray(object?.nestedType)
-                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
-            extensionRange: Array.isArray(object?.extensionRange)
-                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
-                : [],
-            oneofDecl: Array.isArray(object?.oneofDecl)
-                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
-            reservedRange: Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
-                : [],
-            reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        if (message.field) {
-            obj.field = message.field.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.field = [];
-        }
-        if (message.extension) {
-            obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.extension = [];
-        }
-        if (message.nestedType) {
-            obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.nestedType = [];
-        }
-        if (message.enumType) {
-            obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.enumType = [];
-        }
-        if (message.extensionRange) {
-            obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined);
-        }
-        else {
-            obj.extensionRange = [];
-        }
-        if (message.oneofDecl) {
-            obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.oneofDecl = [];
-        }
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined);
-        if (message.reservedRange) {
-            obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined);
-        }
-        else {
-            obj.reservedRange = [];
-        }
-        if (message.reservedName) {
-            obj.reservedName = message.reservedName.map((e) => e);
-        }
-        else {
-            obj.reservedName = [];
-        }
-        return obj;
-    },
-};
-function createBaseDescriptorProto_ExtensionRange() {
-    return { start: 0, end: 0, options: undefined };
-}
-exports.DescriptorProto_ExtensionRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? Number(object.start) : 0,
-            end: isSet(object.end) ? Number(object.end) : 0,
-            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = Math.round(message.start));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseDescriptorProto_ReservedRange() {
-    return { start: 0, end: 0 };
-}
-exports.DescriptorProto_ReservedRange = {
-    fromJSON(object) {
-        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = Math.round(message.start));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        return obj;
-    },
-};
-function createBaseExtensionRangeOptions() {
-    return { uninterpretedOption: [] };
-}
-exports.ExtensionRangeOptions = {
-    fromJSON(object) {
-        return {
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseFieldDescriptorProto() {
-    return {
-        name: "",
-        number: 0,
-        label: 1,
-        type: 1,
-        typeName: "",
-        extendee: "",
-        defaultValue: "",
-        oneofIndex: 0,
-        jsonName: "",
-        options: undefined,
-        proto3Optional: false,
-    };
-}
-exports.FieldDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            number: isSet(object.number) ? Number(object.number) : 0,
-            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
-            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
-            typeName: isSet(object.typeName) ? String(object.typeName) : "",
-            extendee: isSet(object.extendee) ? String(object.extendee) : "",
-            defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "",
-            oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0,
-            jsonName: isSet(object.jsonName) ? String(object.jsonName) : "",
-            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
-            proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.number !== undefined && (obj.number = Math.round(message.number));
-        message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label));
-        message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type));
-        message.typeName !== undefined && (obj.typeName = message.typeName);
-        message.extendee !== undefined && (obj.extendee = message.extendee);
-        message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue);
-        message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex));
-        message.jsonName !== undefined && (obj.jsonName = message.jsonName);
-        message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined);
-        message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional);
-        return obj;
-    },
-};
-function createBaseOneofDescriptorProto() {
-    return { name: "", options: undefined };
-}
-exports.OneofDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseEnumDescriptorProto() {
-    return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] };
-}
-exports.EnumDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [],
-            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
-            reservedRange: Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
-                : [],
-            reservedName: Array.isArray(object?.reservedName)
-                ? object.reservedName.map((e) => String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        if (message.value) {
-            obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.value = [];
-        }
-        message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined);
-        if (message.reservedRange) {
-            obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined);
-        }
-        else {
-            obj.reservedRange = [];
-        }
-        if (message.reservedName) {
-            obj.reservedName = message.reservedName.map((e) => e);
-        }
-        else {
-            obj.reservedName = [];
-        }
-        return obj;
-    },
-};
-function createBaseEnumDescriptorProto_EnumReservedRange() {
-    return { start: 0, end: 0 };
-}
-exports.EnumDescriptorProto_EnumReservedRange = {
-    fromJSON(object) {
-        return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = Math.round(message.start));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        return obj;
-    },
-};
-function createBaseEnumValueDescriptorProto() {
-    return { name: "", number: 0, options: undefined };
-}
-exports.EnumValueDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            number: isSet(object.number) ? Number(object.number) : 0,
-            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.number !== undefined && (obj.number = Math.round(message.number));
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseServiceDescriptorProto() {
-    return { name: "", method: [], options: undefined };
-}
-exports.ServiceDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [],
-            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        if (message.method) {
-            obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined);
-        }
-        else {
-            obj.method = [];
-        }
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined);
-        return obj;
-    },
-};
-function createBaseMethodDescriptorProto() {
-    return {
-        name: "",
-        inputType: "",
-        outputType: "",
-        options: undefined,
-        clientStreaming: false,
-        serverStreaming: false,
-    };
-}
-exports.MethodDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? String(object.name) : "",
-            inputType: isSet(object.inputType) ? String(object.inputType) : "",
-            outputType: isSet(object.outputType) ? String(object.outputType) : "",
-            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
-            clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false,
-            serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.name !== undefined && (obj.name = message.name);
-        message.inputType !== undefined && (obj.inputType = message.inputType);
-        message.outputType !== undefined && (obj.outputType = message.outputType);
-        message.options !== undefined &&
-            (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined);
-        message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming);
-        message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming);
-        return obj;
-    },
-};
-function createBaseFileOptions() {
-    return {
-        javaPackage: "",
-        javaOuterClassname: "",
-        javaMultipleFiles: false,
-        javaGenerateEqualsAndHash: false,
-        javaStringCheckUtf8: false,
-        optimizeFor: 1,
-        goPackage: "",
-        ccGenericServices: false,
-        javaGenericServices: false,
-        pyGenericServices: false,
-        phpGenericServices: false,
-        deprecated: false,
-        ccEnableArenas: false,
-        objcClassPrefix: "",
-        csharpNamespace: "",
-        swiftPrefix: "",
-        phpClassPrefix: "",
-        phpNamespace: "",
-        phpMetadataNamespace: "",
-        rubyPackage: "",
-        uninterpretedOption: [],
-    };
-}
-exports.FileOptions = {
-    fromJSON(object) {
-        return {
-            javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "",
-            javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "",
-            javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false,
-            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
-                ? Boolean(object.javaGenerateEqualsAndHash)
-                : false,
-            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false,
-            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
-            goPackage: isSet(object.goPackage) ? String(object.goPackage) : "",
-            ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false,
-            javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false,
-            pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false,
-            phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false,
-            objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "",
-            csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "",
-            swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "",
-            phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "",
-            phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "",
-            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "",
-            rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "",
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage);
-        message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname);
-        message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles);
-        message.javaGenerateEqualsAndHash !== undefined &&
-            (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash);
-        message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8);
-        message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor));
-        message.goPackage !== undefined && (obj.goPackage = message.goPackage);
-        message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices);
-        message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices);
-        message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices);
-        message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas);
-        message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix);
-        message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace);
-        message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix);
-        message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix);
-        message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace);
-        message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace);
-        message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseMessageOptions() {
-    return {
-        messageSetWireFormat: false,
-        noStandardDescriptorAccessor: false,
-        deprecated: false,
-        mapEntry: false,
-        uninterpretedOption: [],
-    };
-}
-exports.MessageOptions = {
-    fromJSON(object) {
-        return {
-            messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false,
-            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
-                ? Boolean(object.noStandardDescriptorAccessor)
-                : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat);
-        message.noStandardDescriptorAccessor !== undefined &&
-            (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseFieldOptions() {
-    return {
-        ctype: 0,
-        packed: false,
-        jstype: 0,
-        lazy: false,
-        unverifiedLazy: false,
-        deprecated: false,
-        weak: false,
-        uninterpretedOption: [],
-    };
-}
-exports.FieldOptions = {
-    fromJSON(object) {
-        return {
-            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
-            packed: isSet(object.packed) ? Boolean(object.packed) : false,
-            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
-            lazy: isSet(object.lazy) ? Boolean(object.lazy) : false,
-            unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            weak: isSet(object.weak) ? Boolean(object.weak) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype));
-        message.packed !== undefined && (obj.packed = message.packed);
-        message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype));
-        message.lazy !== undefined && (obj.lazy = message.lazy);
-        message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.weak !== undefined && (obj.weak = message.weak);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseOneofOptions() {
-    return { uninterpretedOption: [] };
-}
-exports.OneofOptions = {
-    fromJSON(object) {
-        return {
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseEnumOptions() {
-    return { allowAlias: false, deprecated: false, uninterpretedOption: [] };
-}
-exports.EnumOptions = {
-    fromJSON(object) {
-        return {
-            allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false,
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias);
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseEnumValueOptions() {
-    return { deprecated: false, uninterpretedOption: [] };
-}
-exports.EnumValueOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseServiceOptions() {
-    return { deprecated: false, uninterpretedOption: [] };
-}
-exports.ServiceOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseMethodOptions() {
-    return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] };
-}
-exports.MethodOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
-            idempotencyLevel: isSet(object.idempotencyLevel)
-                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
-                : 0,
-            uninterpretedOption: Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.deprecated !== undefined && (obj.deprecated = message.deprecated);
-        message.idempotencyLevel !== undefined &&
-            (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel));
-        if (message.uninterpretedOption) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
-        }
-        else {
-            obj.uninterpretedOption = [];
-        }
-        return obj;
-    },
-};
-function createBaseUninterpretedOption() {
-    return {
-        name: [],
-        identifierValue: "",
-        positiveIntValue: "0",
-        negativeIntValue: "0",
-        doubleValue: 0,
-        stringValue: Buffer.alloc(0),
-        aggregateValue: "",
-    };
-}
-exports.UninterpretedOption = {
-    fromJSON(object) {
-        return {
-            name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [],
-            identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "",
-            positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0",
-            negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0",
-            doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0,
-            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
-            aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name) {
-            obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined);
-        }
-        else {
-            obj.name = [];
-        }
-        message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue);
-        message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue);
-        message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue);
-        message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue);
-        message.stringValue !== undefined &&
-            (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0)));
-        message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue);
-        return obj;
-    },
-};
-function createBaseUninterpretedOption_NamePart() {
-    return { namePart: "", isExtension: false };
-}
-exports.UninterpretedOption_NamePart = {
-    fromJSON(object) {
-        return {
-            namePart: isSet(object.namePart) ? String(object.namePart) : "",
-            isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.namePart !== undefined && (obj.namePart = message.namePart);
-        message.isExtension !== undefined && (obj.isExtension = message.isExtension);
-        return obj;
-    },
-};
-function createBaseSourceCodeInfo() {
-    return { location: [] };
-}
-exports.SourceCodeInfo = {
-    fromJSON(object) {
-        return {
-            location: Array.isArray(object?.location)
-                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.location) {
-            obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined);
-        }
-        else {
-            obj.location = [];
-        }
-        return obj;
-    },
-};
-function createBaseSourceCodeInfo_Location() {
-    return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] };
-}
-exports.SourceCodeInfo_Location = {
-    fromJSON(object) {
-        return {
-            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
-            span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [],
-            leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "",
-            trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "",
-            leadingDetachedComments: Array.isArray(object?.leadingDetachedComments)
-                ? object.leadingDetachedComments.map((e) => String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        else {
-            obj.path = [];
-        }
-        if (message.span) {
-            obj.span = message.span.map((e) => Math.round(e));
-        }
-        else {
-            obj.span = [];
-        }
-        message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments);
-        message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments);
-        if (message.leadingDetachedComments) {
-            obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e);
-        }
-        else {
-            obj.leadingDetachedComments = [];
-        }
-        return obj;
-    },
-};
-function createBaseGeneratedCodeInfo() {
-    return { annotation: [] };
-}
-exports.GeneratedCodeInfo = {
-    fromJSON(object) {
-        return {
-            annotation: Array.isArray(object?.annotation)
-                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.annotation) {
-            obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined);
-        }
-        else {
-            obj.annotation = [];
-        }
-        return obj;
-    },
-};
-function createBaseGeneratedCodeInfo_Annotation() {
-    return { path: [], sourceFile: "", begin: 0, end: 0 };
-}
-exports.GeneratedCodeInfo_Annotation = {
-    fromJSON(object) {
-        return {
-            path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
-            sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "",
-            begin: isSet(object.begin) ? Number(object.begin) : 0,
-            end: isSet(object.end) ? Number(object.end) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        else {
-            obj.path = [];
-        }
-        message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile);
-        message.begin !== undefined && (obj.begin = Math.round(message.begin));
-        message.end !== undefined && (obj.end = Math.round(message.end));
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
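Reviewer note: every message codec in the generated files deleted above follows the same ts-proto shape — a createBase* factory for proto3 defaults, a fromJSON that tolerates absent fields via isSet, and a toJSON that writes only defined fields. A minimal self-contained sketch of that pattern (the "Example" message is hypothetical, not one of the deleted codecs):

    // Sketch of the ts-proto codec pattern used throughout these files.
    function isSet(value) {
        return value !== null && value !== undefined;
    }

    function createBaseExample() {
        return { name: "", count: 0 };
    }

    const Example = {
        fromJSON(object) {
            return {
                // Fall back to proto3 defaults when a field is absent in the JSON.
                name: isSet(object.name) ? String(object.name) : "",
                count: isSet(object.count) ? Number(object.count) : 0,
            };
        },
        toJSON(message) {
            const obj = {};
            // Only emit fields that are present on the message object.
            message.name !== undefined && (obj.name = message.name);
            message.count !== undefined && (obj.count = Math.round(message.count));
            return obj;
        },
    };

    // Round trip: missing fields come back as defaults.
    console.log(Example.toJSON(Example.fromJSON({ name: "x" }))); // { name: 'x', count: 0 }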
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
deleted file mode 100644
index 159135fe87172..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-/* eslint-disable */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Timestamp = void 0;
-function createBaseTimestamp() {
-    return { seconds: "0", nanos: 0 };
-}
-exports.Timestamp = {
-    fromJSON(object) {
-        return {
-            seconds: isSet(object.seconds) ? String(object.seconds) : "0",
-            nanos: isSet(object.nanos) ? Number(object.nanos) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.seconds !== undefined && (obj.seconds = message.seconds);
-        message.nanos !== undefined && (obj.nanos = Math.round(message.nanos));
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
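Reviewer note: the Timestamp codec above keeps `seconds` as a decimal string rather than a number because 64-bit second counts can exceed Number.MAX_SAFE_INTEGER. Converting that shape to a JS Date is done the same way as the fromTimestamp() helper in the sigstore_common.js file deleted later in this patch; a small sketch:

    // Sketch: converting the generated Timestamp shape to a JS Date.
    // Mirrors fromTimestamp() in the deleted sigstore_common.js.
    function fromTimestamp(t) {
        let millis = Number(t.seconds) * 1000;
        millis += t.nanos / 1000000;
        return new Date(millis);
    }

    const ts = { seconds: "0", nanos: 500000000 };
    console.log(fromTimestamp(ts).toISOString()); // 1970-01-01T00:00:00.500Z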
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
deleted file mode 100644
index 1ef3e1b3356b7..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
+++ /dev/null
@@ -1,106 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
-/* eslint-disable */
-const envelope_1 = require("./envelope");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_rekor_1 = require("./sigstore_rekor");
-function createBaseTimestampVerificationData() {
-    return { rfc3161Timestamps: [] };
-}
-exports.TimestampVerificationData = {
-    fromJSON(object) {
-        return {
-            rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps)
-                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rfc3161Timestamps) {
-            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined);
-        }
-        else {
-            obj.rfc3161Timestamps = [];
-        }
-        return obj;
-    },
-};
-function createBaseVerificationMaterial() {
-    return { content: undefined, tlogEntries: [], timestampVerificationData: undefined };
-}
-exports.VerificationMaterial = {
-    fromJSON(object) {
-        return {
-            content: isSet(object.publicKey)
-                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
-                : isSet(object.x509CertificateChain)
-                    ? {
-                        $case: "x509CertificateChain",
-                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
-                    }
-                    : undefined,
-            tlogEntries: Array.isArray(object?.tlogEntries)
-                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
-                : [],
-            timestampVerificationData: isSet(object.timestampVerificationData)
-                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.content?.$case === "publicKey" &&
-            (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined);
-        message.content?.$case === "x509CertificateChain" &&
-            (obj.x509CertificateChain = message.content?.x509CertificateChain
-                ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain)
-                : undefined);
-        if (message.tlogEntries) {
-            obj.tlogEntries = message.tlogEntries.map((e) => e ? sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
-        }
-        else {
-            obj.tlogEntries = [];
-        }
-        message.timestampVerificationData !== undefined &&
-            (obj.timestampVerificationData = message.timestampVerificationData
-                ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
-                : undefined);
-        return obj;
-    },
-};
-function createBaseBundle() {
-    return { mediaType: "", verificationMaterial: undefined, content: undefined };
-}
-exports.Bundle = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
-            verificationMaterial: isSet(object.verificationMaterial)
-                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
-                : undefined,
-            content: isSet(object.messageSignature)
-                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
-                : isSet(object.dsseEnvelope)
-                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
-        message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
-            ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
-            : undefined);
-        message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
-            ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
-            : undefined);
-        message.content?.$case === "dsseEnvelope" &&
-            (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
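Reviewer note: the Bundle and VerificationMaterial codecs above model proto3 "oneof" fields as a tagged union with a `$case` discriminator, flattening it back into top-level JSON keys on serialization. A minimal sketch of that convention (the "shape" union is hypothetical, not part of the deleted files):

    // Sketch of the $case tagged-union convention ts-proto uses for oneof
    // fields (as in Bundle.content above).
    function isSet(value) {
        return value !== null && value !== undefined;
    }

    function shapeFromJSON(object) {
        // At most one branch of the oneof may be present in the JSON.
        return isSet(object.circle)
            ? { $case: "circle", circle: Number(object.circle) }
            : isSet(object.square)
                ? { $case: "square", square: Number(object.square) }
                : undefined;
    }

    function shapeToJSON(data) {
        const obj = {};
        // The $case tag picks which JSON key is written back out.
        data?.$case === "circle" && (obj.circle = data.circle);
        data?.$case === "square" && (obj.square = data.square);
        return obj;
    }

    console.log(shapeToJSON(shapeFromJSON({ square: 4 }))); // { square: 4 }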
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
deleted file mode 100644
index bcd654e9154b9..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
+++ /dev/null
@@ -1,457 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
-/* eslint-disable */
-const timestamp_1 = require("./google/protobuf/timestamp");
-/**
- * Only a subset of the secure hash standard algorithms are supported.
- * See the NIST Secure Hash Standard for more
- * details.
- * UNSPECIFIED SHOULD not be used; the primary reason for its inclusion is to
- * force proto JSON serialization to emit the hash algorithm in use, as the
- * default behavior is to *omit* the default value of an enum (which is the
- * first value, represented by '0').
- */
-var HashAlgorithm;
-(function (HashAlgorithm) {
-    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
-    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
-})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {}));
-function hashAlgorithmFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "HASH_ALGORITHM_UNSPECIFIED":
-            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
-        case 1:
-        case "SHA2_256":
-            return HashAlgorithm.SHA2_256;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
-function hashAlgorithmToJSON(object) {
-    switch (object) {
-        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
-            return "HASH_ALGORITHM_UNSPECIFIED";
-        case HashAlgorithm.SHA2_256:
-            return "SHA2_256";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
-/**
- * Details of a specific public key, capturing the key encoding method
- * and signature algorithm.
- * To avoid the possibility of contradicting formats such as PKCS1 with
- * ED25519, the valid permutations are listed as a linear set instead of a
- * Cartesian set (i.e. one combined variable instead of two, one for encoding
- * and one for the signature algorithm).
- */
-var PublicKeyDetails;
-(function (PublicKeyDetails) {
-    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-    /** PKCS1_RSA_PKCS1V5 - RSA */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
-    /** PKCS1_RSA_PSS - See RFC8017 */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
-    /** PKIX_ECDSA_P256_SHA_256 - ECDSA */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
-    /** PKIX_ECDSA_P256_HMAC_SHA_256 - See RFC6979 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
-    /** PKIX_ED25519 - Ed25519 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
-})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {}));
-function publicKeyDetailsFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
-            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
-        case 1:
-        case "PKCS1_RSA_PKCS1V5":
-            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
-        case 2:
-        case "PKCS1_RSA_PSS":
-            return PublicKeyDetails.PKCS1_RSA_PSS;
-        case 3:
-        case "PKIX_RSA_PKCS1V5":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
-        case 4:
-        case "PKIX_RSA_PSS":
-            return PublicKeyDetails.PKIX_RSA_PSS;
-        case 5:
-        case "PKIX_ECDSA_P256_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
-        case 6:
-        case "PKIX_ECDSA_P256_HMAC_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
-        case 7:
-        case "PKIX_ED25519":
-            return PublicKeyDetails.PKIX_ED25519;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
-function publicKeyDetailsToJSON(object) {
-    switch (object) {
-        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
-            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
-            return "PKCS1_RSA_PKCS1V5";
-        case PublicKeyDetails.PKCS1_RSA_PSS:
-            return "PKCS1_RSA_PSS";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
-            return "PKIX_RSA_PKCS1V5";
-        case PublicKeyDetails.PKIX_RSA_PSS:
-            return "PKIX_RSA_PSS";
-        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
-            return "PKIX_ECDSA_P256_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
-            return "PKIX_ECDSA_P256_HMAC_SHA_256";
-        case PublicKeyDetails.PKIX_ED25519:
-            return "PKIX_ED25519";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
-var SubjectAlternativeNameType;
-(function (SubjectAlternativeNameType) {
-    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
-    /**
-     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
-     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
-     * for more details.
-     */
-    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
-})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {}));
-function subjectAlternativeNameTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
-            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
-        case 1:
-        case "EMAIL":
-            return SubjectAlternativeNameType.EMAIL;
-        case 2:
-        case "URI":
-            return SubjectAlternativeNameType.URI;
-        case 3:
-        case "OTHER_NAME":
-            return SubjectAlternativeNameType.OTHER_NAME;
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
-function subjectAlternativeNameTypeToJSON(object) {
-    switch (object) {
-        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
-            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-        case SubjectAlternativeNameType.EMAIL:
-            return "EMAIL";
-        case SubjectAlternativeNameType.URI:
-            return "URI";
-        case SubjectAlternativeNameType.OTHER_NAME:
-            return "OTHER_NAME";
-        default:
-            throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
-function createBaseHashOutput() {
-    return { algorithm: 0, digest: Buffer.alloc(0) };
-}
-exports.HashOutput = {
-    fromJSON(object) {
-        return {
-            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
-            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm));
-        message.digest !== undefined &&
-            (obj.digest = base64FromBytes(message.digest !== undefined ? message.digest : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseMessageSignature() {
-    return { messageDigest: undefined, signature: Buffer.alloc(0) };
-}
-exports.MessageSignature = {
-    fromJSON(object) {
-        return {
-            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
-            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.messageDigest !== undefined &&
-            (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined);
-        message.signature !== undefined &&
-            (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseLogId() {
-    return { keyId: Buffer.alloc(0) };
-}
-exports.LogId = {
-    fromJSON(object) {
-        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.keyId !== undefined &&
-            (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseRFC3161SignedTimestamp() {
-    return { signedTimestamp: Buffer.alloc(0) };
-}
-exports.RFC3161SignedTimestamp = {
-    fromJSON(object) {
-        return {
-            signedTimestamp: isSet(object.signedTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.signedTimestamp !== undefined &&
-            (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBasePublicKey() {
-    return { rawBytes: undefined, keyDetails: 0, validFor: undefined };
-}
-exports.PublicKey = {
-    fromJSON(object) {
-        return {
-            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
-            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
-            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.rawBytes !== undefined &&
-            (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined);
-        message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails));
-        message.validFor !== undefined &&
-            (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined);
-        return obj;
-    },
-};
-function createBasePublicKeyIdentifier() {
-    return { hint: "" };
-}
-exports.PublicKeyIdentifier = {
-    fromJSON(object) {
-        return { hint: isSet(object.hint) ? String(object.hint) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.hint !== undefined && (obj.hint = message.hint);
-        return obj;
-    },
-};
-function createBaseObjectIdentifier() {
-    return { id: [] };
-}
-exports.ObjectIdentifier = {
-    fromJSON(object) {
-        return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.id) {
-            obj.id = message.id.map((e) => Math.round(e));
-        }
-        else {
-            obj.id = [];
-        }
-        return obj;
-    },
-};
-function createBaseObjectIdentifierValuePair() {
-    return { oid: undefined, value: Buffer.alloc(0) };
-}
-exports.ObjectIdentifierValuePair = {
-    fromJSON(object) {
-        return {
-            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined);
-        message.value !== undefined &&
-            (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseDistinguishedName() {
-    return { organization: "", commonName: "" };
-}
-exports.DistinguishedName = {
-    fromJSON(object) {
-        return {
-            organization: isSet(object.organization) ? String(object.organization) : "",
-            commonName: isSet(object.commonName) ? String(object.commonName) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.organization !== undefined && (obj.organization = message.organization);
-        message.commonName !== undefined && (obj.commonName = message.commonName);
-        return obj;
-    },
-};
-function createBaseX509Certificate() {
-    return { rawBytes: Buffer.alloc(0) };
-}
-exports.X509Certificate = {
-    fromJSON(object) {
-        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.rawBytes !== undefined &&
-            (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseSubjectAlternativeName() {
-    return { type: 0, identity: undefined };
-}
-exports.SubjectAlternativeName = {
-    fromJSON(object) {
-        return {
-            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
-            identity: isSet(object.regexp)
-                ? { $case: "regexp", regexp: String(object.regexp) }
-                : isSet(object.value)
-                    ? { $case: "value", value: String(object.value) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type));
-        message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp);
-        message.identity?.$case === "value" && (obj.value = message.identity?.value);
-        return obj;
-    },
-};
-function createBaseX509CertificateChain() {
-    return { certificates: [] };
-}
-exports.X509CertificateChain = {
-    fromJSON(object) {
-        return {
-            certificates: Array.isArray(object?.certificates)
-                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.certificates) {
-            obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined);
-        }
-        else {
-            obj.certificates = [];
-        }
-        return obj;
-    },
-};
-function createBaseTimeRange() {
-    return { start: undefined, end: undefined };
-}
-exports.TimeRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
-            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.start !== undefined && (obj.start = message.start.toISOString());
-        message.end !== undefined && (obj.end = message.end.toISOString());
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function fromTimestamp(t) {
-    let millis = Number(t.seconds) * 1000;
-    millis += t.nanos / 1000000;
-    return new Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
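Reviewer note: the enum helpers above (hashAlgorithmFromJSON and friends) accept either the numeric wire value or the canonical string name, and throw on anything unrecognized rather than silently mapping to UNSPECIFIED. A usage sketch, assuming a deduplicated top-level copy of @sigstore/protobuf-specs remains resolvable after this patch:

    const {
        HashAlgorithm,
        hashAlgorithmFromJSON,
        hashAlgorithmToJSON,
    } = require('@sigstore/protobuf-specs');

    // Both the numeric wire value and the canonical name are accepted.
    console.log(hashAlgorithmFromJSON(1) === HashAlgorithm.SHA2_256);          // true
    console.log(hashAlgorithmFromJSON('SHA2_256') === HashAlgorithm.SHA2_256); // true
    console.log(hashAlgorithmToJSON(HashAlgorithm.SHA2_256));                  // 'SHA2_256'

    // Anything else throws rather than defaulting.
    try {
        hashAlgorithmFromJSON('SHA3_512');
    } catch (err) {
        console.log(String(err)); // Error: Unrecognized enum value ... for enum HashAlgorithm
    }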
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
deleted file mode 100644
index 398193b2075a7..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
+++ /dev/null
@@ -1,167 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-function createBaseKindVersion() {
-    return { kind: "", version: "" };
-}
-exports.KindVersion = {
-    fromJSON(object) {
-        return {
-            kind: isSet(object.kind) ? String(object.kind) : "",
-            version: isSet(object.version) ? String(object.version) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.kind !== undefined && (obj.kind = message.kind);
-        message.version !== undefined && (obj.version = message.version);
-        return obj;
-    },
-};
-function createBaseCheckpoint() {
-    return { envelope: "" };
-}
-exports.Checkpoint = {
-    fromJSON(object) {
-        return { envelope: isSet(object.envelope) ? String(object.envelope) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.envelope !== undefined && (obj.envelope = message.envelope);
-        return obj;
-    },
-};
-function createBaseInclusionProof() {
-    return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined };
-}
-exports.InclusionProof = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
-            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
-            treeSize: isSet(object.treeSize) ? String(object.treeSize) : "0",
-            hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [],
-            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
-        message.rootHash !== undefined &&
-            (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0)));
-        message.treeSize !== undefined && (obj.treeSize = message.treeSize);
-        if (message.hashes) {
-            obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0)));
-        }
-        else {
-            obj.hashes = [];
-        }
-        message.checkpoint !== undefined &&
-            (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined);
-        return obj;
-    },
-};
-function createBaseInclusionPromise() {
-    return { signedEntryTimestamp: Buffer.alloc(0) };
-}
-exports.InclusionPromise = {
-    fromJSON(object) {
-        return {
-            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.signedEntryTimestamp !== undefined &&
-            (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0)));
-        return obj;
-    },
-};
-function createBaseTransparencyLogEntry() {
-    return {
-        logIndex: "0",
-        logId: undefined,
-        kindVersion: undefined,
-        integratedTime: "0",
-        inclusionPromise: undefined,
-        inclusionProof: undefined,
-        canonicalizedBody: Buffer.alloc(0),
-    };
-}
-exports.TransparencyLogEntry = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
-            integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0",
-            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
-            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
-            canonicalizedBody: isSet(object.canonicalizedBody)
-                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.logIndex !== undefined && (obj.logIndex = message.logIndex);
-        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
-        message.kindVersion !== undefined &&
-            (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined);
-        message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime);
-        message.inclusionPromise !== undefined &&
-            (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined);
-        message.inclusionProof !== undefined &&
-            (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined);
-        message.canonicalizedBody !== undefined &&
-            (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? message.canonicalizedBody : Buffer.alloc(0)));
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
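Reviewer note: the Rekor codecs above decode base64 JSON fields into Buffers and keep 64-bit counters (logIndex, integratedTime) as strings. A decoding sketch — field names come from the deleted codec, the sample values are made up, and it assumes the deduped top-level @sigstore/protobuf-specs copy is resolvable:

    const { TransparencyLogEntry } = require('@sigstore/protobuf-specs');

    const entry = TransparencyLogEntry.fromJSON({
        logIndex: '12345',                                    // stays a string (64-bit safe)
        logId: { keyId: Buffer.from('key').toString('base64') },
        integratedTime: '1700000000',
        canonicalizedBody: Buffer.from('{}').toString('base64'),
    });

    console.log(entry.logIndex);                     // '12345'
    console.log(Buffer.isBuffer(entry.logId.keyId)); // true
    console.log(entry.canonicalizedBody.toString()); // '{}'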
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
deleted file mode 100644
index 05e566767cdb2..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
+++ /dev/null
@@ -1,103 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-function createBaseTransparencyLogInstance() {
-    return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined };
-}
-exports.TransparencyLogInstance = {
-    fromJSON(object) {
-        return {
-            baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "",
-            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
-            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl);
-        message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm));
-        message.publicKey !== undefined &&
-            (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined);
-        message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
-        return obj;
-    },
-};
-function createBaseCertificateAuthority() {
-    return { subject: undefined, uri: "", certChain: undefined, validFor: undefined };
-}
-exports.CertificateAuthority = {
-    fromJSON(object) {
-        return {
-            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
-            uri: isSet(object.uri) ? String(object.uri) : "",
-            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
-            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.subject !== undefined &&
-            (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined);
-        message.uri !== undefined && (obj.uri = message.uri);
-        message.certChain !== undefined &&
-            (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined);
-        message.validFor !== undefined &&
-            (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined);
-        return obj;
-    },
-};
-function createBaseTrustedRoot() {
-    return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] };
-}
-exports.TrustedRoot = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
-            tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [],
-            certificateAuthorities: Array.isArray(object?.certificateAuthorities)
-                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-            ctlogs: Array.isArray(object?.ctlogs)
-                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
-                : [],
-            timestampAuthorities: Array.isArray(object?.timestampAuthorities)
-                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.mediaType !== undefined && (obj.mediaType = message.mediaType);
-        if (message.tlogs) {
-            obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
-        }
-        else {
-            obj.tlogs = [];
-        }
-        if (message.certificateAuthorities) {
-            obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
-        }
-        else {
-            obj.certificateAuthorities = [];
-        }
-        if (message.ctlogs) {
-            obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
-        }
-        else {
-            obj.ctlogs = [];
-        }
-        if (message.timestampAuthorities) {
-            obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
-        }
-        else {
-            obj.timestampAuthorities = [];
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
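Reviewer note: TrustedRoot.fromJSON above is the shape the TUF client feeds a trusted root document into, and it tolerates sparse input — every repeated field falls back to []. A sketch with illustrative sample values, under the same resolvability assumption as the earlier examples:

    const { TrustedRoot } = require('@sigstore/protobuf-specs');

    const root = TrustedRoot.fromJSON({
        mediaType: 'application/vnd.dev.sigstore.trustedroot+json;version=0.1',
        tlogs: [{ baseUrl: 'https://rekor.sigstore.dev' }],
    });

    console.log(root.tlogs[0].baseUrl);       // 'https://rekor.sigstore.dev'
    console.log(root.certificateAuthorities); // []
    console.log(root.timestampAuthorities);   // []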
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
deleted file mode 100644
index 8a72b89761869..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
+++ /dev/null
@@ -1,273 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
-/* eslint-disable */
-const sigstore_bundle_1 = require("./sigstore_bundle");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_trustroot_1 = require("./sigstore_trustroot");
-function createBaseCertificateIdentity() {
-    return { issuer: "", san: undefined, oids: [] };
-}
-exports.CertificateIdentity = {
-    fromJSON(object) {
-        return {
-            issuer: isSet(object.issuer) ? String(object.issuer) : "",
-            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
-            oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.issuer !== undefined && (obj.issuer = message.issuer);
-        message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined);
-        if (message.oids) {
-            obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined);
-        }
-        else {
-            obj.oids = [];
-        }
-        return obj;
-    },
-};
-function createBaseCertificateIdentities() {
-    return { identities: [] };
-}
-exports.CertificateIdentities = {
-    fromJSON(object) {
-        return {
-            identities: Array.isArray(object?.identities)
-                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.identities) {
-            obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined);
-        }
-        else {
-            obj.identities = [];
-        }
-        return obj;
-    },
-};
-function createBasePublicKeyIdentities() {
-    return { publicKeys: [] };
-}
-exports.PublicKeyIdentities = {
-    fromJSON(object) {
-        return {
-            publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.publicKeys) {
-            obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined);
-        }
-        else {
-            obj.publicKeys = [];
-        }
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions() {
-    return { signers: undefined, tlogOptions: undefined, ctlogOptions: undefined, tsaOptions: undefined };
-}
-exports.ArtifactVerificationOptions = {
-    fromJSON(object) {
-        return {
-            signers: isSet(object.certificateIdentities)
-                ? {
-                    $case: "certificateIdentities",
-                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
-                }
-                : isSet(object.publicKeys)
-                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
-                    : undefined,
-            tlogOptions: isSet(object.tlogOptions)
-                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
-                : undefined,
-            ctlogOptions: isSet(object.ctlogOptions)
-                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
-                : undefined,
-            tsaOptions: isSet(object.tsaOptions)
-                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.signers?.$case === "certificateIdentities" &&
-            (obj.certificateIdentities = message.signers?.certificateIdentities
-                ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities)
-                : undefined);
-        message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys
-            ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys)
-            : undefined);
-        message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions
-            ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions)
-            : undefined);
-        message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions
-            ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions)
-            : undefined);
-        message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions
-            ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions)
-            : undefined);
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions_TlogOptions() {
-    return { threshold: 0, performOnlineVerification: false, disable: false };
-}
-exports.ArtifactVerificationOptions_TlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
-            performOnlineVerification: isSet(object.performOnlineVerification)
-                ? Boolean(object.performOnlineVerification)
-                : false,
-            disable: isSet(object.disable) ? Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
-        message.performOnlineVerification !== undefined &&
-            (obj.performOnlineVerification = message.performOnlineVerification);
-        message.disable !== undefined && (obj.disable = message.disable);
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions_CtlogOptions() {
-    return { threshold: 0, detachedSct: false, disable: false };
-}
-exports.ArtifactVerificationOptions_CtlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
-            detachedSct: isSet(object.detachedSct) ? Boolean(object.detachedSct) : false,
-            disable: isSet(object.disable) ? Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
-        message.detachedSct !== undefined && (obj.detachedSct = message.detachedSct);
-        message.disable !== undefined && (obj.disable = message.disable);
-        return obj;
-    },
-};
-function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() {
-    return { threshold: 0, disable: false };
-}
-exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
-        message.disable !== undefined && (obj.disable = message.disable);
-        return obj;
-    },
-};
-function createBaseArtifact() {
-    return { data: undefined };
-}
-exports.Artifact = {
-    fromJSON(object) {
-        return {
-            data: isSet(object.artifactUri)
-                ? { $case: "artifactUri", artifactUri: String(object.artifactUri) }
-                : isSet(object.artifact)
-                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri);
-        message.data?.$case === "artifact" &&
-            (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined);
-        return obj;
-    },
-};
-function createBaseInput() {
-    return {
-        artifactTrustRoot: undefined,
-        artifactVerificationOptions: undefined,
-        bundle: undefined,
-        artifact: undefined,
-    };
-}
-exports.Input = {
-    fromJSON(object) {
-        return {
-            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
-            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
-                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
-                : undefined,
-            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
-            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        message.artifactTrustRoot !== undefined &&
-            (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined);
-        message.artifactVerificationOptions !== undefined &&
-            (obj.artifactVerificationOptions = message.artifactVerificationOptions
-                ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions)
-                : undefined);
-        message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined);
-        message.artifact !== undefined && (obj.artifact = message.artifact ? exports.Artifact.toJSON(message.artifact) : undefined);
-        return obj;
-    },
-};
-var tsProtoGlobalThis = (() => {
-    if (typeof globalThis !== "undefined") {
-        return globalThis;
-    }
-    if (typeof self !== "undefined") {
-        return self;
-    }
-    if (typeof window !== "undefined") {
-        return window;
-    }
-    if (typeof global !== "undefined") {
-        return global;
-    }
-    throw "Unable to locate global object";
-})();
-function bytesFromBase64(b64) {
-    if (tsProtoGlobalThis.Buffer) {
-        return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
-    }
-    else {
-        const bin = tsProtoGlobalThis.atob(b64);
-        const arr = new Uint8Array(bin.length);
-        for (let i = 0; i < bin.length; ++i) {
-            arr[i] = bin.charCodeAt(i);
-        }
-        return arr;
-    }
-}
-function base64FromBytes(arr) {
-    if (tsProtoGlobalThis.Buffer) {
-        return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
-    }
-    else {
-        const bin = [];
-        arr.forEach((byte) => {
-            bin.push(String.fromCharCode(byte));
-        });
-        return tsProtoGlobalThis.btoa(bin.join(""));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
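Reviewer note: ArtifactVerificationOptions above combines the `$case` oneof convention (for the "signers" choice) with nested option messages. A round-trip sketch showing that fromJSON wraps the oneof in a tagged union and toJSON flattens it back out, again assuming the deduped top-level package resolves:

    const { ArtifactVerificationOptions } = require('@sigstore/protobuf-specs');

    const opts = ArtifactVerificationOptions.fromJSON({
        publicKeys: { publicKeys: [] },
        tlogOptions: { threshold: 1, performOnlineVerification: false, disable: false },
    });

    console.log(opts.signers.$case);                                  // 'publicKeys'
    console.log(ArtifactVerificationOptions.toJSON(opts).publicKeys); // { publicKeys: [] }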
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js
deleted file mode 100644
index eafb768c48fca..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/dist/index.js
+++ /dev/null
@@ -1,37 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-__exportStar(require("./__generated__/envelope"), exports);
-__exportStar(require("./__generated__/sigstore_bundle"), exports);
-__exportStar(require("./__generated__/sigstore_common"), exports);
-__exportStar(require("./__generated__/sigstore_rekor"), exports);
-__exportStar(require("./__generated__/sigstore_trustroot"), exports);
-__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json
deleted file mode 100644
index 450abb157f31a..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs/package.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
-  "name": "@sigstore/protobuf-specs",
-  "version": "0.2.1",
-  "description": "code-signing for npm packages",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "scripts": {
-    "build": "tsc"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/protobuf-specs.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "bugs": {
-    "url": "https://github.com/sigstore/protobuf-specs/issues"
-  },
-  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
-  "devDependencies": {
-    "@tsconfig/node14": "^1.0.3",
-    "@types/node": "^18.14.0",
-    "typescript": "^4.9.5"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  }
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/package.json b/node_modules/sigstore/node_modules/@sigstore/tuf/package.json
deleted file mode 100644
index 3473dfef2cde9..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/package.json
+++ /dev/null
@@ -1,41 +0,0 @@
-{
-  "name": "@sigstore/tuf",
-  "version": "1.0.3",
-  "description": "Client for the Sigstore TUF repository",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "scripts": {
-    "clean": "shx rm -rf dist *.tsbuildinfo",
-    "build": "tsc --build",
-    "test": "jest"
-  },
-  "files": [
-    "dist",
-    "store"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/sigstore-js.git"
-  },
-  "bugs": {
-    "url": "https://github.com/sigstore/sigstore-js/issues"
-  },
-  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/tuf#readme",
-  "publishConfig": {
-    "provenance": true
-  },
-  "devDependencies": {
-    "@sigstore/jest": "^0.0.0",
-    "@tufjs/repo-mock": "^1.1.0",
-    "@types/make-fetch-happen": "^10.0.0"
-  },
-  "dependencies": {
-    "@sigstore/protobuf-specs": "^0.2.0",
-    "tuf-js": "^1.1.7"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  }
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/store/public-good-instance-root.json b/node_modules/sigstore/node_modules/@sigstore/tuf/store/public-good-instance-root.json
deleted file mode 100644
index e95c7e88cdf09..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/store/public-good-instance-root.json
+++ /dev/null
@@ -1 +0,0 @@
-{"signed":{"_type":"root","spec_version":"1.0","version":7,"expires":"2023-10-04T13:08:11Z","keys":{"25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"}},"2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"}},"45b283825eb184cabd582eb17b74fc8ed404f68cf452acabdad2ed6f90ce216b":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAELrWvNt94v4R085ELeeCMxHp7PldF\n0/T1GxukUh2ODuggLGJE0pc1e8CSBf6CS91Fwo9FUOuRsjBUld+VqSyCdQ==\n-----END PUBLIC KEY-----\n"}},"7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"}},"e1863ba02070322ebc626dcecf9d881a3a38c35c3b41a83765b6ad6c37eaec2a":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"}},"f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"}},"ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c":{"keytype":"ecdsa-sha2-nistp256","scheme":"ecdsa-sha2-nistp256","keyid_hash_algorithms":["sha256","sha512"],"keyval":{"public":"-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC 
KEY-----\n"}}},"roles":{"root":{"keyids":["ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c","25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de"],"threshold":3},"snapshot":{"keyids":["45b283825eb184cabd582eb17b74fc8ed404f68cf452acabdad2ed6f90ce216b"],"threshold":1},"targets":{"keyids":["ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c","25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de"],"threshold":3},"timestamp":{"keyids":["e1863ba02070322ebc626dcecf9d881a3a38c35c3b41a83765b6ad6c37eaec2a"],"threshold":1}},"consistent_snapshot":true},"signatures":[{"keyid":"25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99","sig":"3046022100c0610c0055ce5c4a52d054d7322e7b514d55baf44423d63aa4daa077cc60fd1f022100a097f2803f090fb66c42ead915a2c46ebe7db53a32bf18f2188275cc936f8bdd"},{"keyid":"f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f","sig":"304502203134f0468810299d5493a867c40630b341296b92e59c29821311d353343bb3a4022100e667ae3d304e7e3da0894c7425f6b9ecd917106841280e5cf6f3496ad5f8f68e"},{"keyid":"7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b","sig":"3045022037fe5f45426f21eaaf4730d2136f2b1611d6379688f79b9d1e3f61719997135c022100b63b022d7b79d4694b96f416d88aa4d7b1a3bff8a01f4fb51e0f42137c7d2d06"},{"keyid":"2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de","sig":"3044022007cc8fcc4940809f2751ad5b535f4c5f53f5b4952f5b5696b09668e743306ac1022006dfcdf94e94c92163eeb1b47796db62cedaa730aa13aa61b573fe23714730f2"}]}
diff --git a/node_modules/sigstore/node_modules/@tufjs/canonical-json/LICENSE b/node_modules/sigstore/node_modules/@tufjs/canonical-json/LICENSE
deleted file mode 100644
index 420700f5d3765..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/canonical-json/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2022 GitHub and the TUF Contributors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/@tufjs/canonical-json/lib/index.js b/node_modules/sigstore/node_modules/@tufjs/canonical-json/lib/index.js
deleted file mode 100644
index d480696de1f6c..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/canonical-json/lib/index.js
+++ /dev/null
@@ -1,64 +0,0 @@
-const COMMA = ',';
-const COLON = ':';
-const LEFT_SQUARE_BRACKET = '[';
-const RIGHT_SQUARE_BRACKET = ']';
-const LEFT_CURLY_BRACKET = '{';
-const RIGHT_CURLY_BRACKET = '}';
-
-// Recursively encodes the supplied object according to the canonical JSON form
-// as specified at http://wiki.laptop.org/go/Canonical_JSON. It's a restricted
-// dialect of JSON in which keys are lexically sorted, floats are not allowed,
-// and only double quotes and backslashes are escaped.
-function canonicalize(object) {
-  const buffer = [];
-  if (typeof object === 'string') {
-    buffer.push(canonicalizeString(object));
-  } else if (typeof object === 'boolean') {
-    buffer.push(JSON.stringify(object));
-  } else if (Number.isInteger(object)) {
-    buffer.push(JSON.stringify(object));
-  } else if (object === null) {
-    buffer.push(JSON.stringify(object));
-  } else if (Array.isArray(object)) {
-    buffer.push(LEFT_SQUARE_BRACKET);
-    let first = true;
-    object.forEach((element) => {
-      if (!first) {
-        buffer.push(COMMA);
-      }
-      first = false;
-      buffer.push(canonicalize(element));
-    });
-    buffer.push(RIGHT_SQUARE_BRACKET);
-  } else if (typeof object === 'object') {
-    buffer.push(LEFT_CURLY_BRACKET);
-    let first = true;
-    Object.keys(object)
-      .sort()
-      .forEach((property) => {
-        if (!first) {
-          buffer.push(COMMA);
-        }
-        first = false;
-        buffer.push(canonicalizeString(property));
-        buffer.push(COLON);
-        buffer.push(canonicalize(object[property]));
-      });
-    buffer.push(RIGHT_CURLY_BRACKET);
-  } else {
-    throw new TypeError('cannot encode ' + object.toString());
-  }
-
-  return buffer.join('');
-}
-
-// String canonicalization consists of escaping backslash (\) and double
-// quote (") characters and wrapping the resulting string in double quotes.
-function canonicalizeString(string) {
-  const escapedString = string.replace(/\\/g, '\\\\').replace(/"/g, '\\"');
-  return '"' + escapedString + '"';
-}
-
-module.exports = {
-  canonicalize,
-};
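
For reference, a short usage sketch of the canonicalize function removed above (assuming the published @tufjs/canonical-json package is installed): keys come out lexically sorted, only backslashes and double quotes are escaped, and non-integer numbers are rejected.

    const { canonicalize } = require('@tufjs/canonical-json');

    // Keys are emitted in lexical order regardless of insertion order.
    console.log(canonicalize({ b: 1, a: ['x', true, null] }));
    // -> {"a":["x",true,null],"b":1}

    // Floats are not part of the canonical dialect.
    // canonicalize({ pi: 3.14 }); // throws TypeError: cannot encode 3.14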
diff --git a/node_modules/sigstore/node_modules/@tufjs/canonical-json/package.json b/node_modules/sigstore/node_modules/@tufjs/canonical-json/package.json
deleted file mode 100644
index 688c9b93c3a4e..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/canonical-json/package.json
+++ /dev/null
@@ -1,39 +0,0 @@
-{
-  "name": "@tufjs/canonical-json",
-  "version": "1.0.0",
-  "description": "OLPC JSON canonicalization",
-  "main": "lib/index.js",
-  "typings": "lib/index.d.ts",
-  "license": "MIT",
-  "keywords": [
-    "json",
-    "canonical",
-    "canonicalize",
-    "canonicalization",
-    "crypto",
-    "signature",
-    "olpc"
-  ],
-  "author": "bdehamer@github.com",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/theupdateframework/tuf-js.git"
-  },
-  "homepage": "https://github.com/theupdateframework/tuf-js/packages/canonical-json#readme",
-  "bugs": {
-    "url": "https://github.com/theupdateframework/tuf-js/issues"
-  },
-  "files": [
-    "lib/"
-  ],
-  "scripts": {
-    "test": "jest"
-  },
-  "devDependencies": {
-    "@types/node": "^18.14.1",
-    "typescript": "^4.9.5"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  }
-}
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/LICENSE b/node_modules/sigstore/node_modules/@tufjs/models/LICENSE
deleted file mode 100644
index 420700f5d3765..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2022 GitHub and the TUF Contributors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/base.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/base.js
deleted file mode 100644
index d89a089c33092..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/base.js
+++ /dev/null
@@ -1,83 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signed = exports.isMetadataKind = exports.MetadataKind = void 0;
-const util_1 = __importDefault(require("util"));
-const error_1 = require("./error");
-const utils_1 = require("./utils");
-const SPECIFICATION_VERSION = ['1', '0', '31'];
-var MetadataKind;
-(function (MetadataKind) {
-    MetadataKind["Root"] = "root";
-    MetadataKind["Timestamp"] = "timestamp";
-    MetadataKind["Snapshot"] = "snapshot";
-    MetadataKind["Targets"] = "targets";
-})(MetadataKind = exports.MetadataKind || (exports.MetadataKind = {}));
-function isMetadataKind(value) {
-    return (typeof value === 'string' &&
-        Object.values(MetadataKind).includes(value));
-}
-exports.isMetadataKind = isMetadataKind;
-/***
- * A base class for the signed part of TUF metadata.
- *
- * Objects with base class Signed are usually included in a ``Metadata`` object
- * on the signed attribute. This class provides attributes and methods that
- * are common for all TUF metadata types (roles).
- */
-class Signed {
-    constructor(options) {
-        this.specVersion = options.specVersion || SPECIFICATION_VERSION.join('.');
-        const specList = this.specVersion.split('.');
-        if (!(specList.length === 2 || specList.length === 3) ||
-            !specList.every((item) => isNumeric(item))) {
-            throw new error_1.ValueError('Failed to parse specVersion');
-        }
-        // major version must match
-        if (specList[0] != SPECIFICATION_VERSION[0]) {
-            throw new error_1.ValueError('Unsupported specVersion');
-        }
-        this.expires = options.expires || new Date().toISOString();
-        this.version = options.version || 1;
-        this.unrecognizedFields = options.unrecognizedFields || {};
-    }
-    equals(other) {
-        if (!(other instanceof Signed)) {
-            return false;
-        }
-        return (this.specVersion === other.specVersion &&
-            this.expires === other.expires &&
-            this.version === other.version &&
-            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
-    }
-    isExpired(referenceTime) {
-        if (!referenceTime) {
-            referenceTime = new Date();
-        }
-        return referenceTime >= new Date(this.expires);
-    }
-    static commonFieldsFromJSON(data) {
-        const { spec_version, expires, version, ...rest } = data;
-        if (utils_1.guard.isDefined(spec_version) && !(typeof spec_version === 'string')) {
-            throw new TypeError('spec_version must be a string');
-        }
-        if (utils_1.guard.isDefined(expires) && !(typeof expires === 'string')) {
-            throw new TypeError('expires must be a string');
-        }
-        if (utils_1.guard.isDefined(version) && !(typeof version === 'number')) {
-            throw new TypeError('version must be a number');
-        }
-        return {
-            specVersion: spec_version,
-            expires,
-            version,
-            unrecognizedFields: rest,
-        };
-    }
-}
-exports.Signed = Signed;
-function isNumeric(str) {
-    return !isNaN(Number(str));
-}
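
For reference, the Signed constructor above accepts a two- or three-part numeric specVersion and enforces only that the major version matches. A standalone restatement of that guard (a sketch, not the class itself):

    const SUPPORTED_MAJOR = '1'; // mirrors SPECIFICATION_VERSION[0] above

    function checkSpecVersion(specVersion) {
      const parts = specVersion.split('.');
      if (!(parts.length === 2 || parts.length === 3) ||
          !parts.every((p) => !isNaN(Number(p)))) {
        throw new Error('Failed to parse specVersion');
      }
      if (parts[0] !== SUPPORTED_MAJOR) {
        throw new Error('Unsupported specVersion');
      }
    }

    checkSpecVersion('1.0.31'); // ok
    // checkSpecVersion('2.0'); // throws 'Unsupported specVersion'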
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js
deleted file mode 100644
index 7165f1e244393..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js
+++ /dev/null
@@ -1,115 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Delegations = void 0;
-const util_1 = __importDefault(require("util"));
-const error_1 = require("./error");
-const key_1 = require("./key");
-const role_1 = require("./role");
-const utils_1 = require("./utils");
-/**
- * A container object storing information about all delegations.
- *
- * Targets roles that are trusted to provide signed metadata files
- * describing targets with designated pathnames and/or further delegations.
- */
-class Delegations {
-    constructor(options) {
-        this.keys = options.keys;
-        this.unrecognizedFields = options.unrecognizedFields || {};
-        if (options.roles) {
-            if (Object.keys(options.roles).some((roleName) => role_1.TOP_LEVEL_ROLE_NAMES.includes(roleName))) {
-                throw new error_1.ValueError('Delegated role name conflicts with top-level role name');
-            }
-        }
-        this.succinctRoles = options.succinctRoles;
-        this.roles = options.roles;
-    }
-    equals(other) {
-        if (!(other instanceof Delegations)) {
-            return false;
-        }
-        return (util_1.default.isDeepStrictEqual(this.keys, other.keys) &&
-            util_1.default.isDeepStrictEqual(this.roles, other.roles) &&
-            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields) &&
-            util_1.default.isDeepStrictEqual(this.succinctRoles, other.succinctRoles));
-    }
-    *rolesForTarget(targetPath) {
-        if (this.roles) {
-            for (const role of Object.values(this.roles)) {
-                if (role.isDelegatedPath(targetPath)) {
-                    yield { role: role.name, terminating: role.terminating };
-                }
-            }
-        }
-        else if (this.succinctRoles) {
-            yield {
-                role: this.succinctRoles.getRoleForTarget(targetPath),
-                terminating: true,
-            };
-        }
-    }
-    toJSON() {
-        const json = {
-            keys: keysToJSON(this.keys),
-            ...this.unrecognizedFields,
-        };
-        if (this.roles) {
-            json.roles = rolesToJSON(this.roles);
-        }
-        else if (this.succinctRoles) {
-            json.succinct_roles = this.succinctRoles.toJSON();
-        }
-        return json;
-    }
-    static fromJSON(data) {
-        const { keys, roles, succinct_roles, ...unrecognizedFields } = data;
-        let succinctRoles;
-        if (utils_1.guard.isObject(succinct_roles)) {
-            succinctRoles = role_1.SuccinctRoles.fromJSON(succinct_roles);
-        }
-        return new Delegations({
-            keys: keysFromJSON(keys),
-            roles: rolesFromJSON(roles),
-            unrecognizedFields,
-            succinctRoles,
-        });
-    }
-}
-exports.Delegations = Delegations;
-function keysToJSON(keys) {
-    return Object.entries(keys).reduce((acc, [keyId, key]) => ({
-        ...acc,
-        [keyId]: key.toJSON(),
-    }), {});
-}
-function rolesToJSON(roles) {
-    return Object.values(roles).map((role) => role.toJSON());
-}
-function keysFromJSON(data) {
-    if (!utils_1.guard.isObjectRecord(data)) {
-        throw new TypeError('keys is malformed');
-    }
-    return Object.entries(data).reduce((acc, [keyID, keyData]) => ({
-        ...acc,
-        [keyID]: key_1.Key.fromJSON(keyID, keyData),
-    }), {});
-}
-function rolesFromJSON(data) {
-    let roleMap;
-    if (utils_1.guard.isDefined(data)) {
-        if (!utils_1.guard.isObjectArray(data)) {
-            throw new TypeError('roles is malformed');
-        }
-        roleMap = data.reduce((acc, role) => {
-            const delegatedRole = role_1.DelegatedRole.fromJSON(role);
-            return {
-                ...acc,
-                [delegatedRole.name]: delegatedRole,
-            };
-        }, {});
-    }
-    return roleMap;
-}
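
For reference, rolesForTarget above is a generator: it yields { role, terminating } pairs from the explicit role list, or a single terminating entry from a succinct-roles bin. A hedged usage sketch, assuming delegations came from Delegations.fromJSON and the target path is illustrative:

    // Walk every delegated role that may describe one target path.
    for (const { role, terminating } of delegations.rolesForTarget('pkg/app.tgz')) {
      console.log(role, terminating);
      // In a TUF client, a terminating delegation ends the search for
      // this target even if the role's metadata does not list it.
    }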
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/error.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/error.js
deleted file mode 100644
index ba80698747ba0..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/error.js
+++ /dev/null
@@ -1,27 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.UnsignedMetadataError = exports.RepositoryError = exports.ValueError = void 0;
-// An error about insufficient values
-class ValueError extends Error {
-}
-exports.ValueError = ValueError;
-// An error with a repository's state, such as a missing file.
-// It covers all exceptions that come from the repository side when
-// looking from the perspective of users of metadata API or ngclient.
-class RepositoryError extends Error {
-}
-exports.RepositoryError = RepositoryError;
-// An error about metadata object with insufficient threshold of signatures.
-class UnsignedMetadataError extends RepositoryError {
-}
-exports.UnsignedMetadataError = UnsignedMetadataError;
-// An error while checking the length and hash values of an object.
-class LengthOrHashMismatchError extends RepositoryError {
-}
-exports.LengthOrHashMismatchError = LengthOrHashMismatchError;
-class CryptoError extends Error {
-}
-exports.CryptoError = CryptoError;
-class UnsupportedAlgorithmError extends CryptoError {
-}
-exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError;
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/file.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/file.js
deleted file mode 100644
index b35fe5950bbb7..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/file.js
+++ /dev/null
@@ -1,183 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TargetFile = exports.MetaFile = void 0;
-const crypto_1 = __importDefault(require("crypto"));
-const util_1 = __importDefault(require("util"));
-const error_1 = require("./error");
-const utils_1 = require("./utils");
-// A container with information about a particular metadata file.
-//
-// This class is used for Timestamp and Snapshot metadata.
-class MetaFile {
-    constructor(opts) {
-        if (opts.version <= 0) {
-            throw new error_1.ValueError('Metafile version must be at least 1');
-        }
-        if (opts.length !== undefined) {
-            validateLength(opts.length);
-        }
-        this.version = opts.version;
-        this.length = opts.length;
-        this.hashes = opts.hashes;
-        this.unrecognizedFields = opts.unrecognizedFields || {};
-    }
-    equals(other) {
-        if (!(other instanceof MetaFile)) {
-            return false;
-        }
-        return (this.version === other.version &&
-            this.length === other.length &&
-            util_1.default.isDeepStrictEqual(this.hashes, other.hashes) &&
-            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
-    }
-    verify(data) {
-        // Verifies that the given data matches the expected length.
-        if (this.length !== undefined) {
-            if (data.length !== this.length) {
-                throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${data.length}`);
-            }
-        }
-        // Verifies that the given data matches the supplied hashes.
-        if (this.hashes) {
-            Object.entries(this.hashes).forEach(([key, value]) => {
-                let hash;
-                try {
-                    hash = crypto_1.default.createHash(key);
-                }
-                catch (e) {
-                    throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`);
-                }
-                const observedHash = hash.update(data).digest('hex');
-                if (observedHash !== value) {
-                    throw new error_1.LengthOrHashMismatchError(`Expected hash ${value} but got ${observedHash}`);
-                }
-            });
-        }
-    }
-    toJSON() {
-        const json = {
-            version: this.version,
-            ...this.unrecognizedFields,
-        };
-        if (this.length !== undefined) {
-            json.length = this.length;
-        }
-        if (this.hashes) {
-            json.hashes = this.hashes;
-        }
-        return json;
-    }
-    static fromJSON(data) {
-        const { version, length, hashes, ...rest } = data;
-        if (typeof version !== 'number') {
-            throw new TypeError('version must be a number');
-        }
-        if (utils_1.guard.isDefined(length) && typeof length !== 'number') {
-            throw new TypeError('length must be a number');
-        }
-        if (utils_1.guard.isDefined(hashes) && !utils_1.guard.isStringRecord(hashes)) {
-            throw new TypeError('hashes must have string keys and values');
-        }
-        return new MetaFile({
-            version,
-            length,
-            hashes,
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.MetaFile = MetaFile;
-// Container for info about a particular target file.
-//
-// This class is used for Target metadata.
-class TargetFile {
-    constructor(opts) {
-        validateLength(opts.length);
-        this.length = opts.length;
-        this.path = opts.path;
-        this.hashes = opts.hashes;
-        this.unrecognizedFields = opts.unrecognizedFields || {};
-    }
-    get custom() {
-        const custom = this.unrecognizedFields['custom'];
-        if (!custom || Array.isArray(custom) || !(typeof custom === 'object')) {
-            return {};
-        }
-        return custom;
-    }
-    equals(other) {
-        if (!(other instanceof TargetFile)) {
-            return false;
-        }
-        return (this.length === other.length &&
-            this.path === other.path &&
-            util_1.default.isDeepStrictEqual(this.hashes, other.hashes) &&
-            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
-    }
-    async verify(stream) {
-        let observedLength = 0;
-        // Create a digest for each hash algorithm
-        const digests = Object.keys(this.hashes).reduce((acc, key) => {
-            try {
-                acc[key] = crypto_1.default.createHash(key);
-            }
-            catch (e) {
-                throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`);
-            }
-            return acc;
-        }, {});
-        // Read stream chunk by chunk
-        for await (const chunk of stream) {
-            // Keep running tally of stream length
-            observedLength += chunk.length;
-            // Append chunk to each digest
-            Object.values(digests).forEach((digest) => {
-                digest.update(chunk);
-            });
-        }
-        // Verify length matches expected value
-        if (observedLength !== this.length) {
-            throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${observedLength}`);
-        }
-        // Verify each digest matches expected value
-        Object.entries(digests).forEach(([key, value]) => {
-            const expected = this.hashes[key];
-            const actual = value.digest('hex');
-            if (actual !== expected) {
-                throw new error_1.LengthOrHashMismatchError(`Expected hash ${expected} but got ${actual}`);
-            }
-        });
-    }
-    toJSON() {
-        return {
-            length: this.length,
-            hashes: this.hashes,
-            ...this.unrecognizedFields,
-        };
-    }
-    static fromJSON(path, data) {
-        const { length, hashes, ...rest } = data;
-        if (typeof length !== 'number') {
-            throw new TypeError('length must be a number');
-        }
-        if (!utils_1.guard.isStringRecord(hashes)) {
-            throw new TypeError('hashes must have string keys and values');
-        }
-        return new TargetFile({
-            length,
-            path,
-            hashes,
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.TargetFile = TargetFile;
-// Check that the supplied length is valid
-function validateLength(length) {
-    if (length < 0) {
-        throw new error_1.ValueError('Length must be at least 0');
-    }
-}
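
For reference, TargetFile.verify above accepts any async-iterable stream, keeps a running length tally, and feeds every chunk to one digest per hash algorithm. A hedged usage sketch (file name, length, and digest are illustrative placeholders):

    const fs = require('fs');
    const { TargetFile } = require('@tufjs/models');

    const target = TargetFile.fromJSON('app.tgz', {
      length: 1024, // real values come from targets metadata
      hashes: { sha256: '<expected hex digest>' },
    });

    // Inside an async function: throws LengthOrHashMismatchError on any
    // length or digest mismatch, resolves otherwise.
    await target.verify(fs.createReadStream('app.tgz'));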
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/index.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/index.js
deleted file mode 100644
index a4dc783659f04..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/index.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Timestamp = exports.Targets = exports.Snapshot = exports.Signature = exports.Root = exports.Metadata = exports.Key = exports.TargetFile = exports.MetaFile = exports.ValueError = exports.MetadataKind = void 0;
-var base_1 = require("./base");
-Object.defineProperty(exports, "MetadataKind", { enumerable: true, get: function () { return base_1.MetadataKind; } });
-var error_1 = require("./error");
-Object.defineProperty(exports, "ValueError", { enumerable: true, get: function () { return error_1.ValueError; } });
-var file_1 = require("./file");
-Object.defineProperty(exports, "MetaFile", { enumerable: true, get: function () { return file_1.MetaFile; } });
-Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } });
-var key_1 = require("./key");
-Object.defineProperty(exports, "Key", { enumerable: true, get: function () { return key_1.Key; } });
-var metadata_1 = require("./metadata");
-Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } });
-var root_1 = require("./root");
-Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } });
-var signature_1 = require("./signature");
-Object.defineProperty(exports, "Signature", { enumerable: true, get: function () { return signature_1.Signature; } });
-var snapshot_1 = require("./snapshot");
-Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } });
-var targets_1 = require("./targets");
-Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } });
-var timestamp_1 = require("./timestamp");
-Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } });
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/key.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/key.js
deleted file mode 100644
index 5e55b09d7c6dd..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/key.js
+++ /dev/null
@@ -1,85 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Key = void 0;
-const util_1 = __importDefault(require("util"));
-const error_1 = require("./error");
-const utils_1 = require("./utils");
-const key_1 = require("./utils/key");
-// A container class representing the public portion of a Key.
-class Key {
-    constructor(options) {
-        const { keyID, keyType, scheme, keyVal, unrecognizedFields } = options;
-        this.keyID = keyID;
-        this.keyType = keyType;
-        this.scheme = scheme;
-        this.keyVal = keyVal;
-        this.unrecognizedFields = unrecognizedFields || {};
-    }
-    // Verifies that metadata.signatures contains a signature made with this
-    // key and that it is correctly signed.
-    verifySignature(metadata) {
-        const signature = metadata.signatures[this.keyID];
-        if (!signature)
-            throw new error_1.UnsignedMetadataError('no signature for key found in metadata');
-        if (!this.keyVal.public)
-            throw new error_1.UnsignedMetadataError('no public key found');
-        const publicKey = (0, key_1.getPublicKey)({
-            keyType: this.keyType,
-            scheme: this.scheme,
-            keyVal: this.keyVal.public,
-        });
-        const signedData = metadata.signed.toJSON();
-        try {
-            if (!utils_1.crypto.verifySignature(signedData, publicKey, signature.sig)) {
-                throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`);
-            }
-        }
-        catch (error) {
-            if (error instanceof error_1.UnsignedMetadataError) {
-                throw error;
-            }
-            throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`);
-        }
-    }
-    equals(other) {
-        if (!(other instanceof Key)) {
-            return false;
-        }
-        return (this.keyID === other.keyID &&
-            this.keyType === other.keyType &&
-            this.scheme === other.scheme &&
-            util_1.default.isDeepStrictEqual(this.keyVal, other.keyVal) &&
-            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
-    }
-    toJSON() {
-        return {
-            keytype: this.keyType,
-            scheme: this.scheme,
-            keyval: this.keyVal,
-            ...this.unrecognizedFields,
-        };
-    }
-    static fromJSON(keyID, data) {
-        const { keytype, scheme, keyval, ...rest } = data;
-        if (typeof keytype !== 'string') {
-            throw new TypeError('keytype must be a string');
-        }
-        if (typeof scheme !== 'string') {
-            throw new TypeError('scheme must be a string');
-        }
-        if (!utils_1.guard.isStringRecord(keyval)) {
-            throw new TypeError('keyval must be a string record');
-        }
-        return new Key({
-            keyID,
-            keyType: keytype,
-            scheme,
-            keyVal: keyval,
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.Key = Key;
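
For reference, Key.verifySignature above looks up the signature stored under its own keyID, serializes metadata.signed via toJSON(), and defers the actual check to utils.crypto.verifySignature. A hedged sketch of an equivalent check using Node's crypto directly over canonical JSON (publicKeyPem and sigHex are assumed inputs; the real implementation handles more key types and encodings):

    const crypto = require('crypto');
    const { canonicalize } = require('@tufjs/canonical-json');

    // Verify a DER-encoded ECDSA signature (hex) over the canonical form
    // of the signed JSON, as with the ecdsa-sha2-nistp256 keys above.
    function verifyOverCanonicalJson(signedJson, publicKeyPem, sigHex) {
      const data = Buffer.from(canonicalize(signedJson));
      return crypto.verify('sha256', data, publicKeyPem, Buffer.from(sigHex, 'hex'));
    }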
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js
deleted file mode 100644
index 9668b6f14fa70..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js
+++ /dev/null
@@ -1,158 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Metadata = void 0;
-const canonical_json_1 = require("@tufjs/canonical-json");
-const util_1 = __importDefault(require("util"));
-const base_1 = require("./base");
-const error_1 = require("./error");
-const root_1 = require("./root");
-const signature_1 = require("./signature");
-const snapshot_1 = require("./snapshot");
-const targets_1 = require("./targets");
-const timestamp_1 = require("./timestamp");
-const utils_1 = require("./utils");
-/***
- * A container for signed TUF metadata.
- *
- * Provides methods to convert to and from JSON, to read and write files,
- * and to create and verify metadata signatures.
- *
- * ``Metadata[T]`` is a generic container type where T can be any one type of
- * [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``]. The purpose of this
- * is to allow static type checking of the signed attribute in code using
- * Metadata::
- *
- * root_md = Metadata[Root].fromJSON("root.json")
- * # root_md type is now Metadata[Root]. This means signed and its
- * # attributes like consistent_snapshot are now statically typed and the
- * # types can be verified by static type checkers and shown by IDEs
- *
- * Using a type constraint is not required, but without one T is not a
- * specific type, so static type checking cannot happen. Note that the type
- * constraint ``[Root]`` is not validated at runtime (as pure annotations are
- * not available then).
- *
- * Apart from ``expires`` all of the arguments to the inner constructors have
- * reasonable default values for new metadata.
- */
-class Metadata {
-    constructor(signed, signatures, unrecognizedFields) {
-        this.signed = signed;
-        this.signatures = signatures || {};
-        this.unrecognizedFields = unrecognizedFields || {};
-    }
-    sign(signer, append = true) {
-        const bytes = Buffer.from((0, canonical_json_1.canonicalize)(this.signed.toJSON()));
-        const signature = signer(bytes);
-        if (!append) {
-            this.signatures = {};
-        }
-        this.signatures[signature.keyID] = signature;
-    }
-    verifyDelegate(delegatedRole, delegatedMetadata) {
-        let role;
-        let keys = {};
-        switch (this.signed.type) {
-            case base_1.MetadataKind.Root:
-                keys = this.signed.keys;
-                role = this.signed.roles[delegatedRole];
-                break;
-            case base_1.MetadataKind.Targets:
-                if (!this.signed.delegations) {
-                    throw new error_1.ValueError(`No delegations found for ${delegatedRole}`);
-                }
-                keys = this.signed.delegations.keys;
-                if (this.signed.delegations.roles) {
-                    role = this.signed.delegations.roles[delegatedRole];
-                }
-                else if (this.signed.delegations.succinctRoles) {
-                    if (this.signed.delegations.succinctRoles.isDelegatedRole(delegatedRole)) {
-                        role = this.signed.delegations.succinctRoles;
-                    }
-                }
-                break;
-            default:
-                throw new TypeError('invalid metadata type');
-        }
-        if (!role) {
-            throw new error_1.ValueError(`no delegation found for ${delegatedRole}`);
-        }
-        const signingKeys = new Set();
-        role.keyIDs.forEach((keyID) => {
-            const key = keys[keyID];
-            // If we don't have the key, continue checking other keys
-            if (!key) {
-                return;
-            }
-            try {
-                key.verifySignature(delegatedMetadata);
-                signingKeys.add(key.keyID);
-            }
-            catch (error) {
-                // continue
-            }
-        });
-        if (signingKeys.size < role.threshold) {
-            throw new error_1.UnsignedMetadataError(`${delegatedRole} was signed by ${signingKeys.size}/${role.threshold} keys`);
-        }
-    }
-    equals(other) {
-        if (!(other instanceof Metadata)) {
-            return false;
-        }
-        return (this.signed.equals(other.signed) &&
-            util_1.default.isDeepStrictEqual(this.signatures, other.signatures) &&
-            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
-    }
-    toJSON() {
-        const signatures = Object.values(this.signatures).map((signature) => {
-            return signature.toJSON();
-        });
-        return {
-            signatures,
-            signed: this.signed.toJSON(),
-            ...this.unrecognizedFields,
-        };
-    }
-    static fromJSON(type, data) {
-        const { signed, signatures, ...rest } = data;
-        if (!utils_1.guard.isDefined(signed) || !utils_1.guard.isObject(signed)) {
-            throw new TypeError('signed is not defined');
-        }
-        if (type !== signed._type) {
-            throw new error_1.ValueError(`expected '${type}', got ${signed['_type']}`);
-        }
-        let signedObj;
-        switch (type) {
-            case base_1.MetadataKind.Root:
-                signedObj = root_1.Root.fromJSON(signed);
-                break;
-            case base_1.MetadataKind.Timestamp:
-                signedObj = timestamp_1.Timestamp.fromJSON(signed);
-                break;
-            case base_1.MetadataKind.Snapshot:
-                signedObj = snapshot_1.Snapshot.fromJSON(signed);
-                break;
-            case base_1.MetadataKind.Targets:
-                signedObj = targets_1.Targets.fromJSON(signed);
-                break;
-            default:
-                throw new TypeError('invalid metadata type');
-        }
-        const sigMap = signaturesFromJSON(signatures);
-        return new Metadata(signedObj, sigMap, rest);
-    }
-}
-exports.Metadata = Metadata;
-function signaturesFromJSON(data) {
-    if (!utils_1.guard.isObjectArray(data)) {
-        throw new TypeError('signatures is not an array');
-    }
-    return data.reduce((acc, sigData) => {
-        const signature = signature_1.Signature.fromJSON(sigData);
-        return { ...acc, [signature.keyID]: signature };
-    }, {});
-}
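
For reference, verifyDelegate above implements TUF's threshold rule: count the distinct role keys that produce a valid signature over the delegated metadata, and fail unless that count reaches role.threshold. A restatement of just that loop (role, keys, and delegatedMetadata are assumed in scope):

    const signingKeys = new Set();
    for (const keyID of role.keyIDs) {
      const key = keys[keyID];
      if (!key) continue; // unknown keyID: skip rather than fail
      try {
        key.verifySignature(delegatedMetadata);
        signingKeys.add(key.keyID);
      } catch {
        // this key's signature is missing or invalid; try the rest
      }
    }
    if (signingKeys.size < role.threshold) {
      throw new Error(`signed by ${signingKeys.size}/${role.threshold} keys`);
    }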
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/role.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/role.js
deleted file mode 100644
index f7ddbc6fe3f38..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/role.js
+++ /dev/null
@@ -1,299 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.SuccinctRoles = exports.DelegatedRole = exports.Role = exports.TOP_LEVEL_ROLE_NAMES = void 0;
-const crypto_1 = __importDefault(require("crypto"));
-const minimatch_1 = require("minimatch");
-const util_1 = __importDefault(require("util"));
-const error_1 = require("./error");
-const utils_1 = require("./utils");
-exports.TOP_LEVEL_ROLE_NAMES = [
-    'root',
-    'targets',
-    'snapshot',
-    'timestamp',
-];
-/**
- * Container that defines which keys are required to sign roles metadata.
- *
- * Role defines how many keys are required to successfully sign the roles
- * metadata, and which keys are accepted.
- */
-class Role {
-    constructor(options) {
-        const { keyIDs, threshold, unrecognizedFields } = options;
-        if (hasDuplicates(keyIDs)) {
-            throw new error_1.ValueError('duplicate key IDs found');
-        }
-        if (threshold < 1) {
-            throw new error_1.ValueError('threshold must be at least 1');
-        }
-        this.keyIDs = keyIDs;
-        this.threshold = threshold;
-        this.unrecognizedFields = unrecognizedFields || {};
-    }
-    equals(other) {
-        if (!(other instanceof Role)) {
-            return false;
-        }
-        return (this.threshold === other.threshold &&
-            util_1.default.isDeepStrictEqual(this.keyIDs, other.keyIDs) &&
-            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
-    }
-    toJSON() {
-        return {
-            keyids: this.keyIDs,
-            threshold: this.threshold,
-            ...this.unrecognizedFields,
-        };
-    }
-    static fromJSON(data) {
-        const { keyids, threshold, ...rest } = data;
-        if (!utils_1.guard.isStringArray(keyids)) {
-            throw new TypeError('keyids must be an array');
-        }
-        if (typeof threshold !== 'number') {
-            throw new TypeError('threshold must be a number');
-        }
-        return new Role({
-            keyIDs: keyids,
-            threshold,
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.Role = Role;
-function hasDuplicates(array) {
-    return new Set(array).size !== array.length;
-}
-/**
- * A container with information about a delegated role.
- *
- * A delegation can happen in two ways:
- *   - ``paths`` is set: delegates targets matching any path pattern in ``paths``
- *   - ``pathHashPrefixes`` is set: delegates targets whose target path hash
- *      starts with any of the prefixes in ``pathHashPrefixes``
- *
- *   ``paths`` and ``pathHashPrefixes`` are mutually exclusive: they cannot
- *   both be set, but at least one of them must be set.
- */
-class DelegatedRole extends Role {
-    constructor(opts) {
-        super(opts);
-        const { name, terminating, paths, pathHashPrefixes } = opts;
-        this.name = name;
-        this.terminating = terminating;
-        if (opts.paths && opts.pathHashPrefixes) {
-            throw new error_1.ValueError('paths and pathHashPrefixes are mutually exclusive');
-        }
-        this.paths = paths;
-        this.pathHashPrefixes = pathHashPrefixes;
-    }
-    equals(other) {
-        if (!(other instanceof DelegatedRole)) {
-            return false;
-        }
-        return (super.equals(other) &&
-            this.name === other.name &&
-            this.terminating === other.terminating &&
-            util_1.default.isDeepStrictEqual(this.paths, other.paths) &&
-            util_1.default.isDeepStrictEqual(this.pathHashPrefixes, other.pathHashPrefixes));
-    }
-    isDelegatedPath(targetFilepath) {
-        if (this.paths) {
-            return this.paths.some((pathPattern) => isTargetInPathPattern(targetFilepath, pathPattern));
-        }
-        if (this.pathHashPrefixes) {
-            const hasher = crypto_1.default.createHash('sha256');
-            const pathHash = hasher.update(targetFilepath).digest('hex');
-            return this.pathHashPrefixes.some((pathHashPrefix) => pathHash.startsWith(pathHashPrefix));
-        }
-        return false;
-    }
-    toJSON() {
-        const json = {
-            ...super.toJSON(),
-            name: this.name,
-            terminating: this.terminating,
-        };
-        if (this.paths) {
-            json.paths = this.paths;
-        }
-        if (this.pathHashPrefixes) {
-            json.path_hash_prefixes = this.pathHashPrefixes;
-        }
-        return json;
-    }
-    static fromJSON(data) {
-        const { keyids, threshold, name, terminating, paths, path_hash_prefixes, ...rest } = data;
-        if (!utils_1.guard.isStringArray(keyids)) {
-            throw new TypeError('keyids must be an array of strings');
-        }
-        if (typeof threshold !== 'number') {
-            throw new TypeError('threshold must be a number');
-        }
-        if (typeof name !== 'string') {
-            throw new TypeError('name must be a string');
-        }
-        if (typeof terminating !== 'boolean') {
-            throw new TypeError('terminating must be a boolean');
-        }
-        if (utils_1.guard.isDefined(paths) && !utils_1.guard.isStringArray(paths)) {
-            throw new TypeError('paths must be an array of strings');
-        }
-        if (utils_1.guard.isDefined(path_hash_prefixes) &&
-            !utils_1.guard.isStringArray(path_hash_prefixes)) {
-            throw new TypeError('path_hash_prefixes must be an array of strings');
-        }
-        return new DelegatedRole({
-            keyIDs: keyids,
-            threshold,
-            name,
-            terminating,
-            paths,
-            pathHashPrefixes: path_hash_prefixes,
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.DelegatedRole = DelegatedRole;
-// JS version of Ruby's Array#zip
-const zip = (a, b) => a.map((k, i) => [k, b[i]]);
-function isTargetInPathPattern(target, pattern) {
-    const targetParts = target.split('/');
-    const patternParts = pattern.split('/');
-    if (patternParts.length != targetParts.length) {
-        return false;
-    }
-    return zip(targetParts, patternParts).every(([targetPart, patternPart]) => (0, minimatch_1.minimatch)(targetPart, patternPart));
-}
-/**
- * Succinctly defines a hash bin delegation graph.
- *
- * A ``SuccinctRoles`` object describes a delegation graph that covers all
- * targets, distributing them uniformly over the delegated roles (i.e. bins)
- * in the graph.
- *
- * The total number of bins is 2 to the power of the passed ``bit_length``.
- *
- * Bin names are the concatenation of the passed ``name_prefix`` and a
- * zero-padded hex representation of the bin index separated by a hyphen.
- *
- * The passed ``keyids`` and ``threshold`` is used for each bin, and each bin
- * is 'terminating'.
- *
- * For details: https://github.com/theupdateframework/taps/blob/master/tap15.md
- */
-class SuccinctRoles extends Role {
-    constructor(opts) {
-        super(opts);
-        const { bitLength, namePrefix } = opts;
-        if (bitLength <= 0 || bitLength > 32) {
-            throw new error_1.ValueError('bitLength must be between 1 and 32');
-        }
-        this.bitLength = bitLength;
-        this.namePrefix = namePrefix;
-        // Calculate the suffix_len value based on the total number of bins in
-        // hex. If bit_length = 10 then number_of_bins = 1024, bin names will
-        // have a suffix between "000" and "3ff" in hex, and suffix_len will be
-        // 3, meaning the third bin will have a suffix of "003".
-        this.numberOfBins = Math.pow(2, bitLength);
-        // suffix_len is calculated based on "number_of_bins - 1" as the name
-        // of the last bin contains the number "number_of_bins - 1" as a suffix.
-        this.suffixLen = (this.numberOfBins - 1).toString(16).length;
-    }
-    equals(other) {
-        if (!(other instanceof SuccinctRoles)) {
-            return false;
-        }
-        return (super.equals(other) &&
-            this.bitLength === other.bitLength &&
-            this.namePrefix === other.namePrefix);
-    }
-    /***
-     * Calculates the name of the delegated role responsible for 'target_filepath'.
-     *
-     * The target at path 'target_filepath' is assigned to a bin by casting
-     * the left-most 'bit_length' bits of the file path hash digest to an
-     * int, using it as a bin index between 0 and '2**bit_length - 1'.
-     *
-     * Args:
-     *  target_filepath: URL path to a target file, relative to a base
-     *  targets URL.
-     */
-    getRoleForTarget(targetFilepath) {
-        const hasher = crypto_1.default.createHash('sha256');
-        const hasherBuffer = hasher.update(targetFilepath).digest();
-        // can't ever need more than 4 bytes (32 bits).
-        const hashBytes = hasherBuffer.subarray(0, 4);
-        // Right shift hash bytes, so that we only have the leftmost
-        // bit_length bits that we care about.
-        const shiftValue = 32 - this.bitLength;
-        const binNumber = hashBytes.readUInt32BE() >>> shiftValue;
-        // Add zero padding if necessary and cast to hex the suffix.
-        const suffix = binNumber.toString(16).padStart(this.suffixLen, '0');
-        return `${this.namePrefix}-${suffix}`;
-    }
-    *getRoles() {
-        for (let i = 0; i < this.numberOfBins; i++) {
-            const suffix = i.toString(16).padStart(this.suffixLen, '0');
-            yield `${this.namePrefix}-${suffix}`;
-        }
-    }
-    /***
-     * Determines whether the given ``role_name`` is in one of
-     * the delegated roles that ``SuccinctRoles`` represents.
-     *
-     * Args:
-     *  role_name: The name of the role to check against.
-     */
-    isDelegatedRole(roleName) {
-        const desiredPrefix = this.namePrefix + '-';
-        if (!roleName.startsWith(desiredPrefix)) {
-            return false;
-        }
-        const suffix = roleName.slice(desiredPrefix.length, roleName.length);
-        if (suffix.length != this.suffixLen) {
-            return false;
-        }
-        // make sure the suffix is a hex string
-        if (!suffix.match(/^[0-9a-fA-F]+$/)) {
-            return false;
-        }
-        const num = parseInt(suffix, 16);
-        return 0 <= num && num < this.numberOfBins;
-    }
-    toJSON() {
-        const json = {
-            ...super.toJSON(),
-            bit_length: this.bitLength,
-            name_prefix: this.namePrefix,
-        };
-        return json;
-    }
-    static fromJSON(data) {
-        const { keyids, threshold, bit_length, name_prefix, ...rest } = data;
-        if (!utils_1.guard.isStringArray(keyids)) {
-            throw new TypeError('keyids must be an array of strings');
-        }
-        if (typeof threshold !== 'number') {
-            throw new TypeError('threshold must be a number');
-        }
-        if (typeof bit_length !== 'number') {
-            throw new TypeError('bit_length must be a number');
-        }
-        if (typeof name_prefix !== 'string') {
-            throw new TypeError('name_prefix must be a string');
-        }
-        return new SuccinctRoles({
-            keyIDs: keyids,
-            threshold,
-            bitLength: bit_length,
-            namePrefix: name_prefix,
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.SuccinctRoles = SuccinctRoles;
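
For reference, the succinct-roles scheme above (TAP 15) maps each target path to one of 2^bitLength bins by taking the top bitLength bits of its SHA-256 digest. A worked sketch for bitLength = 10 (1024 bins, three-hex-digit suffixes; the prefix and path are illustrative):

    const crypto = require('crypto');

    const bitLength = 10;
    const suffixLen = (2 ** bitLength - 1).toString(16).length; // '3ff' -> 3

    // The top 10 bits of the path's SHA-256 digest select the bin.
    const digest = crypto.createHash('sha256').update('pkg/app.tgz').digest();
    const bin = digest.readUInt32BE(0) >>> (32 - bitLength);
    console.log(`bins-${bin.toString(16).padStart(suffixLen, '0')}`); // e.g. bins-2a7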
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/root.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/root.js
deleted file mode 100644
index 36d0ef0f186d1..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/root.js
+++ /dev/null
@@ -1,116 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Root = void 0;
-const util_1 = __importDefault(require("util"));
-const base_1 = require("./base");
-const error_1 = require("./error");
-const key_1 = require("./key");
-const role_1 = require("./role");
-const utils_1 = require("./utils");
-/**
- * A container for the signed part of root metadata.
- *
- * The top-level role and metadata file signed by the root keys.
- * This role specifies trusted keys for all other top-level roles, which may further delegate trust.
- */
-class Root extends base_1.Signed {
-    constructor(options) {
-        super(options);
-        this.type = base_1.MetadataKind.Root;
-        this.keys = options.keys || {};
-        this.consistentSnapshot = options.consistentSnapshot ?? true;
-        if (!options.roles) {
-            this.roles = role_1.TOP_LEVEL_ROLE_NAMES.reduce((acc, role) => ({
-                ...acc,
-                [role]: new role_1.Role({ keyIDs: [], threshold: 1 }),
-            }), {});
-        }
-        else {
-            const roleNames = new Set(Object.keys(options.roles));
-            if (!role_1.TOP_LEVEL_ROLE_NAMES.every((role) => roleNames.has(role))) {
-                throw new error_1.ValueError('missing top-level role');
-            }
-            this.roles = options.roles;
-        }
-    }
-    addKey(key, role) {
-        if (!this.roles[role]) {
-            throw new error_1.ValueError(`role ${role} does not exist`);
-        }
-        if (!this.roles[role].keyIDs.includes(key.keyID)) {
-            this.roles[role].keyIDs.push(key.keyID);
-        }
-        this.keys[key.keyID] = key;
-    }
-    equals(other) {
-        if (!(other instanceof Root)) {
-            return false;
-        }
-        return (super.equals(other) &&
-            this.consistentSnapshot === other.consistentSnapshot &&
-            util_1.default.isDeepStrictEqual(this.keys, other.keys) &&
-            util_1.default.isDeepStrictEqual(this.roles, other.roles));
-    }
-    toJSON() {
-        return {
-            _type: this.type,
-            spec_version: this.specVersion,
-            version: this.version,
-            expires: this.expires,
-            keys: keysToJSON(this.keys),
-            roles: rolesToJSON(this.roles),
-            consistent_snapshot: this.consistentSnapshot,
-            ...this.unrecognizedFields,
-        };
-    }
-    static fromJSON(data) {
-        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
-        const { keys, roles, consistent_snapshot, ...rest } = unrecognizedFields;
-        if (typeof consistent_snapshot !== 'boolean') {
-            throw new TypeError('consistent_snapshot must be a boolean');
-        }
-        return new Root({
-            ...commonFields,
-            keys: keysFromJSON(keys),
-            roles: rolesFromJSON(roles),
-            consistentSnapshot: consistent_snapshot,
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.Root = Root;
-function keysToJSON(keys) {
-    return Object.entries(keys).reduce((acc, [keyID, key]) => ({ ...acc, [keyID]: key.toJSON() }), {});
-}
-function rolesToJSON(roles) {
-    return Object.entries(roles).reduce((acc, [roleName, role]) => ({ ...acc, [roleName]: role.toJSON() }), {});
-}
-function keysFromJSON(data) {
-    let keys;
-    if (utils_1.guard.isDefined(data)) {
-        if (!utils_1.guard.isObjectRecord(data)) {
-            throw new TypeError('keys must be an object');
-        }
-        keys = Object.entries(data).reduce((acc, [keyID, keyData]) => ({
-            ...acc,
-            [keyID]: key_1.Key.fromJSON(keyID, keyData),
-        }), {});
-    }
-    return keys;
-}
-function rolesFromJSON(data) {
-    let roles;
-    if (utils_1.guard.isDefined(data)) {
-        if (!utils_1.guard.isObjectRecord(data)) {
-            throw new TypeError('roles must be an object');
-        }
-        roles = Object.entries(data).reduce((acc, [roleName, roleData]) => ({
-            ...acc,
-            [roleName]: role_1.Role.fromJSON(roleData),
-        }), {});
-    }
-    return roles;
-}
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js
deleted file mode 100644
index 33eb204eb0835..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js
+++ /dev/null
@@ -1,38 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signature = void 0;
-/**
- * A container class for information about a signature.
- *
- * Contains a signature and the keyid uniquely identifying the key used
- * to generate the signature.
- *
- * Provides a `fromJSON` method to create a Signature from a JSON object.
- */
-class Signature {
-    constructor(options) {
-        const { keyID, sig } = options;
-        this.keyID = keyID;
-        this.sig = sig;
-    }
-    toJSON() {
-        return {
-            keyid: this.keyID,
-            sig: this.sig,
-        };
-    }
-    static fromJSON(data) {
-        const { keyid, sig } = data;
-        if (typeof keyid !== 'string') {
-            throw new TypeError('keyid must be a string');
-        }
-        if (typeof sig !== 'string') {
-            throw new TypeError('sig must be a string');
-        }
-        return new Signature({
-            keyID: keyid,
-            sig: sig,
-        });
-    }
-}
-exports.Signature = Signature;
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js
deleted file mode 100644
index e90ea8e729e4e..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js
+++ /dev/null
@@ -1,71 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Snapshot = void 0;
-const util_1 = __importDefault(require("util"));
-const base_1 = require("./base");
-const file_1 = require("./file");
-const utils_1 = require("./utils");
-/**
- * A container for the signed part of snapshot metadata.
- *
- * Snapshot contains information about all target Metadata files.
- * A top-level role that specifies the latest versions of all targets metadata files,
- * and hence the latest versions of all targets (including any dependencies between them) on the repository.
- */
-class Snapshot extends base_1.Signed {
-    constructor(opts) {
-        super(opts);
-        this.type = base_1.MetadataKind.Snapshot;
-        this.meta = opts.meta || { 'targets.json': new file_1.MetaFile({ version: 1 }) };
-    }
-    equals(other) {
-        if (!(other instanceof Snapshot)) {
-            return false;
-        }
-        return super.equals(other) && util_1.default.isDeepStrictEqual(this.meta, other.meta);
-    }
-    toJSON() {
-        return {
-            _type: this.type,
-            meta: metaToJSON(this.meta),
-            spec_version: this.specVersion,
-            version: this.version,
-            expires: this.expires,
-            ...this.unrecognizedFields,
-        };
-    }
-    static fromJSON(data) {
-        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
-        const { meta, ...rest } = unrecognizedFields;
-        return new Snapshot({
-            ...commonFields,
-            meta: metaFromJSON(meta),
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.Snapshot = Snapshot;
-function metaToJSON(meta) {
-    return Object.entries(meta).reduce((acc, [path, metadata]) => ({
-        ...acc,
-        [path]: metadata.toJSON(),
-    }), {});
-}
-function metaFromJSON(data) {
-    let meta;
-    if (utils_1.guard.isDefined(data)) {
-        if (!utils_1.guard.isObjectRecord(data)) {
-            throw new TypeError('meta field is malformed');
-        }
-        else {
-            meta = Object.entries(data).reduce((acc, [path, metadata]) => ({
-                ...acc,
-                [path]: file_1.MetaFile.fromJSON(metadata),
-            }), {});
-        }
-    }
-    return meta;
-}
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js
deleted file mode 100644
index 54bd8f8c554af..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js
+++ /dev/null
@@ -1,92 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Targets = void 0;
-const util_1 = __importDefault(require("util"));
-const base_1 = require("./base");
-const delegations_1 = require("./delegations");
-const file_1 = require("./file");
-const utils_1 = require("./utils");
-// Container for the signed part of targets metadata.
-//
-// Targets contains verifying information about target files and also delegates
-// responsibility to other Targets roles.
-class Targets extends base_1.Signed {
-    constructor(options) {
-        super(options);
-        this.type = base_1.MetadataKind.Targets;
-        this.targets = options.targets || {};
-        this.delegations = options.delegations;
-    }
-    addTarget(target) {
-        this.targets[target.path] = target;
-    }
-    equals(other) {
-        if (!(other instanceof Targets)) {
-            return false;
-        }
-        return (super.equals(other) &&
-            util_1.default.isDeepStrictEqual(this.targets, other.targets) &&
-            util_1.default.isDeepStrictEqual(this.delegations, other.delegations));
-    }
-    toJSON() {
-        const json = {
-            _type: this.type,
-            spec_version: this.specVersion,
-            version: this.version,
-            expires: this.expires,
-            targets: targetsToJSON(this.targets),
-            ...this.unrecognizedFields,
-        };
-        if (this.delegations) {
-            json.delegations = this.delegations.toJSON();
-        }
-        return json;
-    }
-    static fromJSON(data) {
-        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
-        const { targets, delegations, ...rest } = unrecognizedFields;
-        return new Targets({
-            ...commonFields,
-            targets: targetsFromJSON(targets),
-            delegations: delegationsFromJSON(delegations),
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.Targets = Targets;
-function targetsToJSON(targets) {
-    return Object.entries(targets).reduce((acc, [path, target]) => ({
-        ...acc,
-        [path]: target.toJSON(),
-    }), {});
-}
-function targetsFromJSON(data) {
-    let targets;
-    if (utils_1.guard.isDefined(data)) {
-        if (!utils_1.guard.isObjectRecord(data)) {
-            throw new TypeError('targets must be an object');
-        }
-        else {
-            targets = Object.entries(data).reduce((acc, [path, target]) => ({
-                ...acc,
-                [path]: file_1.TargetFile.fromJSON(path, target),
-            }), {});
-        }
-    }
-    return targets;
-}
-function delegationsFromJSON(data) {
-    let delegations;
-    if (utils_1.guard.isDefined(data)) {
-        if (!utils_1.guard.isObject(data)) {
-            throw new TypeError('delegations must be an object');
-        }
-        else {
-            delegations = delegations_1.Delegations.fromJSON(data);
-        }
-    }
-    return delegations;
-}
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js
deleted file mode 100644
index 9880c4c9fc254..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js
+++ /dev/null
@@ -1,58 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Timestamp = void 0;
-const base_1 = require("./base");
-const file_1 = require("./file");
-const utils_1 = require("./utils");
-/**
- * A container for the signed part of timestamp metadata.
- *
- * A top-level role that specifies the latest version of the snapshot role metadata file,
- * and hence the latest versions of all metadata and targets on the repository.
- */
-class Timestamp extends base_1.Signed {
-    constructor(options) {
-        super(options);
-        this.type = base_1.MetadataKind.Timestamp;
-        this.snapshotMeta = options.snapshotMeta || new file_1.MetaFile({ version: 1 });
-    }
-    equals(other) {
-        if (!(other instanceof Timestamp)) {
-            return false;
-        }
-        return super.equals(other) && this.snapshotMeta.equals(other.snapshotMeta);
-    }
-    toJSON() {
-        return {
-            _type: this.type,
-            spec_version: this.specVersion,
-            version: this.version,
-            expires: this.expires,
-            meta: { 'snapshot.json': this.snapshotMeta.toJSON() },
-            ...this.unrecognizedFields,
-        };
-    }
-    static fromJSON(data) {
-        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
-        const { meta, ...rest } = unrecognizedFields;
-        return new Timestamp({
-            ...commonFields,
-            snapshotMeta: snapshotMetaFromJSON(meta),
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.Timestamp = Timestamp;
-function snapshotMetaFromJSON(data) {
-    let snapshotMeta;
-    if (utils_1.guard.isDefined(data)) {
-        const snapshotData = data['snapshot.json'];
-        if (!utils_1.guard.isDefined(snapshotData) || !utils_1.guard.isObject(snapshotData)) {
-            throw new TypeError('missing snapshot.json in meta');
-        }
-        else {
-            snapshotMeta = file_1.MetaFile.fromJSON(snapshotData);
-        }
-    }
-    return snapshotMeta;
-}
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js
deleted file mode 100644
index efe558852303c..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js
+++ /dev/null
@@ -1,33 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.isObjectRecord = exports.isStringRecord = exports.isObjectArray = exports.isStringArray = exports.isObject = exports.isDefined = void 0;
-function isDefined(val) {
-    return val !== undefined;
-}
-exports.isDefined = isDefined;
-function isObject(value) {
-    return typeof value === 'object' && value !== null;
-}
-exports.isObject = isObject;
-function isStringArray(value) {
-    return Array.isArray(value) && value.every((v) => typeof v === 'string');
-}
-exports.isStringArray = isStringArray;
-function isObjectArray(value) {
-    return Array.isArray(value) && value.every(isObject);
-}
-exports.isObjectArray = isObjectArray;
-function isStringRecord(value) {
-    return (typeof value === 'object' &&
-        value !== null &&
-        Object.keys(value).every((k) => typeof k === 'string') &&
-        Object.values(value).every((v) => typeof v === 'string'));
-}
-exports.isStringRecord = isStringRecord;
-function isObjectRecord(value) {
-    return (typeof value === 'object' &&
-        value !== null &&
-        Object.keys(value).every((k) => typeof k === 'string') &&
-        Object.values(value).every((v) => typeof v === 'object' && v !== null));
-}
-exports.isObjectRecord = isObjectRecord;
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js
deleted file mode 100644
index 872aae28049c9..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js
+++ /dev/null
@@ -1,28 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.crypto = exports.guard = void 0;
-exports.guard = __importStar(require("./guard"));
-exports.crypto = __importStar(require("./verify"));
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js
deleted file mode 100644
index 1f795ba1a2733..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js
+++ /dev/null
@@ -1,143 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getPublicKey = void 0;
-const crypto_1 = __importDefault(require("crypto"));
-const error_1 = require("../error");
-const oid_1 = require("./oid");
-const ASN1_TAG_SEQUENCE = 0x30;
-const ASN1_TAG_BIT_STRING = 0x03;
-const NULL_BYTE = 0x00;
-const OID_EDDSA = '1.3.101.112';
-const OID_EC_PUBLIC_KEY = '1.2.840.10045.2.1';
-const OID_EC_CURVE_P256V1 = '1.2.840.10045.3.1.7';
-const PEM_HEADER = '-----BEGIN PUBLIC KEY-----';
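-// Maps a TUF key descriptor (keyType / scheme / keyVal) to a Node.js
-// KeyObject, plus any padding options that crypto.verify() will need.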
-function getPublicKey(keyInfo) {
-    switch (keyInfo.keyType) {
-        case 'rsa':
-            return getRSAPublicKey(keyInfo);
-        case 'ed25519':
-            return getED25519PublicKey(keyInfo);
-        case 'ecdsa':
-        case 'ecdsa-sha2-nistp256':
-        case 'ecdsa-sha2-nistp384':
-            return getECDSAPublicKey(keyInfo);
-        default:
-            throw new error_1.UnsupportedAlgorithmError(`Unsupported key type: ${keyInfo.keyType}`);
-    }
-}
-exports.getPublicKey = getPublicKey;
-function getRSAPublicKey(keyInfo) {
-    // Only support PEM-encoded RSA keys
-    if (!keyInfo.keyVal.startsWith(PEM_HEADER)) {
-        throw new error_1.CryptoError('Invalid key format');
-    }
-    const key = crypto_1.default.createPublicKey(keyInfo.keyVal);
-    switch (keyInfo.scheme) {
-        case 'rsassa-pss-sha256':
-            return {
-                key: key,
-                padding: crypto_1.default.constants.RSA_PKCS1_PSS_PADDING,
-            };
-        default:
-            throw new error_1.UnsupportedAlgorithmError(`Unsupported RSA scheme: ${keyInfo.scheme}`);
-    }
-}
-function getED25519PublicKey(keyInfo) {
-    let key;
-    // If key is already PEM-encoded we can just parse it
-    if (keyInfo.keyVal.startsWith(PEM_HEADER)) {
-        key = crypto_1.default.createPublicKey(keyInfo.keyVal);
-    }
-    else {
-        // If key is not PEM-encoded it had better be hex
-        if (!isHex(keyInfo.keyVal)) {
-            throw new error_1.CryptoError('Invalid key format');
-        }
-        key = crypto_1.default.createPublicKey({
-            key: ed25519.hexToDER(keyInfo.keyVal),
-            format: 'der',
-            type: 'spki',
-        });
-    }
-    return { key };
-}
-function getECDSAPublicKey(keyInfo) {
-    let key;
-    // If key is already PEM-encoded we can just parse it
-    if (keyInfo.keyVal.startsWith(PEM_HEADER)) {
-        key = crypto_1.default.createPublicKey(keyInfo.keyVal);
-    }
-    else {
-        // If key is not PEM-encoded it had better be hex
-        if (!isHex(keyInfo.keyVal)) {
-            throw new error_1.CryptoError('Invalid key format');
-        }
-        key = crypto_1.default.createPublicKey({
-            key: ecdsa.hexToDER(keyInfo.keyVal),
-            format: 'der',
-            type: 'spki',
-        });
-    }
-    return { key };
-}
-const ed25519 = {
-    // Translates a raw hex-encoded public key into its DER (SPKI) encoding
-    // https://keygen.sh/blog/how-to-use-hexadecimal-ed25519-keys-in-node/
-    hexToDER: (hex) => {
-        const key = Buffer.from(hex, 'hex');
-        const oid = (0, oid_1.encodeOIDString)(OID_EDDSA);
-        // Create a byte sequence containing the OID and key
-        const elements = Buffer.concat([
-            Buffer.concat([
-                Buffer.from([ASN1_TAG_SEQUENCE]),
-                Buffer.from([oid.length]),
-                oid,
-            ]),
-            Buffer.concat([
-                Buffer.from([ASN1_TAG_BIT_STRING]),
-                Buffer.from([key.length + 1]),
-                Buffer.from([NULL_BYTE]),
-                key,
-            ]),
-        ]);
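-        // elements now holds SEQUENCE(OID) followed by BIT STRING(0x00 || key),
-        // i.e. the body of an X.509 SubjectPublicKeyInfo structure.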
-        // Wrap up by creating a sequence of elements
-        const der = Buffer.concat([
-            Buffer.from([ASN1_TAG_SEQUENCE]),
-            Buffer.from([elements.length]),
-            elements,
-        ]);
-        return der;
-    },
-};
-const ecdsa = {
-    hexToDER: (hex) => {
-        const key = Buffer.from(hex, 'hex');
-        const bitString = Buffer.concat([
-            Buffer.from([ASN1_TAG_BIT_STRING]),
-            Buffer.from([key.length + 1]),
-            Buffer.from([NULL_BYTE]),
-            key,
-        ]);
-        const oids = Buffer.concat([
-            (0, oid_1.encodeOIDString)(OID_EC_PUBLIC_KEY),
-            (0, oid_1.encodeOIDString)(OID_EC_CURVE_P256V1),
-        ]);
-        const oidSequence = Buffer.concat([
-            Buffer.from([ASN1_TAG_SEQUENCE]),
-            Buffer.from([oids.length]),
-            oids,
-        ]);
-        // Wrap up by creating a sequence of elements
-        const der = Buffer.concat([
-            Buffer.from([ASN1_TAG_SEQUENCE]),
-            Buffer.from([oidSequence.length + bitString.length]),
-            oidSequence,
-            bitString,
-        ]);
-        return der;
-    },
-};
-const isHex = (key) => /^[0-9a-fA-F]+$/.test(key);
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js
deleted file mode 100644
index e1bb7af5e54fb..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js
+++ /dev/null
@@ -1,27 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.encodeOIDString = void 0;
-const ASN1_TAG_OID = 0x06;
-function encodeOIDString(oid) {
-    const parts = oid.split('.');
-    // The first two subidentifiers are encoded into the first byte
-    const first = parseInt(parts[0], 10) * 40 + parseInt(parts[1], 10);
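-    // e.g. for '1.2.840.10045.2.1' this is 1 * 40 + 2 = 42 (0x2a)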
-    const rest = [];
-    parts.slice(2).forEach((part) => {
-        const bytes = encodeVariableLengthInteger(parseInt(part, 10));
-        rest.push(...bytes);
-    });
-    const der = Buffer.from([first, ...rest]);
-    return Buffer.from([ASN1_TAG_OID, der.length, ...der]);
-}
-exports.encodeOIDString = encodeOIDString;
-function encodeVariableLengthInteger(value) {
-    const bytes = [];
-    let mask = 0x00;
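-    // base-128 encoding: 7 value bits per byte, with the high bit set on
-    // every byte except the last (least-significant) one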
-    while (value > 0) {
-        bytes.unshift((value & 0x7f) | mask);
-        value >>= 7;
-        mask = 0x80;
-    }
-    return bytes;
-}
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js
deleted file mode 100644
index c8ad2e549bdc6..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js
deleted file mode 100644
index 8232b6f6a97ab..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js
+++ /dev/null
@@ -1,13 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifySignature = void 0;
-const canonical_json_1 = require("@tufjs/canonical-json");
-const crypto_1 = __importDefault(require("crypto"));
-const verifySignature = (metaDataSignedData, key, signature) => {
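-    // serialize to TUF canonical JSON so the verified bytes match exactly
-    // what the signer hashed, regardless of property ordering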
-    const canonicalData = Buffer.from((0, canonical_json_1.canonicalize)(metaDataSignedData));
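-    // passing `undefined` as the algorithm lets Node infer it from the key
-    // type, which is required for Ed25519 keys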
-    return crypto_1.default.verify(undefined, canonicalData, key, Buffer.from(signature, 'hex'));
-};
-exports.verifySignature = verifySignature;
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/package.json b/node_modules/sigstore/node_modules/@tufjs/models/package.json
deleted file mode 100644
index 6711ee0dababc..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/package.json
+++ /dev/null
@@ -1,41 +0,0 @@
-{
-  "name": "@tufjs/models",
-  "version": "1.0.4",
-  "description": "TUF metadata models",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "build": "tsc --build",
-    "clean": "rm -rf dist && rm tsconfig.tsbuildinfo",
-    "test": "jest"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/theupdateframework/tuf-js.git"
-  },
-  "keywords": [
-    "tuf",
-    "security",
-    "update"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "MIT",
-  "bugs": {
-    "url": "https://github.com/theupdateframework/tuf-js/issues"
-  },
-  "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme",
-  "devDependencies": {
-    "@types/node": "^18.16.3",
-    "typescript": "^5.0.4"
-  },
-  "dependencies": {
-    "@tufjs/canonical-json": "1.0.0",
-    "minimatch": "^9.0.0"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  }
-}
diff --git a/node_modules/sigstore/node_modules/cacache/LICENSE.md b/node_modules/sigstore/node_modules/cacache/LICENSE.md
deleted file mode 100644
index 8d28acf866d93..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/cacache/lib/content/path.js b/node_modules/sigstore/node_modules/cacache/lib/content/path.js
deleted file mode 100644
index ad5a76a4f73f2..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/lib/content/path.js
+++ /dev/null
@@ -1,29 +0,0 @@
-'use strict'
-
-const contentVer = require('../../package.json')['cache-version'].content
-const hashToSegments = require('../util/hash-to-segments')
-const path = require('path')
-const ssri = require('ssri')
-
-// Current format of content file path:
-//
-// sha512-BaSE64Hex= ->
-// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
-//
-module.exports = contentPath
-
-function contentPath (cache, integrity) {
-  const sri = ssri.parse(integrity, { single: true })
-  // contentPath is the *strongest* algo given
-  return path.join(
-    contentDir(cache),
-    sri.algorithm,
-    ...hashToSegments(sri.hexDigest())
-  )
-}
-
-module.exports.contentDir = contentDir
-
-function contentDir (cache) {
-  return path.join(cache, `content-v${contentVer}`)
-}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/content/read.js b/node_modules/sigstore/node_modules/cacache/lib/content/read.js
deleted file mode 100644
index f41b539df65dc..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/lib/content/read.js
+++ /dev/null
@@ -1,166 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const fsm = require('fs-minipass')
-const ssri = require('ssri')
-const contentPath = require('./path')
-const Pipeline = require('minipass-pipeline')
-
-module.exports = read
-
-const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
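-// files larger than 64MiB are verified via a streaming pipeline instead of
-// a single readFile call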
-async function read (cache, integrity, opts = {}) {
-  const { size } = opts
-  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
-    // get size
-    const stat = await fs.stat(cpath)
-    return { stat, cpath, sri }
-  })
-  if (typeof size === 'number' && stat.size !== size) {
-    throw sizeError(size, stat.size)
-  }
-
-  if (stat.size > MAX_SINGLE_READ_SIZE) {
-    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
-  }
-
-  const data = await fs.readFile(cpath, { encoding: null })
-  if (!ssri.checkData(data, sri)) {
-    throw integrityError(sri, cpath)
-  }
-
-  return data
-}
-
-const readPipeline = (cpath, size, sri, stream) => {
-  stream.push(
-    new fsm.ReadStream(cpath, {
-      size,
-      readSize: MAX_SINGLE_READ_SIZE,
-    }),
-    ssri.integrityStream({
-      integrity: sri,
-      size,
-    })
-  )
-  return stream
-}
-
-module.exports.stream = readStream
-module.exports.readStream = readStream
-
-function readStream (cache, integrity, opts = {}) {
-  const { size } = opts
-  const stream = new Pipeline()
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
-      // just stat to ensure it exists
-      const stat = await fs.stat(cpath)
-      return { stat, cpath, sri }
-    })
-    if (typeof size === 'number' && size !== stat.size) {
-      return stream.emit('error', sizeError(size, stat.size))
-    }
-
-    return readPipeline(cpath, stat.size, sri, stream)
-  }).catch(err => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.copy = copy
-
-function copy (cache, integrity, dest) {
-  return withContentSri(cache, integrity, (cpath, sri) => {
-    return fs.copyFile(cpath, dest)
-  })
-}
-
-module.exports.hasContent = hasContent
-
-async function hasContent (cache, integrity) {
-  if (!integrity) {
-    return false
-  }
-
-  try {
-    return await withContentSri(cache, integrity, async (cpath, sri) => {
-      const stat = await fs.stat(cpath)
-      return { size: stat.size, sri, stat }
-    })
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return false
-    }
-
-    if (err.code === 'EPERM') {
-      /* istanbul ignore else */
-      if (process.platform !== 'win32') {
-        throw err
-      } else {
-        return false
-      }
-    }
-  }
-}
-
-async function withContentSri (cache, integrity, fn) {
-  const sri = ssri.parse(integrity)
-  // If `integrity` has multiple entries, pick the first digest
-  // with available local data.
-  const algo = sri.pickAlgorithm()
-  const digests = sri[algo]
-
-  if (digests.length <= 1) {
-    const cpath = contentPath(cache, digests[0])
-    return fn(cpath, digests[0])
-  } else {
-    // Can't use race here because a generic error can happen before
-    // an ENOENT error, and can happen before a valid result
-    const results = await Promise.all(digests.map(async (meta) => {
-      try {
-        return await withContentSri(cache, meta, fn)
-      } catch (err) {
-        if (err.code === 'ENOENT') {
-          return Object.assign(
-            new Error('No matching content found for ' + sri.toString()),
-            { code: 'ENOENT' }
-          )
-        }
-        return err
-      }
-    }))
-    // Return the first non-error result, if any
-    const result = results.find((r) => !(r instanceof Error))
-    if (result) {
-      return result
-    }
-
-    // Throw the No matching content found error
-    const enoentError = results.find((r) => r.code === 'ENOENT')
-    if (enoentError) {
-      throw enoentError
-    }
-
-    // Throw generic error
-    throw results.find((r) => r instanceof Error)
-  }
-}
-
-function sizeError (expected, found) {
-  /* eslint-disable-next-line max-len */
-  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
-  err.expected = expected
-  err.found = found
-  err.code = 'EBADSIZE'
-  return err
-}
-
-function integrityError (sri, path) {
-  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
-  err.code = 'EINTEGRITY'
-  err.sri = sri
-  err.path = path
-  return err
-}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/content/rm.js b/node_modules/sigstore/node_modules/cacache/lib/content/rm.js
deleted file mode 100644
index ce58d679e4cb2..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/lib/content/rm.js
+++ /dev/null
@@ -1,18 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const contentPath = require('./path')
-const { hasContent } = require('./read')
-
-module.exports = rm
-
-async function rm (cache, integrity) {
-  const content = await hasContent(cache, integrity)
-  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
-  if (content && content.sri) {
-    await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
-    return true
-  } else {
-    return false
-  }
-}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/content/write.js b/node_modules/sigstore/node_modules/cacache/lib/content/write.js
deleted file mode 100644
index 7146146581287..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/lib/content/write.js
+++ /dev/null
@@ -1,205 +0,0 @@
-'use strict'
-
-const events = require('events')
-
-const contentPath = require('./path')
-const fs = require('fs/promises')
-const { moveFile } = require('@npmcli/fs')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-const Flush = require('minipass-flush')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-const fsm = require('fs-minipass')
-
-module.exports = write
-
-// Cache of move operations in process so we don't duplicate
-const moveOperations = new Map()
-
-async function write (cache, data, opts = {}) {
-  const { algorithms, size, integrity } = opts
-
-  if (typeof size === 'number' && data.length !== size) {
-    throw sizeError(size, data.length)
-  }
-
-  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
-  if (integrity && !ssri.checkData(data, integrity, opts)) {
-    throw checksumError(integrity, sri)
-  }
-
-  for (const algo in sri) {
-    const tmp = await makeTmp(cache, opts)
-    const hash = sri[algo].toString()
-    try {
-      await fs.writeFile(tmp.target, data, { flag: 'wx' })
-      await moveToDestination(tmp, cache, hash, opts)
-    } finally {
-      if (!tmp.moved) {
-        await fs.rm(tmp.target, { recursive: true, force: true })
-      }
-    }
-  }
-  return { integrity: sri, size: data.length }
-}
-
-module.exports.stream = writeStream
-
-// writes are proxied to the 'inputStream' that handleContent() consumes;
-// 'end' is deferred until the content has been fully handled.
-class CacacheWriteStream extends Flush {
-  constructor (cache, opts) {
-    super()
-    this.opts = opts
-    this.cache = cache
-    this.inputStream = new Minipass()
-    this.inputStream.on('error', er => this.emit('error', er))
-    this.inputStream.on('drain', () => this.emit('drain'))
-    this.handleContentP = null
-  }
-
-  write (chunk, encoding, cb) {
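-    // defer starting content handling until the first write, so a stream
-    // that ends without any data can fail with ENODATA in flush()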
-    if (!this.handleContentP) {
-      this.handleContentP = handleContent(
-        this.inputStream,
-        this.cache,
-        this.opts
-      )
-    }
-    return this.inputStream.write(chunk, encoding, cb)
-  }
-
-  flush (cb) {
-    this.inputStream.end(() => {
-      if (!this.handleContentP) {
-        const e = new Error('Cache input stream was empty')
-        e.code = 'ENODATA'
-        // empty streams are probably emitting end right away.
-        // defer this one tick by rejecting a promise on it.
-        return Promise.reject(e).catch(cb)
-      }
-      // eslint-disable-next-line promise/catch-or-return
-      this.handleContentP.then(
-        (res) => {
-          res.integrity && this.emit('integrity', res.integrity)
-          // eslint-disable-next-line promise/always-return
-          res.size !== null && this.emit('size', res.size)
-          cb()
-        },
-        (er) => cb(er)
-      )
-    })
-  }
-}
-
-function writeStream (cache, opts = {}) {
-  return new CacacheWriteStream(cache, opts)
-}
-
-async function handleContent (inputStream, cache, opts) {
-  const tmp = await makeTmp(cache, opts)
-  try {
-    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
-    await moveToDestination(
-      tmp,
-      cache,
-      res.integrity,
-      opts
-    )
-    return res
-  } finally {
-    if (!tmp.moved) {
-      await fs.rm(tmp.target, { recursive: true, force: true })
-    }
-  }
-}
-
-async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
-  const outStream = new fsm.WriteStream(tmpTarget, {
-    flags: 'wx',
-  })
-
-  if (opts.integrityEmitter) {
-    // we need to create these all simultaneously since they can fire in any order
-    const [integrity, size] = await Promise.all([
-      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
-      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
-      new Pipeline(inputStream, outStream).promise(),
-    ])
-    return { integrity, size }
-  }
-
-  let integrity
-  let size
-  const hashStream = ssri.integrityStream({
-    integrity: opts.integrity,
-    algorithms: opts.algorithms,
-    size: opts.size,
-  })
-  hashStream.on('integrity', i => {
-    integrity = i
-  })
-  hashStream.on('size', s => {
-    size = s
-  })
-
-  const pipeline = new Pipeline(inputStream, hashStream, outStream)
-  await pipeline.promise()
-  return { integrity, size }
-}
-
-async function makeTmp (cache, opts) {
-  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
-  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
-  return {
-    target: tmpTarget,
-    moved: false,
-  }
-}
-
-async function moveToDestination (tmp, cache, sri, opts) {
-  const destination = contentPath(cache, sri)
-  const destDir = path.dirname(destination)
-  if (moveOperations.has(destination)) {
-    return moveOperations.get(destination)
-  }
-  moveOperations.set(
-    destination,
-    fs.mkdir(destDir, { recursive: true })
-      .then(async () => {
-        await moveFile(tmp.target, destination, { overwrite: false })
-        tmp.moved = true
-        return tmp.moved
-      })
-      .catch(err => {
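-        // a 'destination file exists' error means another writer already
-        // cached this exact content, which is fine; swallow it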
-        if (!err.message.startsWith('The destination file exists')) {
-          throw Object.assign(err, { code: 'EEXIST' })
-        }
-      }).finally(() => {
-        moveOperations.delete(destination)
-      })
-
-  )
-  return moveOperations.get(destination)
-}
-
-function sizeError (expected, found) {
-  /* eslint-disable-next-line max-len */
-  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
-  err.expected = expected
-  err.found = found
-  err.code = 'EBADSIZE'
-  return err
-}
-
-function checksumError (expected, found) {
-  const err = new Error(`Integrity check failed:
-  Wanted: ${expected}
-   Found: ${found}`)
-  err.code = 'EINTEGRITY'
-  err.expected = expected
-  err.found = found
-  return err
-}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/entry-index.js b/node_modules/sigstore/node_modules/cacache/lib/entry-index.js
deleted file mode 100644
index 722a37af5ce15..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/lib/entry-index.js
+++ /dev/null
@@ -1,330 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const {
-  appendFile,
-  mkdir,
-  readFile,
-  readdir,
-  rm,
-  writeFile,
-} = require('fs/promises')
-const { Minipass } = require('minipass')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-
-const contentPath = require('./content/path')
-const hashToSegments = require('./util/hash-to-segments')
-const indexV = require('../package.json')['cache-version'].index
-const { moveFile } = require('@npmcli/fs')
-
-module.exports.NotFoundError = class NotFoundError extends Error {
-  constructor (cache, key) {
-    super(`No cache entry for ${key} found in ${cache}`)
-    this.code = 'ENOENT'
-    this.cache = cache
-    this.key = key
-  }
-}
-
-module.exports.compact = compact
-
-async function compact (cache, key, matchFn, opts = {}) {
-  const bucket = bucketPath(cache, key)
-  const entries = await bucketEntries(bucket)
-  const newEntries = []
-  // we loop backwards because the bottom-most result is the newest
-  // since we add new entries with appendFile
-  for (let i = entries.length - 1; i >= 0; --i) {
-    const entry = entries[i]
-    // a null integrity could mean either a delete was appended
-    // or the user has simply stored an index that does not map
-    // to any content. we determine if the user wants to keep the
-    // null integrity based on the validateEntry function passed in options.
-    // if the integrity is null and no validateEntry is provided, we break
-    // as we consider the null integrity to be a deletion of everything
-    // that came before it.
-    if (entry.integrity === null && !opts.validateEntry) {
-      break
-    }
-
-    // if this entry is valid, and it is either the first entry or
-    // the newEntries array doesn't already include an entry that
-    // matches this one based on the provided matchFn, then we add
-    // it to the beginning of our list
-    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
-      (newEntries.length === 0 ||
-        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
-      newEntries.unshift(entry)
-    }
-  }
-
-  const newIndex = '\n' + newEntries.map((entry) => {
-    const stringified = JSON.stringify(entry)
-    const hash = hashEntry(stringified)
-    return `${hash}\t${stringified}`
-  }).join('\n')
-
-  const setup = async () => {
-    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
-    await mkdir(path.dirname(target), { recursive: true })
-    return {
-      target,
-      moved: false,
-    }
-  }
-
-  const teardown = async (tmp) => {
-    if (!tmp.moved) {
-      return rm(tmp.target, { recursive: true, force: true })
-    }
-  }
-
-  const write = async (tmp) => {
-    await writeFile(tmp.target, newIndex, { flag: 'wx' })
-    await mkdir(path.dirname(bucket), { recursive: true })
-    // we use moveFile from @npmcli/fs directly here because we
-    // want to overwrite the existing file
-    await moveFile(tmp.target, bucket)
-    tmp.moved = true
-  }
-
-  // write the file atomically
-  const tmp = await setup()
-  try {
-    await write(tmp)
-  } finally {
-    await teardown(tmp)
-  }
-
-  // we reverse the list we generated such that the newest
-  // entries come first in order to make looping through them easier
-  // the true passed to formatEntry tells it to keep null
-  // integrity values, if they made it this far it's because
-  // validateEntry returned true, and as such we should return it
-  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
-}
-
-module.exports.insert = insert
-
-async function insert (cache, key, integrity, opts = {}) {
-  const { metadata, size, time } = opts
-  const bucket = bucketPath(cache, key)
-  const entry = {
-    key,
-    integrity: integrity && ssri.stringify(integrity),
-    time: time || Date.now(),
-    size,
-    metadata,
-  }
-  try {
-    await mkdir(path.dirname(bucket), { recursive: true })
-    const stringified = JSON.stringify(entry)
-    // NOTE - Cleverness ahoy!
-    //
-    // This works because it's tremendously unlikely for an entry to corrupt
-    // another while still preserving the string length of the JSON in
-    // question. So, we just slap the length in there and verify it on read.
-    //
-    // Thanks to @isaacs for the whiteboarding session that ended up with
-    // this.
-    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return undefined
-    }
-
-    throw err
-  }
-  return formatEntry(cache, entry)
-}
-
-module.exports.find = find
-
-async function find (cache, key) {
-  const bucket = bucketPath(cache, key)
-  try {
-    const entries = await bucketEntries(bucket)
-    return entries.reduce((latest, next) => {
-      if (next && next.key === key) {
-        return formatEntry(cache, next)
-      } else {
-        return latest
-      }
-    }, null)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return null
-    } else {
-      throw err
-    }
-  }
-}
-
-module.exports.delete = del
-
-function del (cache, key, opts = {}) {
-  if (!opts.removeFully) {
-    return insert(cache, key, null, opts)
-  }
-
-  const bucket = bucketPath(cache, key)
-  return rm(bucket, { recursive: true, force: true })
-}
-
-module.exports.lsStream = lsStream
-
-function lsStream (cache) {
-  const indexDir = bucketDir(cache)
-  const stream = new Minipass({ objectMode: true })
-
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const buckets = await readdirOrEmpty(indexDir)
-    await Promise.all(buckets.map(async (bucket) => {
-      const bucketPath = path.join(indexDir, bucket)
-      const subbuckets = await readdirOrEmpty(bucketPath)
-      await Promise.all(subbuckets.map(async (subbucket) => {
-        const subbucketPath = path.join(bucketPath, subbucket)
-
-        // "/cachename//./*"
-        const subbucketEntries = await readdirOrEmpty(subbucketPath)
-        await Promise.all(subbucketEntries.map(async (entry) => {
-          const entryPath = path.join(subbucketPath, entry)
-          try {
-            const entries = await bucketEntries(entryPath)
-            // using a Map deduplicates entries by key: later (newer)
-            // entries for the same key overwrite earlier ones
-            const reduced = entries.reduce((acc, entry) => {
-              acc.set(entry.key, entry)
-              return acc
-            }, new Map())
-            // reduced is a map of key => entry
-            for (const entry of reduced.values()) {
-              const formatted = formatEntry(cache, entry)
-              if (formatted) {
-                stream.write(formatted)
-              }
-            }
-          } catch (err) {
-            if (err.code === 'ENOENT') {
-              return undefined
-            }
-            throw err
-          }
-        }))
-      }))
-    }))
-    stream.end()
-    return stream
-  }).catch(err => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.ls = ls
-
-async function ls (cache) {
-  const entries = await lsStream(cache).collect()
-  return entries.reduce((acc, xs) => {
-    acc[xs.key] = xs
-    return acc
-  }, {})
-}
-
-module.exports.bucketEntries = bucketEntries
-
-async function bucketEntries (bucket, filter) {
-  const data = await readFile(bucket, 'utf8')
-  return _bucketEntries(data, filter)
-}
-
-function _bucketEntries (data, filter) {
-  const entries = []
-  data.split('\n').forEach((entry) => {
-    if (!entry) {
-      return
-    }
-
-    const pieces = entry.split('\t')
-    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
-      // Hash is no good! Corruption or malice? Doesn't matter!
-      // EJECT EJECT
-      return
-    }
-    let obj
-    try {
-      obj = JSON.parse(pieces[1])
-    } catch (_) {
-      // ignore malformed JSON; falsey entries are skipped below
-    }
-    // coverage disabled here, no need to test with an entry that parses to something falsey
-    // istanbul ignore else
-    if (obj) {
-      entries.push(obj)
-    }
-  })
-  return entries
-}
-
-module.exports.bucketDir = bucketDir
-
-function bucketDir (cache) {
-  return path.join(cache, `index-v${indexV}`)
-}
-
-module.exports.bucketPath = bucketPath
-
-function bucketPath (cache, key) {
-  const hashed = hashKey(key)
-  return path.join.apply(
-    path,
-    [bucketDir(cache)].concat(hashToSegments(hashed))
-  )
-}
-
-module.exports.hashKey = hashKey
-
-function hashKey (key) {
-  return hash(key, 'sha256')
-}
-
-module.exports.hashEntry = hashEntry
-
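-// sha1 is sufficient for entry lines: content integrity is enforced
-// separately by ssri, this hash only detects torn or corrupted index writes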
-function hashEntry (str) {
-  return hash(str, 'sha1')
-}
-
-function hash (str, digest) {
-  return crypto
-    .createHash(digest)
-    .update(str)
-    .digest('hex')
-}
-
-function formatEntry (cache, entry, keepAll) {
-  // Treat null digests as deletions. They'll shadow any previous entries.
-  if (!entry.integrity && !keepAll) {
-    return null
-  }
-
-  return {
-    key: entry.key,
-    integrity: entry.integrity,
-    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
-    size: entry.size,
-    time: entry.time,
-    metadata: entry.metadata,
-  }
-}
-
-function readdirOrEmpty (dir) {
-  return readdir(dir).catch((err) => {
-    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
-      return []
-    }
-
-    throw err
-  })
-}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/get.js b/node_modules/sigstore/node_modules/cacache/lib/get.js
deleted file mode 100644
index 80ec206c7ecaa..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/lib/get.js
+++ /dev/null
@@ -1,170 +0,0 @@
-'use strict'
-
-const Collect = require('minipass-collect')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const read = require('./content/read')
-
-async function getData (cache, key, opts = {}) {
-  const { integrity, memoize, size } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return {
-      metadata: memoized.entry.metadata,
-      data: memoized.data,
-      integrity: memoized.entry.integrity,
-      size: memoized.entry.size,
-    }
-  }
-
-  const entry = await index.find(cache, key, opts)
-  if (!entry) {
-    throw new index.NotFoundError(cache, key)
-  }
-  const data = await read(cache, entry.integrity, { integrity, size })
-  if (memoize) {
-    memo.put(cache, entry, data, opts)
-  }
-
-  return {
-    data,
-    metadata: entry.metadata,
-    size: entry.size,
-    integrity: entry.integrity,
-  }
-}
-module.exports = getData
-
-async function getDataByDigest (cache, key, opts = {}) {
-  const { integrity, memoize, size } = opts
-  const memoized = memo.get.byDigest(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return memoized
-  }
-
-  const res = await read(cache, key, { integrity, size })
-  if (memoize) {
-    memo.put.byDigest(cache, key, res, opts)
-  }
-  return res
-}
-module.exports.byDigest = getDataByDigest
-
-const getMemoizedStream = (memoized) => {
-  const stream = new Minipass()
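-  // replay metadata/integrity/size to late subscribers so a memoized result
-  // behaves like a fresh read stream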
-  stream.on('newListener', function (ev, cb) {
-    ev === 'metadata' && cb(memoized.entry.metadata)
-    ev === 'integrity' && cb(memoized.entry.integrity)
-    ev === 'size' && cb(memoized.entry.size)
-  })
-  stream.end(memoized.data)
-  return stream
-}
-
-function getStream (cache, key, opts = {}) {
-  const { memoize, size } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return getMemoizedStream(memoized)
-  }
-
-  const stream = new Pipeline()
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const entry = await index.find(cache, key)
-    if (!entry) {
-      throw new index.NotFoundError(cache, key)
-    }
-
-    stream.emit('metadata', entry.metadata)
-    stream.emit('integrity', entry.integrity)
-    stream.emit('size', entry.size)
-    stream.on('newListener', function (ev, cb) {
-      ev === 'metadata' && cb(entry.metadata)
-      ev === 'integrity' && cb(entry.integrity)
-      ev === 'size' && cb(entry.size)
-    })
-
-    const src = read.readStream(
-      cache,
-      entry.integrity,
-      { ...opts, size: typeof size !== 'number' ? entry.size : size }
-    )
-
-    if (memoize) {
-      const memoStream = new Collect.PassThrough()
-      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
-      stream.unshift(memoStream)
-    }
-    stream.unshift(src)
-    return stream
-  }).catch((err) => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.stream = getStream
-
-function getStreamDigest (cache, integrity, opts = {}) {
-  const { memoize } = opts
-  const memoized = memo.get.byDigest(cache, integrity, opts)
-  if (memoized && memoize !== false) {
-    const stream = new Minipass()
-    stream.end(memoized)
-    return stream
-  } else {
-    const stream = read.readStream(cache, integrity, opts)
-    if (!memoize) {
-      return stream
-    }
-
-    const memoStream = new Collect.PassThrough()
-    memoStream.on('collect', data => memo.put.byDigest(
-      cache,
-      integrity,
-      data,
-      opts
-    ))
-    return new Pipeline(stream, memoStream)
-  }
-}
-
-module.exports.stream.byDigest = getStreamDigest
-
-function info (cache, key, opts = {}) {
-  const { memoize } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return Promise.resolve(memoized.entry)
-  } else {
-    return index.find(cache, key)
-  }
-}
-module.exports.info = info
-
-async function copy (cache, key, dest, opts = {}) {
-  const entry = await index.find(cache, key, opts)
-  if (!entry) {
-    throw new index.NotFoundError(cache, key)
-  }
-  await read.copy(cache, entry.integrity, dest, opts)
-  return {
-    metadata: entry.metadata,
-    size: entry.size,
-    integrity: entry.integrity,
-  }
-}
-
-module.exports.copy = copy
-
-async function copyByDigest (cache, key, dest, opts = {}) {
-  await read.copy(cache, key, dest, opts)
-  return key
-}
-
-module.exports.copy.byDigest = copyByDigest
-
-module.exports.hasContent = read.hasContent
diff --git a/node_modules/sigstore/node_modules/cacache/lib/index.js b/node_modules/sigstore/node_modules/cacache/lib/index.js
deleted file mode 100644
index c9b0da5f3a271..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/lib/index.js
+++ /dev/null
@@ -1,42 +0,0 @@
-'use strict'
-
-const get = require('./get.js')
-const put = require('./put.js')
-const rm = require('./rm.js')
-const verify = require('./verify.js')
-const { clearMemoized } = require('./memoization.js')
-const tmp = require('./util/tmp.js')
-const index = require('./entry-index.js')
-
-module.exports.index = {}
-module.exports.index.compact = index.compact
-module.exports.index.insert = index.insert
-
-module.exports.ls = index.ls
-module.exports.ls.stream = index.lsStream
-
-module.exports.get = get
-module.exports.get.byDigest = get.byDigest
-module.exports.get.stream = get.stream
-module.exports.get.stream.byDigest = get.stream.byDigest
-module.exports.get.copy = get.copy
-module.exports.get.copy.byDigest = get.copy.byDigest
-module.exports.get.info = get.info
-module.exports.get.hasContent = get.hasContent
-
-module.exports.put = put
-module.exports.put.stream = put.stream
-
-module.exports.rm = rm.entry
-module.exports.rm.all = rm.all
-module.exports.rm.entry = module.exports.rm
-module.exports.rm.content = rm.content
-
-module.exports.clearMemoized = clearMemoized
-
-module.exports.tmp = {}
-module.exports.tmp.mkdir = tmp.mkdir
-module.exports.tmp.withTmp = tmp.withTmp
-
-module.exports.verify = verify
-module.exports.verify.lastRun = verify.lastRun
diff --git a/node_modules/sigstore/node_modules/cacache/lib/memoization.js b/node_modules/sigstore/node_modules/cacache/lib/memoization.js
deleted file mode 100644
index 0ff604a479c9c..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/lib/memoization.js
+++ /dev/null
@@ -1,72 +0,0 @@
-'use strict'
-
-const LRU = require('lru-cache')
-
-const MEMOIZED = new LRU({
-  max: 500,
-  maxSize: 50 * 1024 * 1024, // 50MB
-  ttl: 3 * 60 * 1000, // 3 minutes
-  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
-})
-
-module.exports.clearMemoized = clearMemoized
-
-function clearMemoized () {
-  const old = {}
-  MEMOIZED.forEach((v, k) => {
-    old[k] = v
-  })
-  MEMOIZED.clear()
-  return old
-}
-
-module.exports.put = put
-
-function put (cache, entry, data, opts) {
-  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
-  putDigest(cache, entry.integrity, data, opts)
-}
-
-module.exports.put.byDigest = putDigest
-
-function putDigest (cache, integrity, data, opts) {
-  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
-}
-
-module.exports.get = get
-
-function get (cache, key, opts) {
-  return pickMem(opts).get(`key:${cache}:${key}`)
-}
-
-module.exports.get.byDigest = getDigest
-
-function getDigest (cache, integrity, opts) {
-  return pickMem(opts).get(`digest:${cache}:${integrity}`)
-}
-
-class ObjProxy {
-  constructor (obj) {
-    this.obj = obj
-  }
-
-  get (key) {
-    return this.obj[key]
-  }
-
-  set (key, val) {
-    this.obj[key] = val
-  }
-}
-
-function pickMem (opts) {
-  if (!opts || !opts.memoize) {
-    return MEMOIZED
-  } else if (opts.memoize.get && opts.memoize.set) {
-    return opts.memoize
-  } else if (typeof opts.memoize === 'object') {
-    return new ObjProxy(opts.memoize)
-  } else {
-    return MEMOIZED
-  }
-}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/put.js b/node_modules/sigstore/node_modules/cacache/lib/put.js
deleted file mode 100644
index 9fc932d5f6dec..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/lib/put.js
+++ /dev/null
@@ -1,80 +0,0 @@
-'use strict'
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const write = require('./content/write')
-const Flush = require('minipass-flush')
-const { PassThrough } = require('minipass-collect')
-const Pipeline = require('minipass-pipeline')
-
-const putOpts = (opts) => ({
-  algorithms: ['sha512'],
-  ...opts,
-})
-
-module.exports = putData
-
-async function putData (cache, key, data, opts = {}) {
-  const { memoize } = opts
-  opts = putOpts(opts)
-  const res = await write(cache, data, opts)
-  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
-  if (memoize) {
-    memo.put(cache, entry, data, opts)
-  }
-
-  return res.integrity
-}
-
-module.exports.stream = putStream
-
-function putStream (cache, key, opts = {}) {
-  const { memoize } = opts
-  opts = putOpts(opts)
-  let integrity
-  let size
-  let error
-
-  let memoData
-  const pipeline = new Pipeline()
-  // first item in the pipeline is the memoizer, because we need
-  // that to end first and get the collected data.
-  if (memoize) {
-    const memoizer = new PassThrough().on('collect', data => {
-      memoData = data
-    })
-    pipeline.push(memoizer)
-  }
-
-  // contentStream is write-only, not a passthrough;
-  // no data comes out of it.
-  const contentStream = write.stream(cache, opts)
-    .on('integrity', (int) => {
-      integrity = int
-    })
-    .on('size', (s) => {
-      size = s
-    })
-    .on('error', (err) => {
-      error = err
-    })
-
-  pipeline.push(contentStream)
-
-  // last but not least, we write the index and emit hash and size,
-  // and memoize if we're doing that
-  pipeline.push(new Flush({
-    async flush () {
-      if (!error) {
-        const entry = await index.insert(cache, key, integrity, { ...opts, size })
-        if (memoize && memoData) {
-          memo.put(cache, entry, memoData, opts)
-        }
-        pipeline.emit('integrity', integrity)
-        pipeline.emit('size', size)
-      }
-    },
-  }))
-
-  return pipeline
-}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/rm.js b/node_modules/sigstore/node_modules/cacache/lib/rm.js
deleted file mode 100644
index a94760c7cf243..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/lib/rm.js
+++ /dev/null
@@ -1,31 +0,0 @@
-'use strict'
-
-const { rm } = require('fs/promises')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const memo = require('./memoization')
-const path = require('path')
-const rmContent = require('./content/rm')
-
-module.exports = entry
-module.exports.entry = entry
-
-function entry (cache, key, opts) {
-  memo.clearMemoized()
-  return index.delete(cache, key, opts)
-}
-
-module.exports.content = content
-
-function content (cache, integrity) {
-  memo.clearMemoized()
-  return rmContent(cache, integrity)
-}
-
-module.exports.all = all
-
-async function all (cache) {
-  memo.clearMemoized()
-  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
-  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
-}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/util/glob.js b/node_modules/sigstore/node_modules/cacache/lib/util/glob.js
deleted file mode 100644
index 8500c1c16a429..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/lib/util/glob.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-const { glob } = require('glob')
-const path = require('path')
-
-const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
-module.exports = (path, options) => glob(globify(path), options)
diff --git a/node_modules/sigstore/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/sigstore/node_modules/cacache/lib/util/hash-to-segments.js
deleted file mode 100644
index 445599b503808..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/lib/util/hash-to-segments.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-module.exports = hashToSegments
-
-function hashToSegments (hash) {
-  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
-}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/util/tmp.js b/node_modules/sigstore/node_modules/cacache/lib/util/tmp.js
deleted file mode 100644
index 0bf5302136ebe..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/lib/util/tmp.js
+++ /dev/null
@@ -1,26 +0,0 @@
-'use strict'
-
-const { withTempDir } = require('@npmcli/fs')
-const fs = require('fs/promises')
-const path = require('path')
-
-module.exports.mkdir = mktmpdir
-
-async function mktmpdir (cache, opts = {}) {
-  const { tmpPrefix } = opts
-  const tmpDir = path.join(cache, 'tmp')
-  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
-  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
-  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
-  return fs.mkdtemp(target, { owner: 'inherit' })
-}
-
-module.exports.withTmp = withTmp
-
-function withTmp (cache, opts, cb) {
-  if (!cb) {
-    cb = opts
-    opts = {}
-  }
-  return withTempDir(path.join(cache, 'tmp'), cb, opts)
-}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/verify.js b/node_modules/sigstore/node_modules/cacache/lib/verify.js
deleted file mode 100644
index 62e85c946490f..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/lib/verify.js
+++ /dev/null
@@ -1,257 +0,0 @@
-'use strict'
-
-const {
-  mkdir,
-  readFile,
-  rm,
-  stat,
-  truncate,
-  writeFile,
-} = require('fs/promises')
-const pMap = require('p-map')
-const contentPath = require('./content/path')
-const fsm = require('fs-minipass')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const path = require('path')
-const ssri = require('ssri')
-
-const hasOwnProperty = (obj, key) =>
-  Object.prototype.hasOwnProperty.call(obj, key)
-
-const verifyOpts = (opts) => ({
-  concurrency: 20,
-  log: { silly () {} },
-  ...opts,
-})
-
-module.exports = verify
-
-async function verify (cache, opts) {
-  opts = verifyOpts(opts)
-  opts.log.silly('verify', 'verifying cache at', cache)
-
-  const steps = [
-    markStartTime,
-    fixPerms,
-    garbageCollect,
-    rebuildIndex,
-    cleanTmp,
-    writeVerifile,
-    markEndTime,
-  ]
-
-  const stats = {}
-  for (const step of steps) {
-    const label = step.name
-    const start = new Date()
-    const s = await step(cache, opts)
-    if (s) {
-      Object.keys(s).forEach((k) => {
-        stats[k] = s[k]
-      })
-    }
-    const end = new Date()
-    if (!stats.runTime) {
-      stats.runTime = {}
-    }
-    stats.runTime[label] = end - start
-  }
-  stats.runTime.total = stats.endTime - stats.startTime
-  opts.log.silly(
-    'verify',
-    'verification finished for',
-    cache,
-    'in',
-    `${stats.runTime.total}ms`
-  )
-  return stats
-}
-
-async function markStartTime (cache, opts) {
-  return { startTime: new Date() }
-}
-
-async function markEndTime (cache, opts) {
-  return { endTime: new Date() }
-}
-
-async function fixPerms (cache, opts) {
-  opts.log.silly('verify', 'fixing cache permissions')
-  await mkdir(cache, { recursive: true })
-  return null
-}
-
-// Implements a naive mark-and-sweep tracing garbage collector.
-//
-// The algorithm is basically as follows:
-// 1. Read (and filter) all index entries ("pointers")
-// 2. Mark each integrity value as "live"
-// 3. Read entire filesystem tree in `content-vX/` dir
-// 4. If content is live, verify its checksum and delete it if it fails
-// 5. If content is not marked as live, rm it.
-//
-async function garbageCollect (cache, opts) {
-  opts.log.silly('verify', 'garbage collecting content')
-  const indexStream = index.lsStream(cache)
-  const liveContent = new Set()
-  indexStream.on('data', (entry) => {
-    if (opts.filter && !opts.filter(entry)) {
-      return
-    }
-
-    // integrity is stringified, re-parse it so we can get each hash
-    const integrity = ssri.parse(entry.integrity)
-    for (const algo in integrity) {
-      liveContent.add(integrity[algo].toString())
-    }
-  })
-  await new Promise((resolve, reject) => {
-    indexStream.on('end', resolve).on('error', reject)
-  })
-  const contentDir = contentPath.contentDir(cache)
-  const files = await glob(path.join(contentDir, '**'), {
-    follow: false,
-    nodir: true,
-    nosort: true,
-  })
-  const stats = {
-    verifiedContent: 0,
-    reclaimedCount: 0,
-    reclaimedSize: 0,
-    badContentCount: 0,
-    keptSize: 0,
-  }
-  await pMap(
-    files,
-    async (f) => {
-      const split = f.split(/[/\\]/)
-      const digest = split.slice(split.length - 3).join('')
-      const algo = split[split.length - 4]
-      const integrity = ssri.fromHex(digest, algo)
-      if (liveContent.has(integrity.toString())) {
-        const info = await verifyContent(f, integrity)
-        if (!info.valid) {
-          stats.reclaimedCount++
-          stats.badContentCount++
-          stats.reclaimedSize += info.size
-        } else {
-          stats.verifiedContent++
-          stats.keptSize += info.size
-        }
-      } else {
-        // No entries refer to this content. We can delete.
-        stats.reclaimedCount++
-        const s = await stat(f)
-        await rm(f, { recursive: true, force: true })
-        stats.reclaimedSize += s.size
-      }
-      return stats
-    },
-    { concurrency: opts.concurrency }
-  )
-  return stats
-}
-
-async function verifyContent (filepath, sri) {
-  const contentInfo = {}
-  try {
-    const { size } = await stat(filepath)
-    contentInfo.size = size
-    contentInfo.valid = true
-    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return { size: 0, valid: false }
-    }
-    if (err.code !== 'EINTEGRITY') {
-      throw err
-    }
-
-    await rm(filepath, { recursive: true, force: true })
-    contentInfo.valid = false
-  }
-  return contentInfo
-}
-
-async function rebuildIndex (cache, opts) {
-  opts.log.silly('verify', 'rebuilding index')
-  const entries = await index.ls(cache)
-  const stats = {
-    missingContent: 0,
-    rejectedEntries: 0,
-    totalEntries: 0,
-  }
-  const buckets = {}
-  for (const k in entries) {
-    /* istanbul ignore else */
-    if (hasOwnProperty(entries, k)) {
-      const hashed = index.hashKey(k)
-      const entry = entries[k]
-      const excluded = opts.filter && !opts.filter(entry)
-      excluded && stats.rejectedEntries++
-      if (buckets[hashed] && !excluded) {
-        buckets[hashed].push(entry)
-      } else if (buckets[hashed] && excluded) {
-        // skip
-      } else if (excluded) {
-        buckets[hashed] = []
-        buckets[hashed]._path = index.bucketPath(cache, k)
-      } else {
-        buckets[hashed] = [entry]
-        buckets[hashed]._path = index.bucketPath(cache, k)
-      }
-    }
-  }
-  await pMap(
-    Object.keys(buckets),
-    (key) => {
-      return rebuildBucket(cache, buckets[key], stats, opts)
-    },
-    { concurrency: opts.concurrency }
-  )
-  return stats
-}
-
-async function rebuildBucket (cache, bucket, stats, opts) {
-  await truncate(bucket._path)
-  // This needs to be serialized because cacache explicitly
-  // lets very racy bucket conflicts clobber each other.
-  for (const entry of bucket) {
-    const content = contentPath(cache, entry.integrity)
-    try {
-      await stat(content)
-      await index.insert(cache, entry.key, entry.integrity, {
-        metadata: entry.metadata,
-        size: entry.size,
-        time: entry.time,
-      })
-      stats.totalEntries++
-    } catch (err) {
-      if (err.code === 'ENOENT') {
-        stats.rejectedEntries++
-        stats.missingContent++
-      } else {
-        throw err
-      }
-    }
-  }
-}
-
-function cleanTmp (cache, opts) {
-  opts.log.silly('verify', 'cleaning tmp directory')
-  return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
-}
-
-async function writeVerifile (cache, opts) {
-  const verifile = path.join(cache, '_lastverified')
-  opts.log.silly('verify', 'writing verifile to ' + verifile)
-  return writeFile(verifile, `${Date.now()}`)
-}
-
-module.exports.lastRun = lastRun
-
-async function lastRun (cache) {
-  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
-  return new Date(+data)
-}
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/LICENSE b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/LICENSE
deleted file mode 100644
index 97f8e32ed82e4..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/index.js b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
deleted file mode 100644
index b6cdae8eb514b..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
+++ /dev/null
@@ -1,1028 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0;
-const proc = typeof process === 'object' && process
-    ? process
-    : {
-        stdout: null,
-        stderr: null,
-    };
-const events_1 = require("events");
-const stream_1 = __importDefault(require("stream"));
-const string_decoder_1 = require("string_decoder");
-/**
- * Return true if the argument is a Minipass stream, Node stream, or something
- * else that Minipass can interact with.
- */
-const isStream = (s) => !!s &&
-    typeof s === 'object' &&
-    (s instanceof Minipass ||
-        s instanceof stream_1.default ||
-        (0, exports.isReadable)(s) ||
-        (0, exports.isWritable)(s));
-exports.isStream = isStream;
-/**
- * Return true if the argument is a valid {@link Minipass.Readable}
- */
-const isReadable = (s) => !!s &&
-    typeof s === 'object' &&
-    s instanceof events_1.EventEmitter &&
-    typeof s.pipe === 'function' &&
-    // node core Writable streams have a pipe() method, but it throws
-    s.pipe !== stream_1.default.Writable.prototype.pipe;
-exports.isReadable = isReadable;
-/**
- * Return true if the argument is a valid {@link Minipass.Writable}
- */
-const isWritable = (s) => !!s &&
-    typeof s === 'object' &&
-    s instanceof events_1.EventEmitter &&
-    typeof s.write === 'function' &&
-    typeof s.end === 'function';
-exports.isWritable = isWritable;
-const EOF = Symbol('EOF');
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
-const EMITTED_END = Symbol('emittedEnd');
-const EMITTING_END = Symbol('emittingEnd');
-const EMITTED_ERROR = Symbol('emittedError');
-const CLOSED = Symbol('closed');
-const READ = Symbol('read');
-const FLUSH = Symbol('flush');
-const FLUSHCHUNK = Symbol('flushChunk');
-const ENCODING = Symbol('encoding');
-const DECODER = Symbol('decoder');
-const FLOWING = Symbol('flowing');
-const PAUSED = Symbol('paused');
-const RESUME = Symbol('resume');
-const BUFFER = Symbol('buffer');
-const PIPES = Symbol('pipes');
-const BUFFERLENGTH = Symbol('bufferLength');
-const BUFFERPUSH = Symbol('bufferPush');
-const BUFFERSHIFT = Symbol('bufferShift');
-const OBJECTMODE = Symbol('objectMode');
-// internal event when stream is destroyed
-const DESTROYED = Symbol('destroyed');
-// internal event when stream has an error
-const ERROR = Symbol('error');
-const EMITDATA = Symbol('emitData');
-const EMITEND = Symbol('emitEnd');
-const EMITEND2 = Symbol('emitEnd2');
-const ASYNC = Symbol('async');
-const ABORT = Symbol('abort');
-const ABORTED = Symbol('aborted');
-const SIGNAL = Symbol('signal');
-const DATALISTENERS = Symbol('dataListeners');
-const DISCARDED = Symbol('discarded');
-const defer = (fn) => Promise.resolve().then(fn);
-const nodefer = (fn) => fn();
-const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
-const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
-    (!!b &&
-        typeof b === 'object' &&
-        b.constructor &&
-        b.constructor.name === 'ArrayBuffer' &&
-        b.byteLength >= 0);
-const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
-/**
- * Internal class representing a pipe to a destination stream.
- *
- * @internal
- */
-class Pipe {
-    src;
-    dest;
-    opts;
-    ondrain;
-    constructor(src, dest, opts) {
-        this.src = src;
-        this.dest = dest;
-        this.opts = opts;
-        this.ondrain = () => src[RESUME]();
-        this.dest.on('drain', this.ondrain);
-    }
-    unpipe() {
-        this.dest.removeListener('drain', this.ondrain);
-    }
-    // only here for the prototype
-    /* c8 ignore start */
-    proxyErrors(_er) { }
-    /* c8 ignore stop */
-    end() {
-        this.unpipe();
-        if (this.opts.end)
-            this.dest.end();
-    }
-}
-/**
- * Internal class representing a pipe to a destination stream where
- * errors are proxied.
- *
- * @internal
- */
-class PipeProxyErrors extends Pipe {
-    unpipe() {
-        this.src.removeListener('error', this.proxyErrors);
-        super.unpipe();
-    }
-    constructor(src, dest, opts) {
-        super(src, dest, opts);
-        this.proxyErrors = er => dest.emit('error', er);
-        src.on('error', this.proxyErrors);
-    }
-}
-const isObjectModeOptions = (o) => !!o.objectMode;
-const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
-/**
- * Main export, the Minipass class
- *
- * `RType` is the type of data emitted, defaults to Buffer
- *
- * `WType` is the type of data to be written, if RType is buffer or string,
- * then any {@link Minipass.ContiguousData} is allowed.
- *
- * `Events` is the set of event handler signatures that this object
- * will emit, see {@link Minipass.Events}
- */
-class Minipass extends events_1.EventEmitter {
-    [FLOWING] = false;
-    [PAUSED] = false;
-    [PIPES] = [];
-    [BUFFER] = [];
-    [OBJECTMODE];
-    [ENCODING];
-    [ASYNC];
-    [DECODER];
-    [EOF] = false;
-    [EMITTED_END] = false;
-    [EMITTING_END] = false;
-    [CLOSED] = false;
-    [EMITTED_ERROR] = null;
-    [BUFFERLENGTH] = 0;
-    [DESTROYED] = false;
-    [SIGNAL];
-    [ABORTED] = false;
-    [DATALISTENERS] = 0;
-    [DISCARDED] = false;
-    /**
-     * true if the stream can be written
-     */
-    writable = true;
-    /**
-     * true if the stream can be read
-     */
-    readable = true;
-    /**
-     * If `RType` is Buffer, then options do not need to be provided.
-     * Otherwise, an options object must be provided to specify either
-     * {@link Minipass.SharedOptions.objectMode} or
-     * {@link Minipass.SharedOptions.encoding}, as appropriate.
-     */
-    constructor(...args) {
-        const options = (args[0] ||
-            {});
-        super();
-        if (options.objectMode && typeof options.encoding === 'string') {
-            throw new TypeError('Encoding and objectMode may not be used together');
-        }
-        if (isObjectModeOptions(options)) {
-            this[OBJECTMODE] = true;
-            this[ENCODING] = null;
-        }
-        else if (isEncodingOptions(options)) {
-            this[ENCODING] = options.encoding;
-            this[OBJECTMODE] = false;
-        }
-        else {
-            this[OBJECTMODE] = false;
-            this[ENCODING] = null;
-        }
-        this[ASYNC] = !!options.async;
-        this[DECODER] = this[ENCODING]
-            ? new string_decoder_1.StringDecoder(this[ENCODING])
-            : null;
-        //@ts-ignore - private option for debugging and testing
-        if (options && options.debugExposeBuffer === true) {
-            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
-        }
-        //@ts-ignore - private option for debugging and testing
-        if (options && options.debugExposePipes === true) {
-            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
-        }
-        const { signal } = options;
-        if (signal) {
-            this[SIGNAL] = signal;
-            if (signal.aborted) {
-                this[ABORT]();
-            }
-            else {
-                signal.addEventListener('abort', () => this[ABORT]());
-            }
-        }
-    }
-    /**
-     * The amount of data stored in the buffer waiting to be read.
-     *
-     * For Buffer strings, this will be the total byte length.
-     * For string encoding streams, this will be the string character length,
-     * according to JavaScript's `string.length` logic.
-     * For objectMode streams, this is a count of the items waiting to be
-     * emitted.
-     */
-    get bufferLength() {
-        return this[BUFFERLENGTH];
-    }
-    /**
-     * The `BufferEncoding` currently in use, or `null`
-     */
-    get encoding() {
-        return this[ENCODING];
-    }
-    /**
-     * @deprecated - This is a read only property
-     */
-    set encoding(_enc) {
-        throw new Error('Encoding must be set at instantiation time');
-    }
-    /**
-     * @deprecated - Encoding may only be set at instantiation time
-     */
-    setEncoding(_enc) {
-        throw new Error('Encoding must be set at instantiation time');
-    }
-    /**
-     * True if this is an objectMode stream
-     */
-    get objectMode() {
-        return this[OBJECTMODE];
-    }
-    /**
-     * @deprecated - This is a read-only property
-     */
-    set objectMode(_om) {
-        throw new Error('objectMode must be set at instantiation time');
-    }
-    /**
-     * true if this is an async stream
-     */
-    get ['async']() {
-        return this[ASYNC];
-    }
-    /**
-     * Set to true to make this stream async.
-     *
-     * Once set, it cannot be unset, as this would potentially cause incorrect
-     * behavior.  Ie, a sync stream can be made async, but an async stream
-     * cannot be safely made sync.
-     */
-    set ['async'](a) {
-        this[ASYNC] = this[ASYNC] || !!a;
-    }
-    // drop everything and get out of the flow completely
-    [ABORT]() {
-        this[ABORTED] = true;
-        this.emit('abort', this[SIGNAL]?.reason);
-        this.destroy(this[SIGNAL]?.reason);
-    }
-    /**
-     * True if the stream has been aborted.
-     */
-    get aborted() {
-        return this[ABORTED];
-    }
-    /**
-     * No-op setter. Stream aborted status is set via the AbortSignal provided
-     * in the constructor options.
-     */
-    set aborted(_) { }
-    write(chunk, encoding, cb) {
-        if (this[ABORTED])
-            return false;
-        if (this[EOF])
-            throw new Error('write after end');
-        if (this[DESTROYED]) {
-            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
-            return true;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = 'utf8';
-        }
-        if (!encoding)
-            encoding = 'utf8';
-        const fn = this[ASYNC] ? defer : nodefer;
-        // convert array buffers and typed array views into buffers
-        // at some point in the future, we may want to do the opposite!
-        // leave strings and buffers as-is
-        // anything is only allowed if in object mode, so throw
-        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
-            if (isArrayBufferView(chunk)) {
-                //@ts-ignore - sinful unsafe type changing
-                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
-            }
-            else if (isArrayBufferLike(chunk)) {
-                //@ts-ignore - sinful unsafe type changing
-                chunk = Buffer.from(chunk);
-            }
-            else if (typeof chunk !== 'string') {
-                throw new Error('Non-contiguous data written to non-objectMode stream');
-            }
-        }
-        // handle object mode up front, since it's simpler
-        // this yields better performance, fewer checks later.
-        if (this[OBJECTMODE]) {
-            // maybe impossible?
-            /* c8 ignore start */
-            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
-                this[FLUSH](true);
-            /* c8 ignore stop */
-            if (this[FLOWING])
-                this.emit('data', chunk);
-            else
-                this[BUFFERPUSH](chunk);
-            if (this[BUFFERLENGTH] !== 0)
-                this.emit('readable');
-            if (cb)
-                fn(cb);
-            return this[FLOWING];
-        }
-        // at this point the chunk is a buffer or string
-        // don't buffer it up or send it to the decoder
-        if (!chunk.length) {
-            if (this[BUFFERLENGTH] !== 0)
-                this.emit('readable');
-            if (cb)
-                fn(cb);
-            return this[FLOWING];
-        }
-        // fast-path writing strings of same encoding to a stream with
-        // an empty buffer, skipping the buffer/decoder dance
-        if (typeof chunk === 'string' &&
-            // unless it is a string already ready for us to use
-            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
-            //@ts-ignore - sinful unsafe type change
-            chunk = Buffer.from(chunk, encoding);
-        }
-        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
-            //@ts-ignore - sinful unsafe type change
-            chunk = this[DECODER].write(chunk);
-        }
-        // Note: flushing CAN potentially switch us into not-flowing mode
-        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
-            this[FLUSH](true);
-        if (this[FLOWING])
-            this.emit('data', chunk);
-        else
-            this[BUFFERPUSH](chunk);
-        if (this[BUFFERLENGTH] !== 0)
-            this.emit('readable');
-        if (cb)
-            fn(cb);
-        return this[FLOWING];
-    }
-    /**
-     * Low-level explicit read method.
-     *
-     * In objectMode, the argument is ignored, and one item is returned if
-     * available.
-     *
-     * `n` is the number of bytes (or in the case of encoding streams,
-     * characters) to consume. If `n` is not provided, then the entire buffer
-     * is returned, or `null` is returned if no data is available.
-     *
-     * If `n` is greater than the amount of data in the internal buffer,
-     * then `null` is returned.
-     */
-    read(n) {
-        if (this[DESTROYED])
-            return null;
-        this[DISCARDED] = false;
-        if (this[BUFFERLENGTH] === 0 ||
-            n === 0 ||
-            (n && n > this[BUFFERLENGTH])) {
-            this[MAYBE_EMIT_END]();
-            return null;
-        }
-        if (this[OBJECTMODE])
-            n = null;
-        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
-            // not object mode, so if we have an encoding, then RType is string
-            // otherwise, must be Buffer
-            this[BUFFER] = [
-                (this[ENCODING]
-                    ? this[BUFFER].join('')
-                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
-            ];
-        }
-        const ret = this[READ](n || null, this[BUFFER][0]);
-        this[MAYBE_EMIT_END]();
-        return ret;
-    }
-    [READ](n, chunk) {
-        if (this[OBJECTMODE])
-            this[BUFFERSHIFT]();
-        else {
-            const c = chunk;
-            if (n === c.length || n === null)
-                this[BUFFERSHIFT]();
-            else if (typeof c === 'string') {
-                this[BUFFER][0] = c.slice(n);
-                chunk = c.slice(0, n);
-                this[BUFFERLENGTH] -= n;
-            }
-            else {
-                this[BUFFER][0] = c.subarray(n);
-                chunk = c.subarray(0, n);
-                this[BUFFERLENGTH] -= n;
-            }
-        }
-        this.emit('data', chunk);
-        if (!this[BUFFER].length && !this[EOF])
-            this.emit('drain');
-        return chunk;
-    }
-    end(chunk, encoding, cb) {
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = 'utf8';
-        }
-        if (chunk !== undefined)
-            this.write(chunk, encoding);
-        if (cb)
-            this.once('end', cb);
-        this[EOF] = true;
-        this.writable = false;
-        // if we haven't written anything, then go ahead and emit,
-        // even if we're not reading.
-        // we'll re-emit if a new 'end' listener is added anyway.
-        // This makes MP more suitable to write-only use cases.
-        if (this[FLOWING] || !this[PAUSED])
-            this[MAYBE_EMIT_END]();
-        return this;
-    }
-    // don't let the internal resume be overwritten
-    [RESUME]() {
-        if (this[DESTROYED])
-            return;
-        if (!this[DATALISTENERS] && !this[PIPES].length) {
-            this[DISCARDED] = true;
-        }
-        this[PAUSED] = false;
-        this[FLOWING] = true;
-        this.emit('resume');
-        if (this[BUFFER].length)
-            this[FLUSH]();
-        else if (this[EOF])
-            this[MAYBE_EMIT_END]();
-        else
-            this.emit('drain');
-    }
-    /**
-     * Resume the stream if it is currently in a paused state
-     *
-     * If called when there are no pipe destinations or `data` event listeners,
-     * this will place the stream in a "discarded" state, where all data will
-     * be thrown away. The discarded state is removed if a pipe destination or
-     * data handler is added, if pause() is called, or if any synchronous or
-     * asynchronous iteration is started.
-     */
-    resume() {
-        return this[RESUME]();
-    }
-    /**
-     * Pause the stream
-     */
-    pause() {
-        this[FLOWING] = false;
-        this[PAUSED] = true;
-        this[DISCARDED] = false;
-    }
-    /**
-     * true if the stream has been forcibly destroyed
-     */
-    get destroyed() {
-        return this[DESTROYED];
-    }
-    /**
-     * true if the stream is currently in a flowing state, meaning that
-     * any writes will be immediately emitted.
-     */
-    get flowing() {
-        return this[FLOWING];
-    }
-    /**
-     * true if the stream is currently in a paused state
-     */
-    get paused() {
-        return this[PAUSED];
-    }
-    [BUFFERPUSH](chunk) {
-        if (this[OBJECTMODE])
-            this[BUFFERLENGTH] += 1;
-        else
-            this[BUFFERLENGTH] += chunk.length;
-        this[BUFFER].push(chunk);
-    }
-    [BUFFERSHIFT]() {
-        if (this[OBJECTMODE])
-            this[BUFFERLENGTH] -= 1;
-        else
-            this[BUFFERLENGTH] -= this[BUFFER][0].length;
-        return this[BUFFER].shift();
-    }
-    [FLUSH](noDrain = false) {
-        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
-            this[BUFFER].length);
-        if (!noDrain && !this[BUFFER].length && !this[EOF])
-            this.emit('drain');
-    }
-    [FLUSHCHUNK](chunk) {
-        this.emit('data', chunk);
-        return this[FLOWING];
-    }
-    /**
-     * Pipe all data emitted by this stream into the destination provided.
-     *
-     * Triggers the flow of data.
-     */
-    pipe(dest, opts) {
-        if (this[DESTROYED])
-            return dest;
-        this[DISCARDED] = false;
-        const ended = this[EMITTED_END];
-        opts = opts || {};
-        if (dest === proc.stdout || dest === proc.stderr)
-            opts.end = false;
-        else
-            opts.end = opts.end !== false;
-        opts.proxyErrors = !!opts.proxyErrors;
-        // piping an ended stream ends immediately
-        if (ended) {
-            if (opts.end)
-                dest.end();
-        }
-        else {
-            // "as" here just ignores the WType, which pipes don't care about,
-            // since they're only consuming from us, and writing to the dest
-            this[PIPES].push(!opts.proxyErrors
-                ? new Pipe(this, dest, opts)
-                : new PipeProxyErrors(this, dest, opts));
-            if (this[ASYNC])
-                defer(() => this[RESUME]());
-            else
-                this[RESUME]();
-        }
-        return dest;
-    }
-    /**
-     * Fully unhook a piped destination stream.
-     *
-     * If the destination stream was the only consumer of this stream (ie,
-     * there are no other piped destinations or `'data'` event listeners)
-     * then the flow of data will stop until there is another consumer or
-     * {@link Minipass#resume} is explicitly called.
-     */
-    unpipe(dest) {
-        const p = this[PIPES].find(p => p.dest === dest);
-        if (p) {
-            if (this[PIPES].length === 1) {
-                if (this[FLOWING] && this[DATALISTENERS] === 0) {
-                    this[FLOWING] = false;
-                }
-                this[PIPES] = [];
-            }
-            else
-                this[PIPES].splice(this[PIPES].indexOf(p), 1);
-            p.unpipe();
-        }
-    }
-    /**
-     * Alias for {@link Minipass#on}
-     */
-    addListener(ev, handler) {
-        return this.on(ev, handler);
-    }
-    /**
-     * Mostly identical to `EventEmitter.on`, with the following
-     * behavior differences to prevent data loss and unnecessary hangs:
-     *
-     * - Adding a 'data' event handler will trigger the flow of data
-     *
-     * - Adding a 'readable' event handler when there is data waiting to be read
-     *   will cause 'readable' to be emitted immediately.
-     *
-     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
-     *   already passed will cause the event to be emitted immediately and all
-     *   handlers removed.
-     *
-     * - Adding an 'error' event handler after an error has been emitted will
-     *   cause the event to be re-emitted immediately with the error previously
-     *   raised.
-     */
-    on(ev, handler) {
-        const ret = super.on(ev, handler);
-        if (ev === 'data') {
-            this[DISCARDED] = false;
-            this[DATALISTENERS]++;
-            if (!this[PIPES].length && !this[FLOWING]) {
-                this[RESUME]();
-            }
-        }
-        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
-            super.emit('readable');
-        }
-        else if (isEndish(ev) && this[EMITTED_END]) {
-            super.emit(ev);
-            this.removeAllListeners(ev);
-        }
-        else if (ev === 'error' && this[EMITTED_ERROR]) {
-            const h = handler;
-            if (this[ASYNC])
-                defer(() => h.call(this, this[EMITTED_ERROR]));
-            else
-                h.call(this, this[EMITTED_ERROR]);
-        }
-        return ret;
-    }
-    /**
-     * Alias for {@link Minipass#off}
-     */
-    removeListener(ev, handler) {
-        return this.off(ev, handler);
-    }
-    /**
-     * Mostly identical to `EventEmitter.off`
-     *
-     * If a 'data' event handler is removed, and it was the last consumer
-     * (ie, there are no pipe destinations or other 'data' event listeners),
-     * then the flow of data will stop until there is another consumer or
-     * {@link Minipass#resume} is explicitly called.
-     */
-    off(ev, handler) {
-        const ret = super.off(ev, handler);
-        // if we previously had listeners, and now we don't, and we don't
-        // have any pipes, then stop the flow, unless it's been explicitly
-        // put in a discarded flowing state via stream.resume().
-        if (ev === 'data') {
-            this[DATALISTENERS] = this.listeners('data').length;
-            if (this[DATALISTENERS] === 0 &&
-                !this[DISCARDED] &&
-                !this[PIPES].length) {
-                this[FLOWING] = false;
-            }
-        }
-        return ret;
-    }
-    /**
-     * Mostly identical to `EventEmitter.removeAllListeners`
-     *
-     * If all 'data' event handlers are removed, and they were the last consumer
-     * (ie, there are no pipe destinations), then the flow of data will stop
-     * until there is another consumer or {@link Minipass#resume} is explicitly
-     * called.
-     */
-    removeAllListeners(ev) {
-        const ret = super.removeAllListeners(ev);
-        if (ev === 'data' || ev === undefined) {
-            this[DATALISTENERS] = 0;
-            if (!this[DISCARDED] && !this[PIPES].length) {
-                this[FLOWING] = false;
-            }
-        }
-        return ret;
-    }
-    /**
-     * true if the 'end' event has been emitted
-     */
-    get emittedEnd() {
-        return this[EMITTED_END];
-    }
-    [MAYBE_EMIT_END]() {
-        if (!this[EMITTING_END] &&
-            !this[EMITTED_END] &&
-            !this[DESTROYED] &&
-            this[BUFFER].length === 0 &&
-            this[EOF]) {
-            this[EMITTING_END] = true;
-            this.emit('end');
-            this.emit('prefinish');
-            this.emit('finish');
-            if (this[CLOSED])
-                this.emit('close');
-            this[EMITTING_END] = false;
-        }
-    }
-    /**
-     * Mostly identical to `EventEmitter.emit`, with the following
-     * behavior differences to prevent data loss and unnecessary hangs:
-     *
-     * If the stream has been destroyed, and the event is something other
-     * than 'close' or 'error', then `false` is returned and no handlers
-     * are called.
-     *
-     * If the event is 'end', and has already been emitted, then the event
-     * is ignored. If the stream is in a paused or non-flowing state, then
-     * the event will be deferred until data flow resumes. If the stream is
-     * async, then handlers will be called on the next tick rather than
-     * immediately.
-     *
-     * If the event is 'close', and 'end' has not yet been emitted, then
-     * the event will be deferred until after 'end' is emitted.
-     *
-     * If the event is 'error', and an AbortSignal was provided for the stream,
-     * and there are no listeners, then the event is ignored, matching the
-     * behavior of node core streams in the presence of an AbortSignal.
-     *
-     * If the event is 'finish' or 'prefinish', then all listeners will be
-     * removed after emitting the event, to prevent double-firing.
-     */
-    emit(ev, ...args) {
-        const data = args[0];
-        // error and close are the only events allowed after calling destroy()
-        if (ev !== 'error' &&
-            ev !== 'close' &&
-            ev !== DESTROYED &&
-            this[DESTROYED]) {
-            return false;
-        }
-        else if (ev === 'data') {
-            return !this[OBJECTMODE] && !data
-                ? false
-                : this[ASYNC]
-                    ? (defer(() => this[EMITDATA](data)), true)
-                    : this[EMITDATA](data);
-        }
-        else if (ev === 'end') {
-            return this[EMITEND]();
-        }
-        else if (ev === 'close') {
-            this[CLOSED] = true;
-            // don't emit close before 'end' and 'finish'
-            if (!this[EMITTED_END] && !this[DESTROYED])
-                return false;
-            const ret = super.emit('close');
-            this.removeAllListeners('close');
-            return ret;
-        }
-        else if (ev === 'error') {
-            this[EMITTED_ERROR] = data;
-            super.emit(ERROR, data);
-            const ret = !this[SIGNAL] || this.listeners('error').length
-                ? super.emit('error', data)
-                : false;
-            this[MAYBE_EMIT_END]();
-            return ret;
-        }
-        else if (ev === 'resume') {
-            const ret = super.emit('resume');
-            this[MAYBE_EMIT_END]();
-            return ret;
-        }
-        else if (ev === 'finish' || ev === 'prefinish') {
-            const ret = super.emit(ev);
-            this.removeAllListeners(ev);
-            return ret;
-        }
-        // Some other unknown event
-        const ret = super.emit(ev, ...args);
-        this[MAYBE_EMIT_END]();
-        return ret;
-    }
-    [EMITDATA](data) {
-        for (const p of this[PIPES]) {
-            if (p.dest.write(data) === false)
-                this.pause();
-        }
-        const ret = this[DISCARDED] ? false : super.emit('data', data);
-        this[MAYBE_EMIT_END]();
-        return ret;
-    }
-    [EMITEND]() {
-        if (this[EMITTED_END])
-            return false;
-        this[EMITTED_END] = true;
-        this.readable = false;
-        return this[ASYNC]
-            ? (defer(() => this[EMITEND2]()), true)
-            : this[EMITEND2]();
-    }
-    [EMITEND2]() {
-        if (this[DECODER]) {
-            const data = this[DECODER].end();
-            if (data) {
-                for (const p of this[PIPES]) {
-                    p.dest.write(data);
-                }
-                if (!this[DISCARDED])
-                    super.emit('data', data);
-            }
-        }
-        for (const p of this[PIPES]) {
-            p.end();
-        }
-        const ret = super.emit('end');
-        this.removeAllListeners('end');
-        return ret;
-    }
-    /**
-     * Return a Promise that resolves to an array of all emitted data once
-     * the stream ends.
-     */
-    async collect() {
-        const buf = Object.assign([], {
-            dataLength: 0,
-        });
-        if (!this[OBJECTMODE])
-            buf.dataLength = 0;
-        // set the promise first, in case an error is raised
-        // by triggering the flow here.
-        const p = this.promise();
-        this.on('data', c => {
-            buf.push(c);
-            if (!this[OBJECTMODE])
-                buf.dataLength += c.length;
-        });
-        await p;
-        return buf;
-    }
-    /**
-     * Return a Promise that resolves to the concatenation of all emitted data
-     * once the stream ends.
-     *
-     * Not allowed on objectMode streams.
-     */
-    async concat() {
-        if (this[OBJECTMODE]) {
-            throw new Error('cannot concat in objectMode');
-        }
-        const buf = await this.collect();
-        return (this[ENCODING]
-            ? buf.join('')
-            : Buffer.concat(buf, buf.dataLength));
-    }
-    /**
-     * Return a void Promise that resolves once the stream ends.
-     */
-    async promise() {
-        return new Promise((resolve, reject) => {
-            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
-            this.on('error', er => reject(er));
-            this.on('end', () => resolve());
-        });
-    }
-    /**
-     * Asynchronous `for await of` iteration.
-     *
-     * This will continue emitting all chunks until the stream terminates.
-     */
-    [Symbol.asyncIterator]() {
-        // set this up front, in case the consumer doesn't call next()
-        // right away.
-        this[DISCARDED] = false;
-        let stopped = false;
-        const stop = async () => {
-            this.pause();
-            stopped = true;
-            return { value: undefined, done: true };
-        };
-        const next = () => {
-            if (stopped)
-                return stop();
-            const res = this.read();
-            if (res !== null)
-                return Promise.resolve({ done: false, value: res });
-            if (this[EOF])
-                return stop();
-            let resolve;
-            let reject;
-            const onerr = (er) => {
-                this.off('data', ondata);
-                this.off('end', onend);
-                this.off(DESTROYED, ondestroy);
-                stop();
-                reject(er);
-            };
-            const ondata = (value) => {
-                this.off('error', onerr);
-                this.off('end', onend);
-                this.off(DESTROYED, ondestroy);
-                this.pause();
-                resolve({ value, done: !!this[EOF] });
-            };
-            const onend = () => {
-                this.off('error', onerr);
-                this.off('data', ondata);
-                this.off(DESTROYED, ondestroy);
-                stop();
-                resolve({ done: true, value: undefined });
-            };
-            const ondestroy = () => onerr(new Error('stream destroyed'));
-            return new Promise((res, rej) => {
-                reject = rej;
-                resolve = res;
-                this.once(DESTROYED, ondestroy);
-                this.once('error', onerr);
-                this.once('end', onend);
-                this.once('data', ondata);
-            });
-        };
-        return {
-            next,
-            throw: stop,
-            return: stop,
-            [Symbol.asyncIterator]() {
-                return this;
-            },
-        };
-    }
-    /**
-     * Synchronous `for of` iteration.
-     *
-     * The iteration will terminate when the internal buffer runs out, even
-     * if the stream has not yet terminated.
-     */
-    [Symbol.iterator]() {
-        // set this up front, in case the consumer doesn't call next()
-        // right away.
-        this[DISCARDED] = false;
-        let stopped = false;
-        const stop = () => {
-            this.pause();
-            this.off(ERROR, stop);
-            this.off(DESTROYED, stop);
-            this.off('end', stop);
-            stopped = true;
-            return { done: true, value: undefined };
-        };
-        const next = () => {
-            if (stopped)
-                return stop();
-            const value = this.read();
-            return value === null ? stop() : { done: false, value };
-        };
-        this.once('end', stop);
-        this.once(ERROR, stop);
-        this.once(DESTROYED, stop);
-        return {
-            next,
-            throw: stop,
-            return: stop,
-            [Symbol.iterator]() {
-                return this;
-            },
-        };
-    }
-    /**
-     * Destroy a stream, preventing it from being used for any further purpose.
-     *
-     * If the stream has a `close()` method, then it will be called on
-     * destruction.
-     *
-     * After destruction, any attempt to write data, read data, or emit most
-     * events will be ignored.
-     *
-     * If an error argument is provided, then it will be emitted in an
-     * 'error' event.
-     */
-    destroy(er) {
-        if (this[DESTROYED]) {
-            if (er)
-                this.emit('error', er);
-            else
-                this.emit(DESTROYED);
-            return this;
-        }
-        this[DESTROYED] = true;
-        this[DISCARDED] = true;
-        // throw away all buffered data, it's never coming out
-        this[BUFFER].length = 0;
-        this[BUFFERLENGTH] = 0;
-        const wc = this;
-        if (typeof wc.close === 'function' && !this[CLOSED])
-            wc.close();
-        if (er)
-            this.emit('error', er);
-        // if no error to emit, still reject pending promises
-        else
-            this.emit(DESTROYED);
-        return this;
-    }
-    /**
-     * Alias for {@link isStream}
-     *
-     * Former export location, maintained for backwards compatibility.
-     *
-     * @deprecated
-     */
-    static get isStream() {
-        return exports.isStream;
-    }
-}
-exports.Minipass = Minipass;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/package.json b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/index.js b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
deleted file mode 100644
index b65fafbae43a4..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
+++ /dev/null
@@ -1,1018 +0,0 @@
-const proc = typeof process === 'object' && process
-    ? process
-    : {
-        stdout: null,
-        stderr: null,
-    };
-import { EventEmitter } from 'events';
-import Stream from 'stream';
-import { StringDecoder } from 'string_decoder';
-/**
- * Return true if the argument is a Minipass stream, Node stream, or something
- * else that Minipass can interact with.
- */
-export const isStream = (s) => !!s &&
-    typeof s === 'object' &&
-    (s instanceof Minipass ||
-        s instanceof Stream ||
-        isReadable(s) ||
-        isWritable(s));
-/**
- * Return true if the argument is a valid {@link Minipass.Readable}
- */
-export const isReadable = (s) => !!s &&
-    typeof s === 'object' &&
-    s instanceof EventEmitter &&
-    typeof s.pipe === 'function' &&
-    // node core Writable streams have a pipe() method, but it throws
-    s.pipe !== Stream.Writable.prototype.pipe;
-/**
- * Return true if the argument is a valid {@link Minipass.Writable}
- */
-export const isWritable = (s) => !!s &&
-    typeof s === 'object' &&
-    s instanceof EventEmitter &&
-    typeof s.write === 'function' &&
-    typeof s.end === 'function';
-const EOF = Symbol('EOF');
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
-const EMITTED_END = Symbol('emittedEnd');
-const EMITTING_END = Symbol('emittingEnd');
-const EMITTED_ERROR = Symbol('emittedError');
-const CLOSED = Symbol('closed');
-const READ = Symbol('read');
-const FLUSH = Symbol('flush');
-const FLUSHCHUNK = Symbol('flushChunk');
-const ENCODING = Symbol('encoding');
-const DECODER = Symbol('decoder');
-const FLOWING = Symbol('flowing');
-const PAUSED = Symbol('paused');
-const RESUME = Symbol('resume');
-const BUFFER = Symbol('buffer');
-const PIPES = Symbol('pipes');
-const BUFFERLENGTH = Symbol('bufferLength');
-const BUFFERPUSH = Symbol('bufferPush');
-const BUFFERSHIFT = Symbol('bufferShift');
-const OBJECTMODE = Symbol('objectMode');
-// internal event when stream is destroyed
-const DESTROYED = Symbol('destroyed');
-// internal event when stream has an error
-const ERROR = Symbol('error');
-const EMITDATA = Symbol('emitData');
-const EMITEND = Symbol('emitEnd');
-const EMITEND2 = Symbol('emitEnd2');
-const ASYNC = Symbol('async');
-const ABORT = Symbol('abort');
-const ABORTED = Symbol('aborted');
-const SIGNAL = Symbol('signal');
-const DATALISTENERS = Symbol('dataListeners');
-const DISCARDED = Symbol('discarded');
-const defer = (fn) => Promise.resolve().then(fn);
-const nodefer = (fn) => fn();
-const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
-const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
-    (!!b &&
-        typeof b === 'object' &&
-        b.constructor &&
-        b.constructor.name === 'ArrayBuffer' &&
-        b.byteLength >= 0);
-const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
-/**
- * Internal class representing a pipe to a destination stream.
- *
- * @internal
- */
-class Pipe {
-    src;
-    dest;
-    opts;
-    ondrain;
-    constructor(src, dest, opts) {
-        this.src = src;
-        this.dest = dest;
-        this.opts = opts;
-        this.ondrain = () => src[RESUME]();
-        this.dest.on('drain', this.ondrain);
-    }
-    unpipe() {
-        this.dest.removeListener('drain', this.ondrain);
-    }
-    // only here for the prototype
-    /* c8 ignore start */
-    proxyErrors(_er) { }
-    /* c8 ignore stop */
-    end() {
-        this.unpipe();
-        if (this.opts.end)
-            this.dest.end();
-    }
-}
-/**
- * Internal class representing a pipe to a destination stream where
- * errors are proxied.
- *
- * @internal
- */
-class PipeProxyErrors extends Pipe {
-    unpipe() {
-        this.src.removeListener('error', this.proxyErrors);
-        super.unpipe();
-    }
-    constructor(src, dest, opts) {
-        super(src, dest, opts);
-        this.proxyErrors = er => dest.emit('error', er);
-        src.on('error', this.proxyErrors);
-    }
-}
-const isObjectModeOptions = (o) => !!o.objectMode;
-const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
-/**
- * Main export, the Minipass class
- *
- * `RType` is the type of data emitted, defaults to Buffer
- *
- * `WType` is the type of data to be written, if RType is buffer or string,
- * then any {@link Minipass.ContiguousData} is allowed.
- *
- * `Events` is the set of event handler signatures that this object
- * will emit, see {@link Minipass.Events}
- */
-export class Minipass extends EventEmitter {
-    [FLOWING] = false;
-    [PAUSED] = false;
-    [PIPES] = [];
-    [BUFFER] = [];
-    [OBJECTMODE];
-    [ENCODING];
-    [ASYNC];
-    [DECODER];
-    [EOF] = false;
-    [EMITTED_END] = false;
-    [EMITTING_END] = false;
-    [CLOSED] = false;
-    [EMITTED_ERROR] = null;
-    [BUFFERLENGTH] = 0;
-    [DESTROYED] = false;
-    [SIGNAL];
-    [ABORTED] = false;
-    [DATALISTENERS] = 0;
-    [DISCARDED] = false;
-    /**
-     * true if the stream can be written
-     */
-    writable = true;
-    /**
-     * true if the stream can be read
-     */
-    readable = true;
-    /**
-     * If `RType` is Buffer, then options do not need to be provided.
-     * Otherwise, an options object must be provided to specify either
-     * {@link Minipass.SharedOptions.objectMode} or
-     * {@link Minipass.SharedOptions.encoding}, as appropriate.
-     */
-    constructor(...args) {
-        const options = (args[0] ||
-            {});
-        super();
-        if (options.objectMode && typeof options.encoding === 'string') {
-            throw new TypeError('Encoding and objectMode may not be used together');
-        }
-        if (isObjectModeOptions(options)) {
-            this[OBJECTMODE] = true;
-            this[ENCODING] = null;
-        }
-        else if (isEncodingOptions(options)) {
-            this[ENCODING] = options.encoding;
-            this[OBJECTMODE] = false;
-        }
-        else {
-            this[OBJECTMODE] = false;
-            this[ENCODING] = null;
-        }
-        this[ASYNC] = !!options.async;
-        this[DECODER] = this[ENCODING]
-            ? new StringDecoder(this[ENCODING])
-            : null;
-        //@ts-ignore - private option for debugging and testing
-        if (options && options.debugExposeBuffer === true) {
-            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
-        }
-        //@ts-ignore - private option for debugging and testing
-        if (options && options.debugExposePipes === true) {
-            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
-        }
-        const { signal } = options;
-        if (signal) {
-            this[SIGNAL] = signal;
-            if (signal.aborted) {
-                this[ABORT]();
-            }
-            else {
-                signal.addEventListener('abort', () => this[ABORT]());
-            }
-        }
-    }
-    /**
-     * The amount of data stored in the buffer waiting to be read.
-     *
-     * For Buffer streams, this will be the total byte length.
-     * For string encoding streams, this will be the string character length,
-     * according to JavaScript's `string.length` logic.
-     * For objectMode streams, this is a count of the items waiting to be
-     * emitted.
-     */
-    get bufferLength() {
-        return this[BUFFERLENGTH];
-    }
-    /**
-     * The `BufferEncoding` currently in use, or `null`
-     */
-    get encoding() {
-        return this[ENCODING];
-    }
-    /**
-     * @deprecated - This is a read only property
-     */
-    set encoding(_enc) {
-        throw new Error('Encoding must be set at instantiation time');
-    }
-    /**
-     * @deprecated - Encoding may only be set at instantiation time
-     */
-    setEncoding(_enc) {
-        throw new Error('Encoding must be set at instantiation time');
-    }
-    /**
-     * True if this is an objectMode stream
-     */
-    get objectMode() {
-        return this[OBJECTMODE];
-    }
-    /**
-     * @deprecated - This is a read-only property
-     */
-    set objectMode(_om) {
-        throw new Error('objectMode must be set at instantiation time');
-    }
-    /**
-     * true if this is an async stream
-     */
-    get ['async']() {
-        return this[ASYNC];
-    }
-    /**
-     * Set to true to make this stream async.
-     *
-     * Once set, it cannot be unset, as this would potentially cause incorrect
-     * behavior.  That is, a sync stream can be made async, but an async stream
-     * cannot be safely made sync.
-     */
-    set ['async'](a) {
-        this[ASYNC] = this[ASYNC] || !!a;
-    }
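-    /*
-     * A hypothetical usage sketch (not from the original file) of the
-     * async semantics above: once async, 'data' handlers run deferred.
-     *
-     *   const mp = new Minipass({ async: true })
-     *   mp.on('data', c => console.log('got', c.toString()))
-     *   mp.write('x')           // handler deferred to a microtask
-     *   console.log('written')  // logs before 'got x'
-     */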
-    // drop everything and get out of the flow completely
-    [ABORT]() {
-        this[ABORTED] = true;
-        this.emit('abort', this[SIGNAL]?.reason);
-        this.destroy(this[SIGNAL]?.reason);
-    }
-    /**
-     * True if the stream has been aborted.
-     */
-    get aborted() {
-        return this[ABORTED];
-    }
-    /**
-     * No-op setter. Stream aborted status is set via the AbortSignal provided
-     * in the constructor options.
-     */
-    set aborted(_) { }
-    write(chunk, encoding, cb) {
-        if (this[ABORTED])
-            return false;
-        if (this[EOF])
-            throw new Error('write after end');
-        if (this[DESTROYED]) {
-            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
-            return true;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = 'utf8';
-        }
-        if (!encoding)
-            encoding = 'utf8';
-        const fn = this[ASYNC] ? defer : nodefer;
-        // convert array buffers and typed array views into buffers
-        // at some point in the future, we may want to do the opposite!
-        // leave strings and buffers as-is
-        // anything else is only allowed in object mode, so throw
-        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
-            if (isArrayBufferView(chunk)) {
-                //@ts-ignore - sinful unsafe type changing
-                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
-            }
-            else if (isArrayBufferLike(chunk)) {
-                //@ts-ignore - sinful unsafe type changing
-                chunk = Buffer.from(chunk);
-            }
-            else if (typeof chunk !== 'string') {
-                throw new Error('Non-contiguous data written to non-objectMode stream');
-            }
-        }
-        // handle object mode up front, since it's simpler
-        // this yields better performance, fewer checks later.
-        if (this[OBJECTMODE]) {
-            // maybe impossible?
-            /* c8 ignore start */
-            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
-                this[FLUSH](true);
-            /* c8 ignore stop */
-            if (this[FLOWING])
-                this.emit('data', chunk);
-            else
-                this[BUFFERPUSH](chunk);
-            if (this[BUFFERLENGTH] !== 0)
-                this.emit('readable');
-            if (cb)
-                fn(cb);
-            return this[FLOWING];
-        }
-        // at this point the chunk is a buffer or string
-        // don't buffer it up or send it to the decoder
-        if (!chunk.length) {
-            if (this[BUFFERLENGTH] !== 0)
-                this.emit('readable');
-            if (cb)
-                fn(cb);
-            return this[FLOWING];
-        }
-        // fast-path writing strings of same encoding to a stream with
-        // an empty buffer, skipping the buffer/decoder dance
-        if (typeof chunk === 'string' &&
-            // unless it is a string already ready for us to use
-            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
-            //@ts-ignore - sinful unsafe type change
-            chunk = Buffer.from(chunk, encoding);
-        }
-        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
-            //@ts-ignore - sinful unsafe type change
-            chunk = this[DECODER].write(chunk);
-        }
-        // Note: flushing CAN potentially switch us into not-flowing mode
-        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
-            this[FLUSH](true);
-        if (this[FLOWING])
-            this.emit('data', chunk);
-        else
-            this[BUFFERPUSH](chunk);
-        if (this[BUFFERLENGTH] !== 0)
-            this.emit('readable');
-        if (cb)
-            fn(cb);
-        return this[FLOWING];
-    }
-    /**
-     * Low-level explicit read method.
-     *
-     * In objectMode, the argument is ignored, and one item is returned if
-     * available.
-     *
-     * `n` is the number of bytes (or in the case of encoding streams,
-     * characters) to consume. If `n` is not provided, then the entire buffer
-     * is returned, or `null` is returned if no data is available.
-     *
-     * If `n` is greater than the amount of data in the internal buffer,
-     * then `null` is returned.
-     */
-    read(n) {
-        if (this[DESTROYED])
-            return null;
-        this[DISCARDED] = false;
-        if (this[BUFFERLENGTH] === 0 ||
-            n === 0 ||
-            (n && n > this[BUFFERLENGTH])) {
-            this[MAYBE_EMIT_END]();
-            return null;
-        }
-        if (this[OBJECTMODE])
-            n = null;
-        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
-            // not object mode, so if we have an encoding, then RType is string
-            // otherwise, must be Buffer
-            this[BUFFER] = [
-                (this[ENCODING]
-                    ? this[BUFFER].join('')
-                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
-            ];
-        }
-        const ret = this[READ](n || null, this[BUFFER][0]);
-        this[MAYBE_EMIT_END]();
-        return ret;
-    }
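-    /*
-     * Hypothetical sketch of the read() contract described above:
-     *
-     *   const mp = new Minipass({ encoding: 'utf8' })
-     *   mp.write('abcdef')
-     *   mp.read(4)   // => 'abcd'
-     *   mp.read(10)  // => null, more than is currently buffered
-     *   mp.read()    // => 'ef', drains the rest
-     */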
-    [READ](n, chunk) {
-        if (this[OBJECTMODE])
-            this[BUFFERSHIFT]();
-        else {
-            const c = chunk;
-            if (n === c.length || n === null)
-                this[BUFFERSHIFT]();
-            else if (typeof c === 'string') {
-                this[BUFFER][0] = c.slice(n);
-                chunk = c.slice(0, n);
-                this[BUFFERLENGTH] -= n;
-            }
-            else {
-                this[BUFFER][0] = c.subarray(n);
-                chunk = c.subarray(0, n);
-                this[BUFFERLENGTH] -= n;
-            }
-        }
-        this.emit('data', chunk);
-        if (!this[BUFFER].length && !this[EOF])
-            this.emit('drain');
-        return chunk;
-    }
-    end(chunk, encoding, cb) {
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = 'utf8';
-        }
-        if (chunk !== undefined)
-            this.write(chunk, encoding);
-        if (cb)
-            this.once('end', cb);
-        this[EOF] = true;
-        this.writable = false;
-        // if we haven't written anything, then go ahead and emit,
-        // even if we're not reading.
-        // we'll re-emit if a new 'end' listener is added anyway.
-        // This makes MP more suitable to write-only use cases.
-        if (this[FLOWING] || !this[PAUSED])
-            this[MAYBE_EMIT_END]();
-        return this;
-    }
-    // don't let the internal resume be overwritten
-    [RESUME]() {
-        if (this[DESTROYED])
-            return;
-        if (!this[DATALISTENERS] && !this[PIPES].length) {
-            this[DISCARDED] = true;
-        }
-        this[PAUSED] = false;
-        this[FLOWING] = true;
-        this.emit('resume');
-        if (this[BUFFER].length)
-            this[FLUSH]();
-        else if (this[EOF])
-            this[MAYBE_EMIT_END]();
-        else
-            this.emit('drain');
-    }
-    /**
-     * Resume the stream if it is currently in a paused state
-     *
-     * If called when there are no pipe destinations or `data` event listeners,
-     * this will place the stream in a "discarded" state, where all data will
-     * be thrown away. The discarded state is removed if a pipe destination or
-     * data handler is added, if pause() is called, or if any synchronous or
-     * asynchronous iteration is started.
-     */
-    resume() {
-        return this[RESUME]();
-    }
-    /**
-     * Pause the stream
-     */
-    pause() {
-        this[FLOWING] = false;
-        this[PAUSED] = true;
-        this[DISCARDED] = false;
-    }
-    /**
-     * true if the stream has been forcibly destroyed
-     */
-    get destroyed() {
-        return this[DESTROYED];
-    }
-    /**
-     * true if the stream is currently in a flowing state, meaning that
-     * any writes will be immediately emitted.
-     */
-    get flowing() {
-        return this[FLOWING];
-    }
-    /**
-     * true if the stream is currently in a paused state
-     */
-    get paused() {
-        return this[PAUSED];
-    }
-    [BUFFERPUSH](chunk) {
-        if (this[OBJECTMODE])
-            this[BUFFERLENGTH] += 1;
-        else
-            this[BUFFERLENGTH] += chunk.length;
-        this[BUFFER].push(chunk);
-    }
-    [BUFFERSHIFT]() {
-        if (this[OBJECTMODE])
-            this[BUFFERLENGTH] -= 1;
-        else
-            this[BUFFERLENGTH] -= this[BUFFER][0].length;
-        return this[BUFFER].shift();
-    }
-    [FLUSH](noDrain = false) {
-        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
-            this[BUFFER].length);
-        if (!noDrain && !this[BUFFER].length && !this[EOF])
-            this.emit('drain');
-    }
-    [FLUSHCHUNK](chunk) {
-        this.emit('data', chunk);
-        return this[FLOWING];
-    }
-    /**
-     * Pipe all data emitted by this stream into the destination provided.
-     *
-     * Triggers the flow of data.
-     */
-    pipe(dest, opts) {
-        if (this[DESTROYED])
-            return dest;
-        this[DISCARDED] = false;
-        const ended = this[EMITTED_END];
-        opts = opts || {};
-        if (dest === proc.stdout || dest === proc.stderr)
-            opts.end = false;
-        else
-            opts.end = opts.end !== false;
-        opts.proxyErrors = !!opts.proxyErrors;
-        // piping an ended stream ends immediately
-        if (ended) {
-            if (opts.end)
-                dest.end();
-        }
-        else {
-            // "as" here just ignores the WType, which pipes don't care about,
-            // since they're only consuming from us, and writing to the dest
-            this[PIPES].push(!opts.proxyErrors
-                ? new Pipe(this, dest, opts)
-                : new PipeProxyErrors(this, dest, opts));
-            if (this[ASYNC])
-                defer(() => this[RESUME]());
-            else
-                this[RESUME]();
-        }
-        return dest;
-    }
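-    /*
-     * Hypothetical sketch of the pipe() options handled above (dest is
-     * assumed to be some writable stream):
-     *
-     *   const src = new Minipass()
-     *   src.pipe(process.stdout)               // opts.end forced to false
-     *   src.pipe(dest, { proxyErrors: true })  // src errors re-emitted on dest
-     */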
-    /**
-     * Fully unhook a piped destination stream.
-     *
-     * If the destination stream was the only consumer of this stream (ie,
-     * there are no other piped destinations or `'data'` event listeners)
-     * then the flow of data will stop until there is another consumer or
-     * {@link Minipass#resume} is explicitly called.
-     */
-    unpipe(dest) {
-        const p = this[PIPES].find(p => p.dest === dest);
-        if (p) {
-            if (this[PIPES].length === 1) {
-                if (this[FLOWING] && this[DATALISTENERS] === 0) {
-                    this[FLOWING] = false;
-                }
-                this[PIPES] = [];
-            }
-            else
-                this[PIPES].splice(this[PIPES].indexOf(p), 1);
-            p.unpipe();
-        }
-    }
-    /**
-     * Alias for {@link Minipass#on}
-     */
-    addListener(ev, handler) {
-        return this.on(ev, handler);
-    }
-    /**
-     * Mostly identical to `EventEmitter.on`, with the following
-     * behavior differences to prevent data loss and unnecessary hangs:
-     *
-     * - Adding a 'data' event handler will trigger the flow of data
-     *
-     * - Adding a 'readable' event handler when there is data waiting to be read
-     *   will cause 'readable' to be emitted immediately.
-     *
-     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
-     *   already passed will cause the event to be emitted immediately and all
-     *   handlers removed.
-     *
-     * - Adding an 'error' event handler after an error has been emitted will
-     *   cause the event to be re-emitted immediately with the error previously
-     *   raised.
-     */
-    on(ev, handler) {
-        const ret = super.on(ev, handler);
-        if (ev === 'data') {
-            this[DISCARDED] = false;
-            this[DATALISTENERS]++;
-            if (!this[PIPES].length && !this[FLOWING]) {
-                this[RESUME]();
-            }
-        }
-        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
-            super.emit('readable');
-        }
-        else if (isEndish(ev) && this[EMITTED_END]) {
-            super.emit(ev);
-            this.removeAllListeners(ev);
-        }
-        else if (ev === 'error' && this[EMITTED_ERROR]) {
-            const h = handler;
-            if (this[ASYNC])
-                defer(() => h.call(this, this[EMITTED_ERROR]));
-            else
-                h.call(this, this[EMITTED_ERROR]);
-        }
-        return ret;
-    }
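-    /*
-     * Hypothetical sketch of the listener behaviors above:
-     *
-     *   const mp = new Minipass({ encoding: 'utf8' })
-     *   mp.end('hello')
-     *   mp.on('data', c => console.log(c))       // starts the flow; logs 'hello'
-     *   mp.on('end', () => console.log('done'))  // fires even though 'end' passed
-     */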
-    /**
-     * Alias for {@link Minipass#off}
-     */
-    removeListener(ev, handler) {
-        return this.off(ev, handler);
-    }
-    /**
-     * Mostly identical to `EventEmitter.off`
-     *
-     * If a 'data' event handler is removed, and it was the last consumer
-     * (ie, there are no pipe destinations or other 'data' event listeners),
-     * then the flow of data will stop until there is another consumer or
-     * {@link Minipass#resume} is explicitly called.
-     */
-    off(ev, handler) {
-        const ret = super.off(ev, handler);
-        // if we previously had listeners, and now we don't, and we don't
-        // have any pipes, then stop the flow, unless it's been explicitly
-        // put in a discarded flowing state via stream.resume().
-        if (ev === 'data') {
-            this[DATALISTENERS] = this.listeners('data').length;
-            if (this[DATALISTENERS] === 0 &&
-                !this[DISCARDED] &&
-                !this[PIPES].length) {
-                this[FLOWING] = false;
-            }
-        }
-        return ret;
-    }
-    /**
-     * Mostly identical to `EventEmitter.removeAllListeners`
-     *
-     * If all 'data' event handlers are removed, and they were the last consumer
-     * (ie, there are no pipe destinations), then the flow of data will stop
-     * until there is another consumer or {@link Minipass#resume} is explicitly
-     * called.
-     */
-    removeAllListeners(ev) {
-        const ret = super.removeAllListeners(ev);
-        if (ev === 'data' || ev === undefined) {
-            this[DATALISTENERS] = 0;
-            if (!this[DISCARDED] && !this[PIPES].length) {
-                this[FLOWING] = false;
-            }
-        }
-        return ret;
-    }
-    /**
-     * true if the 'end' event has been emitted
-     */
-    get emittedEnd() {
-        return this[EMITTED_END];
-    }
-    [MAYBE_EMIT_END]() {
-        if (!this[EMITTING_END] &&
-            !this[EMITTED_END] &&
-            !this[DESTROYED] &&
-            this[BUFFER].length === 0 &&
-            this[EOF]) {
-            this[EMITTING_END] = true;
-            this.emit('end');
-            this.emit('prefinish');
-            this.emit('finish');
-            if (this[CLOSED])
-                this.emit('close');
-            this[EMITTING_END] = false;
-        }
-    }
-    /**
-     * Mostly identical to `EventEmitter.emit`, with the following
-     * behavior differences to prevent data loss and unnecessary hangs:
-     *
-     * If the stream has been destroyed, and the event is something other
-     * than 'close' or 'error', then `false` is returned and no handlers
-     * are called.
-     *
-     * If the event is 'end', and has already been emitted, then the event
-     * is ignored. If the stream is in a paused or non-flowing state, then
-     * the event will be deferred until data flow resumes. If the stream is
-     * async, then handlers will be called on the next tick rather than
-     * immediately.
-     *
-     * If the event is 'close', and 'end' has not yet been emitted, then
-     * the event will be deferred until after 'end' is emitted.
-     *
-     * If the event is 'error', and an AbortSignal was provided for the stream,
-     * and there are no listeners, then the event is ignored, matching the
-     * behavior of node core streams in the presence of an AbortSignal.
-     *
-     * If the event is 'finish' or 'prefinish', then all listeners will be
-     * removed after emitting the event, to prevent double-firing.
-     */
-    emit(ev, ...args) {
-        const data = args[0];
-        // error and close are only events allowed after calling destroy()
-        if (ev !== 'error' &&
-            ev !== 'close' &&
-            ev !== DESTROYED &&
-            this[DESTROYED]) {
-            return false;
-        }
-        else if (ev === 'data') {
-            return !this[OBJECTMODE] && !data
-                ? false
-                : this[ASYNC]
-                    ? (defer(() => this[EMITDATA](data)), true)
-                    : this[EMITDATA](data);
-        }
-        else if (ev === 'end') {
-            return this[EMITEND]();
-        }
-        else if (ev === 'close') {
-            this[CLOSED] = true;
-            // don't emit close before 'end' and 'finish'
-            if (!this[EMITTED_END] && !this[DESTROYED])
-                return false;
-            const ret = super.emit('close');
-            this.removeAllListeners('close');
-            return ret;
-        }
-        else if (ev === 'error') {
-            this[EMITTED_ERROR] = data;
-            super.emit(ERROR, data);
-            const ret = !this[SIGNAL] || this.listeners('error').length
-                ? super.emit('error', data)
-                : false;
-            this[MAYBE_EMIT_END]();
-            return ret;
-        }
-        else if (ev === 'resume') {
-            const ret = super.emit('resume');
-            this[MAYBE_EMIT_END]();
-            return ret;
-        }
-        else if (ev === 'finish' || ev === 'prefinish') {
-            const ret = super.emit(ev);
-            this.removeAllListeners(ev);
-            return ret;
-        }
-        // Some other unknown event
-        const ret = super.emit(ev, ...args);
-        this[MAYBE_EMIT_END]();
-        return ret;
-    }
-    [EMITDATA](data) {
-        for (const p of this[PIPES]) {
-            if (p.dest.write(data) === false)
-                this.pause();
-        }
-        const ret = this[DISCARDED] ? false : super.emit('data', data);
-        this[MAYBE_EMIT_END]();
-        return ret;
-    }
-    [EMITEND]() {
-        if (this[EMITTED_END])
-            return false;
-        this[EMITTED_END] = true;
-        this.readable = false;
-        return this[ASYNC]
-            ? (defer(() => this[EMITEND2]()), true)
-            : this[EMITEND2]();
-    }
-    [EMITEND2]() {
-        if (this[DECODER]) {
-            const data = this[DECODER].end();
-            if (data) {
-                for (const p of this[PIPES]) {
-                    p.dest.write(data);
-                }
-                if (!this[DISCARDED])
-                    super.emit('data', data);
-            }
-        }
-        for (const p of this[PIPES]) {
-            p.end();
-        }
-        const ret = super.emit('end');
-        this.removeAllListeners('end');
-        return ret;
-    }
-    /**
-     * Return a Promise that resolves to an array of all emitted data once
-     * the stream ends.
-     */
-    async collect() {
-        const buf = Object.assign([], {
-            dataLength: 0,
-        });
-        if (!this[OBJECTMODE])
-            buf.dataLength = 0;
-        // set the promise first, in case an error is raised
-        // by triggering the flow here.
-        const p = this.promise();
-        this.on('data', c => {
-            buf.push(c);
-            if (!this[OBJECTMODE])
-                buf.dataLength += c.length;
-        });
-        await p;
-        return buf;
-    }
-    /**
-     * Return a Promise that resolves to the concatenation of all emitted data
-     * once the stream ends.
-     *
-     * Not allowed on objectMode streams.
-     */
-    async concat() {
-        if (this[OBJECTMODE]) {
-            throw new Error('cannot concat in objectMode');
-        }
-        const buf = await this.collect();
-        return (this[ENCODING]
-            ? buf.join('')
-            : Buffer.concat(buf, buf.dataLength));
-    }
-    /**
-     * Return a void Promise that resolves once the stream ends.
-     */
-    async promise() {
-        return new Promise((resolve, reject) => {
-            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
-            this.on('error', er => reject(er));
-            this.on('end', () => resolve());
-        });
-    }
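-    /*
-     * Hypothetical sketch tying collect(), concat(), and promise() together:
-     *
-     *   const mp = new Minipass({ encoding: 'utf8' })
-     *   mp.end('hello world')
-     *   await mp.concat()  // => 'hello world'
-     *
-     *   // or, to wait for the end while discarding the data:
-     *   // mp.resume(); await mp.promise()
-     */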
-    /**
-     * Asynchronous `for await of` iteration.
-     *
-     * This will continue emitting all chunks until the stream terminates.
-     */
-    [Symbol.asyncIterator]() {
-        // set this up front, in case the consumer doesn't call next()
-        // right away.
-        this[DISCARDED] = false;
-        let stopped = false;
-        const stop = async () => {
-            this.pause();
-            stopped = true;
-            return { value: undefined, done: true };
-        };
-        const next = () => {
-            if (stopped)
-                return stop();
-            const res = this.read();
-            if (res !== null)
-                return Promise.resolve({ done: false, value: res });
-            if (this[EOF])
-                return stop();
-            let resolve;
-            let reject;
-            const onerr = (er) => {
-                this.off('data', ondata);
-                this.off('end', onend);
-                this.off(DESTROYED, ondestroy);
-                stop();
-                reject(er);
-            };
-            const ondata = (value) => {
-                this.off('error', onerr);
-                this.off('end', onend);
-                this.off(DESTROYED, ondestroy);
-                this.pause();
-                resolve({ value, done: !!this[EOF] });
-            };
-            const onend = () => {
-                this.off('error', onerr);
-                this.off('data', ondata);
-                this.off(DESTROYED, ondestroy);
-                stop();
-                resolve({ done: true, value: undefined });
-            };
-            const ondestroy = () => onerr(new Error('stream destroyed'));
-            return new Promise((res, rej) => {
-                reject = rej;
-                resolve = res;
-                this.once(DESTROYED, ondestroy);
-                this.once('error', onerr);
-                this.once('end', onend);
-                this.once('data', ondata);
-            });
-        };
-        return {
-            next,
-            throw: stop,
-            return: stop,
-            [Symbol.asyncIterator]() {
-                return this;
-            },
-        };
-    }
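-    /*
-     * Hypothetical sketch of the async iteration contract above:
-     *
-     *   const mp = new Minipass({ encoding: 'utf8' })
-     *   mp.end('one')
-     *   for await (const chunk of mp) {
-     *     console.log(chunk) // 'one'
-     *   }
-     */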
-    /**
-     * Synchronous `for of` iteration.
-     *
-     * The iteration will terminate when the internal buffer runs out, even
-     * if the stream has not yet terminated.
-     */
-    [Symbol.iterator]() {
-        // set this up front, in case the consumer doesn't call next()
-        // right away.
-        this[DISCARDED] = false;
-        let stopped = false;
-        const stop = () => {
-            this.pause();
-            this.off(ERROR, stop);
-            this.off(DESTROYED, stop);
-            this.off('end', stop);
-            stopped = true;
-            return { done: true, value: undefined };
-        };
-        const next = () => {
-            if (stopped)
-                return stop();
-            const value = this.read();
-            return value === null ? stop() : { done: false, value };
-        };
-        this.once('end', stop);
-        this.once(ERROR, stop);
-        this.once(DESTROYED, stop);
-        return {
-            next,
-            throw: stop,
-            return: stop,
-            [Symbol.iterator]() {
-                return this;
-            },
-        };
-    }
-    /**
-     * Destroy a stream, preventing it from being used for any further purpose.
-     *
-     * If the stream has a `close()` method, then it will be called on
-     * destruction.
-     *
-     * After destruction, any attempt to write data, read data, or emit most
-     * events will be ignored.
-     *
-     * If an error argument is provided, then it will be emitted in an
-     * 'error' event.
-     */
-    destroy(er) {
-        if (this[DESTROYED]) {
-            if (er)
-                this.emit('error', er);
-            else
-                this.emit(DESTROYED);
-            return this;
-        }
-        this[DESTROYED] = true;
-        this[DISCARDED] = true;
-        // throw away all buffered data, it's never coming out
-        this[BUFFER].length = 0;
-        this[BUFFERLENGTH] = 0;
-        const wc = this;
-        if (typeof wc.close === 'function' && !this[CLOSED])
-            wc.close();
-        if (er)
-            this.emit('error', er);
-        // if no error to emit, still reject pending promises
-        else
-            this.emit(DESTROYED);
-        return this;
-    }
-    /**
-     * Alias for {@link isStream}
-     *
-     * Former export location, maintained for backwards compatibility.
-     *
-     * @deprecated
-     */
-    static get isStream() {
-        return isStream;
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/package.json b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/package.json b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/package.json
deleted file mode 100644
index 6faaa247a5bc6..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/package.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
-  "name": "minipass",
-  "version": "7.0.3",
-  "description": "minimal implementation of a PassThrough stream",
-  "main": "./dist/cjs/index.js",
-  "module": "./dist/mjs/index.js",
-  "types": "./dist/cjs/index.js",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.js"
-      },
-      "require": {
-        "types": "./dist/cjs/index.d.ts",
-        "default": "./dist/cjs/index.js"
-      }
-    },
-    "./package.json": "./package.json"
-  },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "preprepare": "rm -rf dist",
-    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "c8 tap",
-    "snap": "c8 tap",
-    "format": "prettier --write . --loglevel warn",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-  },
-  "tap": {
-    "coverage": false,
-    "node-arg": [
-      "--enable-source-maps",
-      "--no-warnings",
-      "--loader",
-      "ts-node/esm"
-    ],
-    "ts": false
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "devDependencies": {
-    "@types/node": "^20.1.2",
-    "@types/tap": "^15.0.8",
-    "c8": "^7.13.0",
-    "prettier": "^2.6.2",
-    "tap": "^16.3.0",
-    "ts-node": "^10.9.1",
-    "typedoc": "^0.24.8",
-    "typescript": "^5.1.3",
-    "end-of-stream": "^1.4.0",
-    "node-abort-controller": "^3.1.1",
-    "sync-content": "^1.0.2",
-    "through2": "^2.0.3"
-  },
-  "repository": "https://github.com/isaacs/minipass",
-  "keywords": [
-    "passthrough",
-    "stream"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "ISC",
-  "engines": {
-    "node": ">=16 || 14 >=14.17"
-  }
-}
diff --git a/node_modules/sigstore/node_modules/cacache/package.json b/node_modules/sigstore/node_modules/cacache/package.json
deleted file mode 100644
index ab58cb8b7c50f..0000000000000
--- a/node_modules/sigstore/node_modules/cacache/package.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
-  "name": "cacache",
-  "version": "17.1.4",
-  "cache-version": {
-    "content": "2",
-    "index": "5"
-  },
-  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "coverage": "tap",
-    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
-    "lint": "eslint \"**/*.js\"",
-    "npmclilint": "npmcli-lint",
-    "lintfix": "npm run lint -- --fix",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "posttest": "npm run lint",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/cacache.git"
-  },
-  "keywords": [
-    "cache",
-    "caching",
-    "content-addressable",
-    "sri",
-    "sri hash",
-    "subresource integrity",
-    "cache",
-    "storage",
-    "store",
-    "file store",
-    "filesystem",
-    "disk cache",
-    "disk storage"
-  ],
-  "license": "ISC",
-  "dependencies": {
-    "@npmcli/fs": "^3.1.0",
-    "fs-minipass": "^3.0.0",
-    "glob": "^10.2.2",
-    "lru-cache": "^7.7.1",
-    "minipass": "^7.0.3",
-    "minipass-collect": "^1.0.2",
-    "minipass-flush": "^1.0.5",
-    "minipass-pipeline": "^1.2.4",
-    "p-map": "^4.0.0",
-    "ssri": "^10.0.0",
-    "tar": "^6.1.11",
-    "unique-filename": "^3.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.0.0"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "windowsCI": false,
-    "version": "4.18.0",
-    "publish": "true"
-  },
-  "author": "GitHub Inc.",
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/node_modules/sigstore/node_modules/lru-cache/LICENSE b/node_modules/sigstore/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/sigstore/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/lru-cache/index.js b/node_modules/sigstore/node_modules/lru-cache/index.js
deleted file mode 100644
index 48e99fe5e5a70..0000000000000
--- a/node_modules/sigstore/node_modules/lru-cache/index.js
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
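-/*
- * Hypothetical sketch of the selection above:
- *
- *   getUintArray(200)    // Uint8Array, indexes fit in 8 bits
- *   getUintArray(70000)  // Uint32Array
- *   getUintArray(-1)     // null, not a positive integer
- */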
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
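-  /*
-   * Hypothetical sketch of a typical bounded configuration; any one of
-   * max, maxSize, or ttl satisfies the unbounded-cache check above:
-   *
-   *   const cache = new LRUCache({
-   *     max: 500,                        // entry-count bound
-   *     maxSize: 5000000,                // size bound, needs sizeCalculation
-   *     sizeCalculation: v => v.length,
-   *     ttl: 1000 * 60 * 5,              // 5 minutes
-   *   })
-   */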
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.now + status.ttl - status.start
-      }
-    }
-
-    // debounce calls to perf.now() to at most once per ttlResolution ms,
-    // so we're not hitting that costly call repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
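-  /*
-   * Hypothetical sketch of the per-entry TTL tracking set up above:
-   *
-   *   cache.set('a', 1)                // uses the constructor-level ttl
-   *   cache.set('b', 2, { ttl: 100 })  // this entry expires after ~100ms
-   *   cache.getRemainingTTL('b')       // ms left, or 0 if the key is missing
-   */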
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation return invalid (expect positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
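-  /*
-   * Hypothetical sketch of a dump()/load() round-trip for persistence;
-   * start values are rebased between perf.now() and Date.now() as noted
-   * in the comments above:
-   *
-   *   const snapshot = cache.dump() // [[key, { value, ttl?, start?, size? }], ...]
-   *   const copy = new LRUCache({ max: cache.max })
-   *   copy.load(snapshot)
-   */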
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet
-        // it's not stale, which means this isn't a staleWhileRefetching.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-module.exports = LRUCache
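
For reference, a minimal sketch (not part of the patch) of how the class deleted above is consumed. It uses only option names that appear in the code; the URL is illustrative, and global fetch assumes Node 18+:

const LRUCache = require('lru-cache')

const cache = new LRUCache({
  max: 100,         // hard cap on entries; newIndex()/evict() enforce it
  ttl: 60 * 1000,   // entries go stale after one minute
  allowStale: true, // stale values may be served while a refresh runs
  // fetchMethod drives backgroundFetch(): the pending promise is stored in
  // valList, tagged with __staleWhileFetching and __returned until it settles.
  fetchMethod: async (key, staleValue, { signal }) => {
    const res = await fetch(`https://example.invalid/${key}`, { signal })
    return res.json()
  },
})

async function main () {
  // fetch() unifies get() and set(): a fresh hit returns the cached value;
  // a stale hit returns the stale value right away (allowStale) while the
  // background refresh replaces it.
  console.log(await cache.fetch('some-key'))
}
main().catch(console.error)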
diff --git a/node_modules/sigstore/node_modules/lru-cache/index.mjs b/node_modules/sigstore/node_modules/lru-cache/index.mjs
deleted file mode 100644
index 4a0b4813ec515..0000000000000
--- a/node_modules/sigstore/node_modules/lru-cache/index.mjs
+++ /dev/null
@@ -1,1227 +0,0 @@
-const perf =
-  typeof performance === 'object' &&
-  performance &&
-  typeof performance.now === 'function'
-    ? performance
-    : Date
-
-const hasAbortController = typeof AbortController === 'function'
-
-// minimal backwards-compatibility polyfill
-// this doesn't have nearly all the checks and whatnot that
-// actual AbortController/Signal has, but it's enough for
-// our purposes, and if used properly, behaves the same.
-const AC = hasAbortController
-  ? AbortController
-  : class AbortController {
-      constructor() {
-        this.signal = new AS()
-      }
-      abort(reason = new Error('This operation was aborted')) {
-        this.signal.reason = this.signal.reason || reason
-        this.signal.aborted = true
-        this.signal.dispatchEvent({
-          type: 'abort',
-          target: this.signal,
-        })
-      }
-    }
-
-const hasAbortSignal = typeof AbortSignal === 'function'
-// Some polyfills put this on the AC class, not global
-const hasACAbortSignal = typeof AC.AbortSignal === 'function'
-const AS = hasAbortSignal
-  ? AbortSignal
-  : hasACAbortSignal
-  ? AC.AbortSignal
-  : class AbortSignal {
-      constructor() {
-        this.reason = undefined
-        this.aborted = false
-        this._listeners = []
-      }
-      dispatchEvent(e) {
-        if (e.type === 'abort') {
-          this.aborted = true
-          this.onabort(e)
-          this._listeners.forEach(f => f(e), this)
-        }
-      }
-      onabort() {}
-      addEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners.push(fn)
-        }
-      }
-      removeEventListener(ev, fn) {
-        if (ev === 'abort') {
-          this._listeners = this._listeners.filter(f => f !== fn)
-        }
-      }
-    }
-
-const warned = new Set()
-const deprecatedOption = (opt, instead) => {
-  const code = `LRU_CACHE_OPTION_${opt}`
-  if (shouldWarn(code)) {
-    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
-  }
-}
-const deprecatedMethod = (method, instead) => {
-  const code = `LRU_CACHE_METHOD_${method}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
-    warn(code, `${method} method`, `cache.${instead}()`, get)
-  }
-}
-const deprecatedProperty = (field, instead) => {
-  const code = `LRU_CACHE_PROPERTY_${field}`
-  if (shouldWarn(code)) {
-    const { prototype } = LRUCache
-    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
-    warn(code, `${field} property`, `cache.${instead}`, get)
-  }
-}
-
-const emitWarning = (...a) => {
-  typeof process === 'object' &&
-  process &&
-  typeof process.emitWarning === 'function'
-    ? process.emitWarning(...a)
-    : console.error(...a)
-}
-
-const shouldWarn = code => !warned.has(code)
-
-const warn = (code, what, instead, fn) => {
-  warned.add(code)
-  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
-  emitWarning(msg, 'DeprecationWarning', code, fn)
-}
-
-const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
-
-/* istanbul ignore next - This is a little bit ridiculous, tbh.
- * The maximum array length is 2^32-1 or thereabouts on most JS impls.
- * And well before that point, you're caching the entire world, I mean,
- * that's ~32GB of just integers for the next/prev links, plus whatever
- * else to hold that many keys and values.  Just filling the memory with
- * zeroes at init time is brutal when you get that big.
- * But why not be complete?
- * Maybe in the future, these limits will have expanded. */
-const getUintArray = max =>
-  !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-    ? Uint8Array
-    : max <= Math.pow(2, 16)
-    ? Uint16Array
-    : max <= Math.pow(2, 32)
-    ? Uint32Array
-    : max <= Number.MAX_SAFE_INTEGER
-    ? ZeroArray
-    : null
-
-class ZeroArray extends Array {
-  constructor(size) {
-    super(size)
-    this.fill(0)
-  }
-}
-
-class Stack {
-  constructor(max) {
-    if (max === 0) {
-      return []
-    }
-    const UintArray = getUintArray(max)
-    this.heap = new UintArray(max)
-    this.length = 0
-  }
-  push(n) {
-    this.heap[this.length++] = n
-  }
-  pop() {
-    return this.heap[--this.length]
-  }
-}
-
-class LRUCache {
-  constructor(options = {}) {
-    const {
-      max = 0,
-      ttl,
-      ttlResolution = 1,
-      ttlAutopurge,
-      updateAgeOnGet,
-      updateAgeOnHas,
-      allowStale,
-      dispose,
-      disposeAfter,
-      noDisposeOnSet,
-      noUpdateTTL,
-      maxSize = 0,
-      maxEntrySize = 0,
-      sizeCalculation,
-      fetchMethod,
-      fetchContext,
-      noDeleteOnFetchRejection,
-      noDeleteOnStaleGet,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-    } = options
-
-    // deprecated options, don't trigger a warning for getting them if
-    // the thing being passed in is another LRUCache we're copying.
-    const { length, maxAge, stale } =
-      options instanceof LRUCache ? {} : options
-
-    if (max !== 0 && !isPosInt(max)) {
-      throw new TypeError('max option must be a nonnegative integer')
-    }
-
-    const UintArray = max ? getUintArray(max) : Array
-    if (!UintArray) {
-      throw new Error('invalid max value: ' + max)
-    }
-
-    this.max = max
-    this.maxSize = maxSize
-    this.maxEntrySize = maxEntrySize || this.maxSize
-    this.sizeCalculation = sizeCalculation || length
-    if (this.sizeCalculation) {
-      if (!this.maxSize && !this.maxEntrySize) {
-        throw new TypeError(
-          'cannot set sizeCalculation without setting maxSize or maxEntrySize'
-        )
-      }
-      if (typeof this.sizeCalculation !== 'function') {
-        throw new TypeError('sizeCalculation set to non-function')
-      }
-    }
-
-    this.fetchMethod = fetchMethod || null
-    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
-      throw new TypeError(
-        'fetchMethod must be a function if specified'
-      )
-    }
-
-    this.fetchContext = fetchContext
-    if (!this.fetchMethod && fetchContext !== undefined) {
-      throw new TypeError(
-        'cannot set fetchContext without fetchMethod'
-      )
-    }
-
-    this.keyMap = new Map()
-    this.keyList = new Array(max).fill(null)
-    this.valList = new Array(max).fill(null)
-    this.next = new UintArray(max)
-    this.prev = new UintArray(max)
-    this.head = 0
-    this.tail = 0
-    this.free = new Stack(max)
-    this.initialFill = 1
-    this.size = 0
-
-    if (typeof dispose === 'function') {
-      this.dispose = dispose
-    }
-    if (typeof disposeAfter === 'function') {
-      this.disposeAfter = disposeAfter
-      this.disposed = []
-    } else {
-      this.disposeAfter = null
-      this.disposed = null
-    }
-    this.noDisposeOnSet = !!noDisposeOnSet
-    this.noUpdateTTL = !!noUpdateTTL
-    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
-    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection
-    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort
-    this.ignoreFetchAbort = !!ignoreFetchAbort
-
-    // NB: maxEntrySize is set to maxSize if it's set
-    if (this.maxEntrySize !== 0) {
-      if (this.maxSize !== 0) {
-        if (!isPosInt(this.maxSize)) {
-          throw new TypeError(
-            'maxSize must be a positive integer if specified'
-          )
-        }
-      }
-      if (!isPosInt(this.maxEntrySize)) {
-        throw new TypeError(
-          'maxEntrySize must be a positive integer if specified'
-        )
-      }
-      this.initializeSizeTracking()
-    }
-
-    this.allowStale = !!allowStale || !!stale
-    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet
-    this.updateAgeOnGet = !!updateAgeOnGet
-    this.updateAgeOnHas = !!updateAgeOnHas
-    this.ttlResolution =
-      isPosInt(ttlResolution) || ttlResolution === 0
-        ? ttlResolution
-        : 1
-    this.ttlAutopurge = !!ttlAutopurge
-    this.ttl = ttl || maxAge || 0
-    if (this.ttl) {
-      if (!isPosInt(this.ttl)) {
-        throw new TypeError(
-          'ttl must be a positive integer if specified'
-        )
-      }
-      this.initializeTTLTracking()
-    }
-
-    // do not allow completely unbounded caches
-    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
-      throw new TypeError(
-        'At least one of max, maxSize, or ttl is required'
-      )
-    }
-    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
-      const code = 'LRU_CACHE_UNBOUNDED'
-      if (shouldWarn(code)) {
-        warned.add(code)
-        const msg =
-          'TTL caching without ttlAutopurge, max, or maxSize can ' +
-          'result in unbounded memory consumption.'
-        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
-      }
-    }
-
-    if (stale) {
-      deprecatedOption('stale', 'allowStale')
-    }
-    if (maxAge) {
-      deprecatedOption('maxAge', 'ttl')
-    }
-    if (length) {
-      deprecatedOption('length', 'sizeCalculation')
-    }
-  }
-
-  getRemainingTTL(key) {
-    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
-  }
-
-  initializeTTLTracking() {
-    this.ttls = new ZeroArray(this.max)
-    this.starts = new ZeroArray(this.max)
-
-    this.setItemTTL = (index, ttl, start = perf.now()) => {
-      this.starts[index] = ttl !== 0 ? start : 0
-      this.ttls[index] = ttl
-      if (ttl !== 0 && this.ttlAutopurge) {
-        const t = setTimeout(() => {
-          if (this.isStale(index)) {
-            this.delete(this.keyList[index])
-          }
-        }, ttl + 1)
-        /* istanbul ignore else - unref() not supported on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-    }
-
-    this.updateItemAge = index => {
-      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
-    }
-
-    this.statusTTL = (status, index) => {
-      if (status) {
-        status.ttl = this.ttls[index]
-        status.start = this.starts[index]
-        status.now = cachedNow || getNow()
-        status.remainingTTL = status.now + status.ttl - status.start
-      }
-    }
-
-    // debounce calls to perf.now() to 1s so we're not hitting
-    // that costly call repeatedly.
-    let cachedNow = 0
-    const getNow = () => {
-      const n = perf.now()
-      if (this.ttlResolution > 0) {
-        cachedNow = n
-        const t = setTimeout(
-          () => (cachedNow = 0),
-          this.ttlResolution
-        )
-        /* istanbul ignore else - not available on all platforms */
-        if (t.unref) {
-          t.unref()
-        }
-      }
-      return n
-    }
-
-    this.getRemainingTTL = key => {
-      const index = this.keyMap.get(key)
-      if (index === undefined) {
-        return 0
-      }
-      return this.ttls[index] === 0 || this.starts[index] === 0
-        ? Infinity
-        : this.starts[index] +
-            this.ttls[index] -
-            (cachedNow || getNow())
-    }
-
-    this.isStale = index => {
-      return (
-        this.ttls[index] !== 0 &&
-        this.starts[index] !== 0 &&
-        (cachedNow || getNow()) - this.starts[index] >
-          this.ttls[index]
-      )
-    }
-  }
-  updateItemAge(_index) {}
-  statusTTL(_status, _index) {}
-  setItemTTL(_index, _ttl, _start) {}
-  isStale(_index) {
-    return false
-  }
-
-  initializeSizeTracking() {
-    this.calculatedSize = 0
-    this.sizes = new ZeroArray(this.max)
-    this.removeItemSize = index => {
-      this.calculatedSize -= this.sizes[index]
-      this.sizes[index] = 0
-    }
-    this.requireSize = (k, v, size, sizeCalculation) => {
-      // provisionally accept background fetches.
-      // actual value size will be checked when they return.
-      if (this.isBackgroundFetch(v)) {
-        return 0
-      }
-      if (!isPosInt(size)) {
-        if (sizeCalculation) {
-          if (typeof sizeCalculation !== 'function') {
-            throw new TypeError('sizeCalculation must be a function')
-          }
-          size = sizeCalculation(v, k)
-          if (!isPosInt(size)) {
-            throw new TypeError(
-              'sizeCalculation returned invalid (expected positive integer)'
-            )
-          }
-        } else {
-          throw new TypeError(
-            'invalid size value (must be positive integer). ' +
-              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
-              'must be set.'
-          )
-        }
-      }
-      return size
-    }
-    this.addItemSize = (index, size, status) => {
-      this.sizes[index] = size
-      if (this.maxSize) {
-        const maxSize = this.maxSize - this.sizes[index]
-        while (this.calculatedSize > maxSize) {
-          this.evict(true)
-        }
-      }
-      this.calculatedSize += this.sizes[index]
-      if (status) {
-        status.entrySize = size
-        status.totalCalculatedSize = this.calculatedSize
-      }
-    }
-  }
-  removeItemSize(_index) {}
-  addItemSize(_index, _size) {}
-  requireSize(_k, _v, size, sizeCalculation) {
-    if (size || sizeCalculation) {
-      throw new TypeError(
-        'cannot set size without setting maxSize or maxEntrySize on cache'
-      )
-    }
-  }
-
-  *indexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.tail; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.head) {
-          break
-        } else {
-          i = this.prev[i]
-        }
-      }
-    }
-  }
-
-  *rindexes({ allowStale = this.allowStale } = {}) {
-    if (this.size) {
-      for (let i = this.head; true; ) {
-        if (!this.isValidIndex(i)) {
-          break
-        }
-        if (allowStale || !this.isStale(i)) {
-          yield i
-        }
-        if (i === this.tail) {
-          break
-        } else {
-          i = this.next[i]
-        }
-      }
-    }
-  }
-
-  isValidIndex(index) {
-    return (
-      index !== undefined &&
-      this.keyMap.get(this.keyList[index]) === index
-    )
-  }
-
-  *entries() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-  *rentries() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield [this.keyList[i], this.valList[i]]
-      }
-    }
-  }
-
-  *keys() {
-    for (const i of this.indexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-  *rkeys() {
-    for (const i of this.rindexes()) {
-      if (
-        this.keyList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.keyList[i]
-      }
-    }
-  }
-
-  *values() {
-    for (const i of this.indexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-  *rvalues() {
-    for (const i of this.rindexes()) {
-      if (
-        this.valList[i] !== undefined &&
-        !this.isBackgroundFetch(this.valList[i])
-      ) {
-        yield this.valList[i]
-      }
-    }
-  }
-
-  [Symbol.iterator]() {
-    return this.entries()
-  }
-
-  find(fn, getOptions) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      if (fn(value, this.keyList[i], this)) {
-        return this.get(this.keyList[i], getOptions)
-      }
-    }
-  }
-
-  forEach(fn, thisp = this) {
-    for (const i of this.indexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  rforEach(fn, thisp = this) {
-    for (const i of this.rindexes()) {
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      fn.call(thisp, value, this.keyList[i], this)
-    }
-  }
-
-  get prune() {
-    deprecatedMethod('prune', 'purgeStale')
-    return this.purgeStale
-  }
-
-  purgeStale() {
-    let deleted = false
-    for (const i of this.rindexes({ allowStale: true })) {
-      if (this.isStale(i)) {
-        this.delete(this.keyList[i])
-        deleted = true
-      }
-    }
-    return deleted
-  }
-
-  dump() {
-    const arr = []
-    for (const i of this.indexes({ allowStale: true })) {
-      const key = this.keyList[i]
-      const v = this.valList[i]
-      const value = this.isBackgroundFetch(v)
-        ? v.__staleWhileFetching
-        : v
-      if (value === undefined) continue
-      const entry = { value }
-      if (this.ttls) {
-        entry.ttl = this.ttls[i]
-        // always dump the start relative to a portable timestamp
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = perf.now() - this.starts[i]
-        entry.start = Math.floor(Date.now() - age)
-      }
-      if (this.sizes) {
-        entry.size = this.sizes[i]
-      }
-      arr.unshift([key, entry])
-    }
-    return arr
-  }
-
-  load(arr) {
-    this.clear()
-    for (const [key, entry] of arr) {
-      if (entry.start) {
-        // entry.start is a portable timestamp, but we may be using
-        // node's performance.now(), so calculate the offset.
-        // it's ok for this to be a bit slow, it's a rare operation.
-        const age = Date.now() - entry.start
-        entry.start = perf.now() - age
-      }
-      this.set(key, entry.value, entry)
-    }
-  }
-
-  dispose(_v, _k, _reason) {}
-
-  set(
-    k,
-    v,
-    {
-      ttl = this.ttl,
-      start,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      status,
-    } = {}
-  ) {
-    size = this.requireSize(k, v, size, sizeCalculation)
-    // if the item doesn't fit, don't do anything
-    // NB: maxEntrySize set to maxSize by default
-    if (this.maxEntrySize && size > this.maxEntrySize) {
-      if (status) {
-        status.set = 'miss'
-        status.maxEntrySizeExceeded = true
-      }
-      // have to delete, in case a background fetch is there already.
-      // in non-async cases, this is a no-op
-      this.delete(k)
-      return this
-    }
-    let index = this.size === 0 ? undefined : this.keyMap.get(k)
-    if (index === undefined) {
-      // addition
-      index = this.newIndex()
-      this.keyList[index] = k
-      this.valList[index] = v
-      this.keyMap.set(k, index)
-      this.next[this.tail] = index
-      this.prev[index] = this.tail
-      this.tail = index
-      this.size++
-      this.addItemSize(index, size, status)
-      if (status) {
-        status.set = 'add'
-      }
-      noUpdateTTL = false
-    } else {
-      // update
-      this.moveToTail(index)
-      const oldVal = this.valList[index]
-      if (v !== oldVal) {
-        if (this.isBackgroundFetch(oldVal)) {
-          oldVal.__abortController.abort(new Error('replaced'))
-        } else {
-          if (!noDisposeOnSet) {
-            this.dispose(oldVal, k, 'set')
-            if (this.disposeAfter) {
-              this.disposed.push([oldVal, k, 'set'])
-            }
-          }
-        }
-        this.removeItemSize(index)
-        this.valList[index] = v
-        this.addItemSize(index, size, status)
-        if (status) {
-          status.set = 'replace'
-          const oldValue =
-            oldVal && this.isBackgroundFetch(oldVal)
-              ? oldVal.__staleWhileFetching
-              : oldVal
-          if (oldValue !== undefined) status.oldValue = oldValue
-        }
-      } else if (status) {
-        status.set = 'update'
-      }
-    }
-    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
-      this.initializeTTLTracking()
-    }
-    if (!noUpdateTTL) {
-      this.setItemTTL(index, ttl, start)
-    }
-    this.statusTTL(status, index)
-    if (this.disposeAfter) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return this
-  }
-
-  newIndex() {
-    if (this.size === 0) {
-      return this.tail
-    }
-    if (this.size === this.max && this.max !== 0) {
-      return this.evict(false)
-    }
-    if (this.free.length !== 0) {
-      return this.free.pop()
-    }
-    // initial fill, just keep writing down the list
-    return this.initialFill++
-  }
-
-  pop() {
-    if (this.size) {
-      const val = this.valList[this.head]
-      this.evict(true)
-      return val
-    }
-  }
-
-  evict(free) {
-    const head = this.head
-    const k = this.keyList[head]
-    const v = this.valList[head]
-    if (this.isBackgroundFetch(v)) {
-      v.__abortController.abort(new Error('evicted'))
-    } else {
-      this.dispose(v, k, 'evict')
-      if (this.disposeAfter) {
-        this.disposed.push([v, k, 'evict'])
-      }
-    }
-    this.removeItemSize(head)
-    // if we aren't about to use the index, then null these out
-    if (free) {
-      this.keyList[head] = null
-      this.valList[head] = null
-      this.free.push(head)
-    }
-    this.head = this.next[head]
-    this.keyMap.delete(k)
-    this.size--
-    return head
-  }
-
-  has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      if (!this.isStale(index)) {
-        if (updateAgeOnHas) {
-          this.updateItemAge(index)
-        }
-        if (status) status.has = 'hit'
-        this.statusTTL(status, index)
-        return true
-      } else if (status) {
-        status.has = 'stale'
-        this.statusTTL(status, index)
-      }
-    } else if (status) {
-      status.has = 'miss'
-    }
-    return false
-  }
-
-  // like get(), but without any LRU updating or TTL expiration
-  peek(k, { allowStale = this.allowStale } = {}) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined && (allowStale || !this.isStale(index))) {
-      const v = this.valList[index]
-      // either stale and allowed, or forcing a refresh of non-stale value
-      return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v
-    }
-  }
-
-  backgroundFetch(k, index, options, context) {
-    const v = index === undefined ? undefined : this.valList[index]
-    if (this.isBackgroundFetch(v)) {
-      return v
-    }
-    const ac = new AC()
-    if (options.signal) {
-      options.signal.addEventListener('abort', () =>
-        ac.abort(options.signal.reason)
-      )
-    }
-    const fetchOpts = {
-      signal: ac.signal,
-      options,
-      context,
-    }
-    const cb = (v, updateCache = false) => {
-      const { aborted } = ac.signal
-      const ignoreAbort = options.ignoreFetchAbort && v !== undefined
-      if (options.status) {
-        if (aborted && !updateCache) {
-          options.status.fetchAborted = true
-          options.status.fetchError = ac.signal.reason
-          if (ignoreAbort) options.status.fetchAbortIgnored = true
-        } else {
-          options.status.fetchResolved = true
-        }
-      }
-      if (aborted && !ignoreAbort && !updateCache) {
-        return fetchFail(ac.signal.reason)
-      }
-      // either we didn't abort, and are still here, or we did, and ignored
-      if (this.valList[index] === p) {
-        if (v === undefined) {
-          if (p.__staleWhileFetching) {
-            this.valList[index] = p.__staleWhileFetching
-          } else {
-            this.delete(k)
-          }
-        } else {
-          if (options.status) options.status.fetchUpdated = true
-          this.set(k, v, fetchOpts.options)
-        }
-      }
-      return v
-    }
-    const eb = er => {
-      if (options.status) {
-        options.status.fetchRejected = true
-        options.status.fetchError = er
-      }
-      return fetchFail(er)
-    }
-    const fetchFail = er => {
-      const { aborted } = ac.signal
-      const allowStaleAborted =
-        aborted && options.allowStaleOnFetchAbort
-      const allowStale =
-        allowStaleAborted || options.allowStaleOnFetchRejection
-      const noDelete = allowStale || options.noDeleteOnFetchRejection
-      if (this.valList[index] === p) {
-        // if we allow stale on fetch rejections, then we need to ensure that
-        // the stale value is not removed from the cache when the fetch fails.
-        const del = !noDelete || p.__staleWhileFetching === undefined
-        if (del) {
-          this.delete(k)
-        } else if (!allowStaleAborted) {
-          // still replace the *promise* with the stale value,
-          // since we are done with the promise at this point.
-          // leave it untouched if we're still waiting for an
-          // aborted background fetch that hasn't yet returned.
-          this.valList[index] = p.__staleWhileFetching
-        }
-      }
-      if (allowStale) {
-        if (options.status && p.__staleWhileFetching !== undefined) {
-          options.status.returnedStale = true
-        }
-        return p.__staleWhileFetching
-      } else if (p.__returned === p) {
-        throw er
-      }
-    }
-    const pcall = (res, rej) => {
-      this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej)
-      // ignored, we go until we finish, regardless.
-      // defer check until we are actually aborting,
-      // so fetchMethod can override.
-      ac.signal.addEventListener('abort', () => {
-        if (
-          !options.ignoreFetchAbort ||
-          options.allowStaleOnFetchAbort
-        ) {
-          res()
-          // when it eventually resolves, update the cache.
-          if (options.allowStaleOnFetchAbort) {
-            res = v => cb(v, true)
-          }
-        }
-      })
-    }
-    if (options.status) options.status.fetchDispatched = true
-    const p = new Promise(pcall).then(cb, eb)
-    p.__abortController = ac
-    p.__staleWhileFetching = v
-    p.__returned = null
-    if (index === undefined) {
-      // internal, don't expose status.
-      this.set(k, p, { ...fetchOpts.options, status: undefined })
-      index = this.keyMap.get(k)
-    } else {
-      this.valList[index] = p
-    }
-    return p
-  }
-
-  isBackgroundFetch(p) {
-    return (
-      p &&
-      typeof p === 'object' &&
-      typeof p.then === 'function' &&
-      Object.prototype.hasOwnProperty.call(
-        p,
-        '__staleWhileFetching'
-      ) &&
-      Object.prototype.hasOwnProperty.call(p, '__returned') &&
-      (p.__returned === p || p.__returned === null)
-    )
-  }
-
-  // this takes the union of get() and set() opts, because it does both
-  async fetch(
-    k,
-    {
-      // get options
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      // set options
-      ttl = this.ttl,
-      noDisposeOnSet = this.noDisposeOnSet,
-      size = 0,
-      sizeCalculation = this.sizeCalculation,
-      noUpdateTTL = this.noUpdateTTL,
-      // fetch exclusive options
-      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
-      ignoreFetchAbort = this.ignoreFetchAbort,
-      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
-      fetchContext = this.fetchContext,
-      forceRefresh = false,
-      status,
-      signal,
-    } = {}
-  ) {
-    if (!this.fetchMethod) {
-      if (status) status.fetch = 'get'
-      return this.get(k, {
-        allowStale,
-        updateAgeOnGet,
-        noDeleteOnStaleGet,
-        status,
-      })
-    }
-
-    const options = {
-      allowStale,
-      updateAgeOnGet,
-      noDeleteOnStaleGet,
-      ttl,
-      noDisposeOnSet,
-      size,
-      sizeCalculation,
-      noUpdateTTL,
-      noDeleteOnFetchRejection,
-      allowStaleOnFetchRejection,
-      allowStaleOnFetchAbort,
-      ignoreFetchAbort,
-      status,
-      signal,
-    }
-
-    let index = this.keyMap.get(k)
-    if (index === undefined) {
-      if (status) status.fetch = 'miss'
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      return (p.__returned = p)
-    } else {
-      // in cache, maybe already fetching
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        const stale =
-          allowStale && v.__staleWhileFetching !== undefined
-        if (status) {
-          status.fetch = 'inflight'
-          if (stale) status.returnedStale = true
-        }
-        return stale ? v.__staleWhileFetching : (v.__returned = v)
-      }
-
-      // if we force a refresh, that means do NOT serve the cached value,
-      // unless we are already in the process of refreshing the cache.
-      const isStale = this.isStale(index)
-      if (!forceRefresh && !isStale) {
-        if (status) status.fetch = 'hit'
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        this.statusTTL(status, index)
-        return v
-      }
-
-      // ok, it is stale or a forced refresh, and not already fetching.
-      // refresh the cache.
-      const p = this.backgroundFetch(k, index, options, fetchContext)
-      const hasStale = p.__staleWhileFetching !== undefined
-      const staleVal = hasStale && allowStale
-      if (status) {
-        status.fetch = hasStale && isStale ? 'stale' : 'refresh'
-        if (staleVal && isStale) status.returnedStale = true
-      }
-      return staleVal ? p.__staleWhileFetching : (p.__returned = p)
-    }
-  }
-
-  get(
-    k,
-    {
-      allowStale = this.allowStale,
-      updateAgeOnGet = this.updateAgeOnGet,
-      noDeleteOnStaleGet = this.noDeleteOnStaleGet,
-      status,
-    } = {}
-  ) {
-    const index = this.keyMap.get(k)
-    if (index !== undefined) {
-      const value = this.valList[index]
-      const fetching = this.isBackgroundFetch(value)
-      this.statusTTL(status, index)
-      if (this.isStale(index)) {
-        if (status) status.get = 'stale'
-        // delete only if not an in-flight background fetch
-        if (!fetching) {
-          if (!noDeleteOnStaleGet) {
-            this.delete(k)
-          }
-          if (status) status.returnedStale = allowStale
-          return allowStale ? value : undefined
-        } else {
-          if (status) {
-            status.returnedStale =
-              allowStale && value.__staleWhileFetching !== undefined
-          }
-          return allowStale ? value.__staleWhileFetching : undefined
-        }
-      } else {
-        if (status) status.get = 'hit'
-        // if we're currently fetching it, we don't actually have it yet
-        // it's not stale, which means this isn't a staleWhileRefetching.
-        // If it's not stale, and fetching, AND has a __staleWhileFetching
-        // value, then that means the user fetched with {forceRefresh:true},
-        // so it's safe to return that value.
-        if (fetching) {
-          return value.__staleWhileFetching
-        }
-        this.moveToTail(index)
-        if (updateAgeOnGet) {
-          this.updateItemAge(index)
-        }
-        return value
-      }
-    } else if (status) {
-      status.get = 'miss'
-    }
-  }
-
-  connect(p, n) {
-    this.prev[n] = p
-    this.next[p] = n
-  }
-
-  moveToTail(index) {
-    // if tail already, nothing to do
-    // if head, move head to next[index]
-    // else
-    //   move next[prev[index]] to next[index] (head has no prev)
-    //   move prev[next[index]] to prev[index]
-    // prev[index] = tail
-    // next[tail] = index
-    // tail = index
-    if (index !== this.tail) {
-      if (index === this.head) {
-        this.head = this.next[index]
-      } else {
-        this.connect(this.prev[index], this.next[index])
-      }
-      this.connect(this.tail, index)
-      this.tail = index
-    }
-  }
-
-  get del() {
-    deprecatedMethod('del', 'delete')
-    return this.delete
-  }
-
-  delete(k) {
-    let deleted = false
-    if (this.size !== 0) {
-      const index = this.keyMap.get(k)
-      if (index !== undefined) {
-        deleted = true
-        if (this.size === 1) {
-          this.clear()
-        } else {
-          this.removeItemSize(index)
-          const v = this.valList[index]
-          if (this.isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('deleted'))
-          } else {
-            this.dispose(v, k, 'delete')
-            if (this.disposeAfter) {
-              this.disposed.push([v, k, 'delete'])
-            }
-          }
-          this.keyMap.delete(k)
-          this.keyList[index] = null
-          this.valList[index] = null
-          if (index === this.tail) {
-            this.tail = this.prev[index]
-          } else if (index === this.head) {
-            this.head = this.next[index]
-          } else {
-            this.next[this.prev[index]] = this.next[index]
-            this.prev[this.next[index]] = this.prev[index]
-          }
-          this.size--
-          this.free.push(index)
-        }
-      }
-    }
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-    return deleted
-  }
-
-  clear() {
-    for (const index of this.rindexes({ allowStale: true })) {
-      const v = this.valList[index]
-      if (this.isBackgroundFetch(v)) {
-        v.__abortController.abort(new Error('deleted'))
-      } else {
-        const k = this.keyList[index]
-        this.dispose(v, k, 'delete')
-        if (this.disposeAfter) {
-          this.disposed.push([v, k, 'delete'])
-        }
-      }
-    }
-
-    this.keyMap.clear()
-    this.valList.fill(null)
-    this.keyList.fill(null)
-    if (this.ttls) {
-      this.ttls.fill(0)
-      this.starts.fill(0)
-    }
-    if (this.sizes) {
-      this.sizes.fill(0)
-    }
-    this.head = 0
-    this.tail = 0
-    this.initialFill = 1
-    this.free.length = 0
-    this.calculatedSize = 0
-    this.size = 0
-    if (this.disposed) {
-      while (this.disposed.length) {
-        this.disposeAfter(...this.disposed.shift())
-      }
-    }
-  }
-
-  get reset() {
-    deprecatedMethod('reset', 'clear')
-    return this.clear
-  }
-
-  get length() {
-    deprecatedProperty('length', 'size')
-    return this.size
-  }
-
-  static get AbortController() {
-    return AC
-  }
-  static get AbortSignal() {
-    return AS
-  }
-}
-
-export default LRUCache
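
The .mjs file deleted above is the ESM twin of the same class. Its one non-obvious data structure is the doubly linked LRU list kept in parallel integer arrays rather than node objects: connect() and moveToTail() only shuffle indexes, so reordering allocates nothing. A self-contained sketch of that pointer scheme (not part of the patch):

const max = 4
const next = new Uint8Array(max) // next[i]: neighbor toward the tail
const prev = new Uint8Array(max) // prev[i]: neighbor toward the head
let head = 0 // least recently used
let tail = 2 // most recently used

// seed a three-entry list: 0 -> 1 -> 2
next[0] = 1; prev[1] = 0
next[1] = 2; prev[2] = 1

const connect = (p, n) => {
  prev[n] = p
  next[p] = n
}

const moveToTail = (index) => {
  if (index === tail) return // already most recently used
  if (index === head) {
    head = next[index] // the head simply moves forward
  } else {
    connect(prev[index], next[index]) // unlink from the middle
  }
  connect(tail, index) // relink after the current tail
  tail = index
}

moveToTail(0) // a get() on slot 0 makes it most recently used
console.log({ head, tail }) // { head: 1, tail: 0 }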
diff --git a/node_modules/sigstore/node_modules/lru-cache/package.json b/node_modules/sigstore/node_modules/lru-cache/package.json
deleted file mode 100644
index 9684991727e7a..0000000000000
--- a/node_modules/sigstore/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,96 +0,0 @@
-{
-  "name": "lru-cache",
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "7.18.3",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "node ./scripts/transpile-to-esm.js",
-    "size": "size-limit",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc ./index.d.ts"
-  },
-  "type": "commonjs",
-  "main": "./index.js",
-  "module": "./index.mjs",
-  "types": "./index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./index.d.ts",
-        "default": "./index.mjs"
-      },
-      "require": {
-        "types": "./index.d.ts",
-        "default": "./index.js"
-      }
-    },
-    "./package.json": "./package.json"
-  },
-  "repository": "git://github.com/isaacs/node-lru-cache.git",
-  "devDependencies": {
-    "@size-limit/preset-small-lib": "^7.0.8",
-    "@types/node": "^17.0.31",
-    "@types/tap": "^15.0.6",
-    "benchmark": "^2.1.4",
-    "c8": "^7.11.2",
-    "clock-mock": "^1.0.6",
-    "eslint-config-prettier": "^8.5.0",
-    "prettier": "^2.6.2",
-    "size-limit": "^7.0.8",
-    "tap": "^16.3.4",
-    "ts-node": "^10.7.0",
-    "tslib": "^2.4.0",
-    "typedoc": "^0.23.24",
-    "typescript": "^4.6.4"
-  },
-  "license": "ISC",
-  "files": [
-    "index.js",
-    "index.mjs",
-    "index.d.ts"
-  ],
-  "engines": {
-    "node": ">=12"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--include=index.js"
-    ],
-    "node-arg": [
-      "--expose-gc",
-      "--require",
-      "ts-node/register"
-    ],
-    "ts": false
-  },
-  "size-limit": [
-    {
-      "path": "./index.js"
-    }
-  ]
-}
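
The deleted package.json stitches the two builds together with a conditional "exports" map: the "require" condition resolves to the CommonJS ./index.js, the "import" condition to the ESM ./index.mjs, both sharing ./index.d.ts. A sketch of the two consumer styles (illustrative, not part of the patch):

// consumer.cjs — Node applies the "require" condition => ./index.js
const LRUCache = require('lru-cache')
const cache = new LRUCache({ max: 10 })
cache.set('a', 1)
console.log(cache.get('a')) // 1

// consumer.mjs — Node applies the "import" condition => ./index.mjs
// import LRUCache from 'lru-cache'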
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/LICENSE b/node_modules/sigstore/node_modules/make-fetch-happen/LICENSE
deleted file mode 100644
index 1808eb2844231..0000000000000
--- a/node_modules/sigstore/node_modules/make-fetch-happen/LICENSE
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright 2017-2022 (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/agent.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/agent.js
deleted file mode 100644
index dd68492ed7ea7..0000000000000
--- a/node_modules/sigstore/node_modules/make-fetch-happen/lib/agent.js
+++ /dev/null
@@ -1,214 +0,0 @@
-'use strict'
-const LRU = require('lru-cache')
-const url = require('url')
-const isLambda = require('is-lambda')
-const dns = require('./dns.js')
-
-const AGENT_CACHE = new LRU({ max: 50 })
-const HttpAgent = require('agentkeepalive')
-const HttpsAgent = HttpAgent.HttpsAgent
-
-module.exports = getAgent
-
-const getAgentTimeout = timeout =>
-  typeof timeout !== 'number' || !timeout ? 0 : timeout + 1
-
-const getMaxSockets = maxSockets => maxSockets || 15
-
-function getAgent (uri, opts) {
-  const parsedUri = new url.URL(typeof uri === 'string' ? uri : uri.url)
-  const isHttps = parsedUri.protocol === 'https:'
-  const pxuri = getProxyUri(parsedUri.href, opts)
-
-  // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout
-  // of zero disables the timeout behavior (OS limits still apply). Else, if
-  // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that
-  // the node-fetch-npm timeout will always fire first, giving us more
-  // consistent errors.
-  const agentTimeout = getAgentTimeout(opts.timeout)
-  const agentMaxSockets = getMaxSockets(opts.maxSockets)
-
-  const key = [
-    `https:${isHttps}`,
-    pxuri
-      ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}`
-      : '>no-proxy<',
-    `local-address:${opts.localAddress || '>no-local-address<'}`,
-    `strict-ssl:${isHttps ? opts.rejectUnauthorized : '>no-strict-ssl<'}`,
-    `ca:${(isHttps && opts.ca) || '>no-ca<'}`,
-    `cert:${(isHttps && opts.cert) || '>no-cert<'}`,
-    `key:${(isHttps && opts.key) || '>no-key<'}`,
-    `timeout:${agentTimeout}`,
-    `maxSockets:${agentMaxSockets}`,
-  ].join(':')
-
-  if (opts.agent != null) { // `agent: false` has special behavior!
-    return opts.agent
-  }
-
-  // keep alive in AWS lambda makes no sense
-  const lambdaAgent = !isLambda ? null
-    : isHttps ? require('https').globalAgent
-    : require('http').globalAgent
-
-  if (isLambda && !pxuri) {
-    return lambdaAgent
-  }
-
-  if (AGENT_CACHE.peek(key)) {
-    return AGENT_CACHE.get(key)
-  }
-
-  if (pxuri) {
-    const pxopts = isLambda ? {
-      ...opts,
-      agent: lambdaAgent,
-    } : opts
-    const proxy = getProxy(pxuri, pxopts, isHttps)
-    AGENT_CACHE.set(key, proxy)
-    return proxy
-  }
-
-  const agent = isHttps ? new HttpsAgent({
-    maxSockets: agentMaxSockets,
-    ca: opts.ca,
-    cert: opts.cert,
-    key: opts.key,
-    localAddress: opts.localAddress,
-    rejectUnauthorized: opts.rejectUnauthorized,
-    timeout: agentTimeout,
-    freeSocketTimeout: 15000,
-    lookup: dns.getLookup(opts.dns),
-  }) : new HttpAgent({
-    maxSockets: agentMaxSockets,
-    localAddress: opts.localAddress,
-    timeout: agentTimeout,
-    freeSocketTimeout: 15000,
-    lookup: dns.getLookup(opts.dns),
-  })
-  AGENT_CACHE.set(key, agent)
-  return agent
-}
-
-function checkNoProxy (uri, opts) {
-  const host = new url.URL(uri).hostname.split('.').reverse()
-  let noproxy = (opts.noProxy || getProcessEnv('no_proxy'))
-  if (typeof noproxy === 'string') {
-    noproxy = noproxy.split(',').map(n => n.trim())
-  }
-
-  return noproxy && noproxy.some(no => {
-    const noParts = no.split('.').filter(x => x).reverse()
-    if (!noParts.length) {
-      return false
-    }
-    for (let i = 0; i < noParts.length; i++) {
-      if (host[i] !== noParts[i]) {
-        return false
-      }
-    }
-    return true
-  })
-}
-
-module.exports.getProcessEnv = getProcessEnv
-
-function getProcessEnv (env) {
-  if (!env) {
-    return
-  }
-
-  let value
-
-  if (Array.isArray(env)) {
-    for (const e of env) {
-      value = process.env[e] ||
-        process.env[e.toUpperCase()] ||
-        process.env[e.toLowerCase()]
-      if (typeof value !== 'undefined') {
-        break
-      }
-    }
-  }
-
-  if (typeof env === 'string') {
-    value = process.env[env] ||
-      process.env[env.toUpperCase()] ||
-      process.env[env.toLowerCase()]
-  }
-
-  return value
-}
-
-module.exports.getProxyUri = getProxyUri
-function getProxyUri (uri, opts) {
-  const protocol = new url.URL(uri).protocol
-
-  const proxy = opts.proxy ||
-    (
-      protocol === 'https:' &&
-      getProcessEnv('https_proxy')
-    ) ||
-    (
-      protocol === 'http:' &&
-      getProcessEnv(['https_proxy', 'http_proxy', 'proxy'])
-    )
-  if (!proxy) {
-    return null
-  }
-
-  const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy
-
-  return !checkNoProxy(uri, opts) && parsedProxy
-}
-
-const getAuth = u =>
-  u.username && u.password ? decodeURIComponent(`${u.username}:${u.password}`)
-  : u.username ? decodeURIComponent(u.username)
-  : null
-
-const getPath = u => u.pathname + u.search + u.hash
-
-const HttpProxyAgent = require('http-proxy-agent')
-const HttpsProxyAgent = require('https-proxy-agent')
-const { SocksProxyAgent } = require('socks-proxy-agent')
-module.exports.getProxy = getProxy
-function getProxy (proxyUrl, opts, isHttps) {
-  // our current proxy agents do not support an overridden dns lookup method, so will not
-  // benefit from the dns cache
-  const popts = {
-    host: proxyUrl.hostname,
-    port: proxyUrl.port,
-    protocol: proxyUrl.protocol,
-    path: getPath(proxyUrl),
-    auth: getAuth(proxyUrl),
-    ca: opts.ca,
-    cert: opts.cert,
-    key: opts.key,
-    timeout: getAgentTimeout(opts.timeout),
-    localAddress: opts.localAddress,
-    maxSockets: getMaxSockets(opts.maxSockets),
-    rejectUnauthorized: opts.rejectUnauthorized,
-  }
-
-  if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') {
-    if (!isHttps) {
-      return new HttpProxyAgent(popts)
-    } else {
-      return new HttpsProxyAgent(popts)
-    }
-  } else if (proxyUrl.protocol.startsWith('socks')) {
-    // socks-proxy-agent uses hostname not host
-    popts.hostname = popts.host
-    delete popts.host
-    return new SocksProxyAgent(popts)
-  } else {
-    throw Object.assign(
-      new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`),
-      {
-        code: 'EUNSUPPORTEDPROXY',
-        url: proxyUrl.href,
-      }
-    )
-  }
-}
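
For reference, the no_proxy matching deleted above compares reversed dot-separated labels, so a pattern matches any host that ends with it. A standalone sketch of that logic (matchesNoProxy is a hypothetical name; the deleted checkNoProxy() above is authoritative):

const matchesNoProxy = (hostname, noProxyList) => {
  const host = hostname.split('.').reverse()
  return noProxyList.some((no) => {
    // a leading dot, as in '.npmjs.org', is dropped by the filter
    const parts = no.split('.').filter(Boolean).reverse()
    if (!parts.length) {
      return false
    }
    // every pattern label must match the host, starting from the TLD
    return parts.every((part, i) => host[i] === part)
  })
}

// matchesNoProxy('registry.npmjs.org', ['npmjs.org'])   => true
// matchesNoProxy('registry.npmjs.org', ['example.com']) => false
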
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/entry.js
deleted file mode 100644
index 45141095074ec..0000000000000
--- a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/entry.js
+++ /dev/null
@@ -1,469 +0,0 @@
-const { Request, Response } = require('minipass-fetch')
-const { Minipass } = require('minipass')
-const MinipassFlush = require('minipass-flush')
-const cacache = require('cacache')
-const url = require('url')
-
-const CachingMinipassPipeline = require('../pipeline.js')
-const CachePolicy = require('./policy.js')
-const cacheKey = require('./key.js')
-const remote = require('../remote.js')
-
-const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
-
-// allow list for request headers that will be written to the cache index
-// note: we will also store any request headers
-// that are named in a response's vary header
-const KEEP_REQUEST_HEADERS = [
-  'accept-charset',
-  'accept-encoding',
-  'accept-language',
-  'accept',
-  'cache-control',
-]
-
-// allow list for response headers that will be written to the cache index
-// note: we must not store the real response's age header, or when we load
-// a cache policy based on the metadata it will think the cached response
-// is always stale
-const KEEP_RESPONSE_HEADERS = [
-  'cache-control',
-  'content-encoding',
-  'content-language',
-  'content-type',
-  'date',
-  'etag',
-  'expires',
-  'last-modified',
-  'link',
-  'location',
-  'pragma',
-  'vary',
-]
-
-// return an object containing all metadata to be written to the index
-const getMetadata = (request, response, options) => {
-  const metadata = {
-    time: Date.now(),
-    url: request.url,
-    reqHeaders: {},
-    resHeaders: {},
-
-    // options on which we must match the request and vary the response
-    options: {
-      compress: options.compress != null ? options.compress : request.compress,
-    },
-  }
-
-  // only save the status if it's not a 200 or 304
-  if (response.status !== 200 && response.status !== 304) {
-    metadata.status = response.status
-  }
-
-  for (const name of KEEP_REQUEST_HEADERS) {
-    if (request.headers.has(name)) {
-      metadata.reqHeaders[name] = request.headers.get(name)
-    }
-  }
-
-  // if the request's host header differs from the host in the url
-  // we need to keep it, otherwise it's just noise and we ignore it
-  const host = request.headers.get('host')
-  const parsedUrl = new url.URL(request.url)
-  if (host && parsedUrl.host !== host) {
-    metadata.reqHeaders.host = host
-  }
-
-  // if the response has a vary header, make sure
-  // we store the relevant request headers too
-  if (response.headers.has('vary')) {
-    const vary = response.headers.get('vary')
-    // a vary of "*" means every header causes a different response.
-    // in that scenario, we do not include any additional headers
-    // as the freshness check will always fail anyway and we don't
-    // want to bloat the cache indexes
-    if (vary !== '*') {
-      // copy any other request headers that will vary the response
-      const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
-      for (const name of varyHeaders) {
-        if (request.headers.has(name)) {
-          metadata.reqHeaders[name] = request.headers.get(name)
-        }
-      }
-    }
-  }
-
-  for (const name of KEEP_RESPONSE_HEADERS) {
-    if (response.headers.has(name)) {
-      metadata.resHeaders[name] = response.headers.get(name)
-    }
-  }
-
-  for (const name of options.cacheAdditionalHeaders) {
-    if (response.headers.has(name)) {
-      metadata.resHeaders[name] = response.headers.get(name)
-    }
-  }
-
-  return metadata
-}
-
-// symbols used to hide objects that may be lazily evaluated in a getter
-const _request = Symbol('request')
-const _response = Symbol('response')
-const _policy = Symbol('policy')
-
-class CacheEntry {
-  constructor ({ entry, request, response, options }) {
-    if (entry) {
-      this.key = entry.key
-      this.entry = entry
-      // previous versions of this module didn't write an explicit timestamp in
-      // the metadata, so fall back to the entry's timestamp. we can't use the
-      // entry timestamp to determine staleness because cacache will update it
-      // when it verifies its data
-      this.entry.metadata.time = this.entry.metadata.time || this.entry.time
-    } else {
-      this.key = cacheKey(request)
-    }
-
-    this.options = options
-
-    // these properties are behind getters that lazily evaluate
-    this[_request] = request
-    this[_response] = response
-    this[_policy] = null
-  }
-
-  // returns a CacheEntry instance that satisfies the given request
-  // or undefined if no existing entry satisfies
-  static async find (request, options) {
-    try {
-      // compacts the index and returns an array of unique entries
-      var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
-        const entryA = new CacheEntry({ entry: A, options })
-        const entryB = new CacheEntry({ entry: B, options })
-        return entryA.policy.satisfies(entryB.request)
-      }, {
-        validateEntry: (entry) => {
-          // clean out entries with a buggy content-encoding value
-          if (entry.metadata &&
-              entry.metadata.resHeaders &&
-              entry.metadata.resHeaders['content-encoding'] === null) {
-            return false
-          }
-
-          // if an integrity is null, it needs to have a status specified
-          if (entry.integrity === null) {
-            return !!(entry.metadata && entry.metadata.status)
-          }
-
-          return true
-        },
-      })
-    } catch (err) {
-      // if the compact request fails, ignore the error and return
-      return
-    }
-
-    // a cache mode of 'reload' means to behave as though we have no cache
-    // on the way to the network. return undefined to allow cacheFetch to
-    // create a brand new request no matter what.
-    if (options.cache === 'reload') {
-      return
-    }
-
-    // find the specific entry that satisfies the request
-    let match
-    for (const entry of matches) {
-      const _entry = new CacheEntry({
-        entry,
-        options,
-      })
-
-      if (_entry.policy.satisfies(request)) {
-        match = _entry
-        break
-      }
-    }
-
-    return match
-  }
-
-  // if the user made a PUT/POST/PATCH then we invalidate our
-  // cache for the same url by deleting the index entirely
-  static async invalidate (request, options) {
-    const key = cacheKey(request)
-    try {
-      await cacache.rm.entry(options.cachePath, key, { removeFully: true })
-    } catch (err) {
-      // ignore errors
-    }
-  }
-
-  get request () {
-    if (!this[_request]) {
-      this[_request] = new Request(this.entry.metadata.url, {
-        method: 'GET',
-        headers: this.entry.metadata.reqHeaders,
-        ...this.entry.metadata.options,
-      })
-    }
-
-    return this[_request]
-  }
-
-  get response () {
-    if (!this[_response]) {
-      this[_response] = new Response(null, {
-        url: this.entry.metadata.url,
-        counter: this.options.counter,
-        status: this.entry.metadata.status || 200,
-        headers: {
-          ...this.entry.metadata.resHeaders,
-          'content-length': this.entry.size,
-        },
-      })
-    }
-
-    return this[_response]
-  }
-
-  get policy () {
-    if (!this[_policy]) {
-      this[_policy] = new CachePolicy({
-        entry: this.entry,
-        request: this.request,
-        response: this.response,
-        options: this.options,
-      })
-    }
-
-    return this[_policy]
-  }
-
-  // wraps the response in a pipeline that stores the data
-  // in the cache while the user consumes it
-  async store (status) {
-    // if we got a status other than 200, 301, or 308,
-    // or the CachePolicy forbid storage, append the
-    // cache status header and return it untouched
-    if (
-      this.request.method !== 'GET' ||
-      ![200, 301, 308].includes(this.response.status) ||
-      !this.policy.storable()
-    ) {
-      this.response.headers.set('x-local-cache-status', 'skip')
-      return this.response
-    }
-
-    const size = this.response.headers.get('content-length')
-    const cacheOpts = {
-      algorithms: this.options.algorithms,
-      metadata: getMetadata(this.request, this.response, this.options),
-      size,
-      integrity: this.options.integrity,
-      integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
-    }
-
-    let body = null
-    // we only set a body if the status is a 200, redirects are
-    // stored as metadata only
-    if (this.response.status === 200) {
-      let cacheWriteResolve, cacheWriteReject
-      const cacheWritePromise = new Promise((resolve, reject) => {
-        cacheWriteResolve = resolve
-        cacheWriteReject = reject
-      })
-
-      body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
-        flush () {
-          return cacheWritePromise
-        },
-      }))
-      // this is always true since if we aren't reusing the one from the remote fetch, we
-      // are using the one from cacache
-      body.hasIntegrityEmitter = true
-
-      const onResume = () => {
-        const tee = new Minipass()
-        const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
-        // re-emit the integrity and size events on our new response body so they can be reused
-        cacheStream.on('integrity', i => body.emit('integrity', i))
-        cacheStream.on('size', s => body.emit('size', s))
-        // stick a flag on here so downstream users will know if they can expect integrity events
-        tee.pipe(cacheStream)
-        // TODO if the cache write fails, log a warning but return the response anyway
-        // eslint-disable-next-line promise/catch-or-return
-        cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
-        body.unshift(tee)
-        body.unshift(this.response.body)
-      }
-
-      body.once('resume', onResume)
-      body.once('end', () => body.removeListener('resume', onResume))
-    } else {
-      await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
-    }
-
-    // note: we do not set the x-local-cache-hash header because we do not know
-    // the hash value until after the write to the cache completes, which doesn't
-    // happen until after the response has been sent and it's too late to write
-    // the header anyway
-    this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
-    this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
-    this.response.headers.set('x-local-cache-mode', 'stream')
-    this.response.headers.set('x-local-cache-status', status)
-    this.response.headers.set('x-local-cache-time', new Date().toISOString())
-    const newResponse = new Response(body, {
-      url: this.response.url,
-      status: this.response.status,
-      headers: this.response.headers,
-      counter: this.options.counter,
-    })
-    return newResponse
-  }
-
-  // use the cached data to create a response and return it
-  async respond (method, options, status) {
-    let response
-    if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
-      // if the request is a HEAD, or the response is a redirect,
-      // then the metadata in the entry already includes everything
-      // we need to build a response
-      response = this.response
-    } else {
-      // we're responding with a full cached response, so create a body
-      // that reads from cacache and attach it to a new Response
-      const body = new Minipass()
-      const headers = { ...this.policy.responseHeaders() }
-
-      const onResume = () => {
-        const cacheStream = cacache.get.stream.byDigest(
-          this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
-        )
-        cacheStream.on('error', async (err) => {
-          cacheStream.pause()
-          if (err.code === 'EINTEGRITY') {
-            await cacache.rm.content(
-              this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
-            )
-          }
-          if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
-            await CacheEntry.invalidate(this.request, this.options)
-          }
-          body.emit('error', err)
-          cacheStream.resume()
-        })
-        // emit the integrity and size events based on our metadata so we're consistent
-        body.emit('integrity', this.entry.integrity)
-        body.emit('size', Number(headers['content-length']))
-        cacheStream.pipe(body)
-      }
-
-      body.once('resume', onResume)
-      body.once('end', () => body.removeListener('resume', onResume))
-      response = new Response(body, {
-        url: this.entry.metadata.url,
-        counter: options.counter,
-        status: 200,
-        headers,
-      })
-    }
-
-    response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
-    response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
-    response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
-    response.headers.set('x-local-cache-mode', 'stream')
-    response.headers.set('x-local-cache-status', status)
-    response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
-    return response
-  }
-
-  // use the provided request along with this cache entry to
-  // revalidate the stored response. returns a response, either
-  // from the cache or from the update
-  async revalidate (request, options) {
-    const revalidateRequest = new Request(request, {
-      headers: this.policy.revalidationHeaders(request),
-    })
-
-    try {
-      // NOTE: be sure to remove the headers property from the
-      // user supplied options, since we have already defined
-      // them on the new request object. if they're still in the
-      // options then those will overwrite the ones from the policy
-      var response = await remote(revalidateRequest, {
-        ...options,
-        headers: undefined,
-      })
-    } catch (err) {
-      // if the network fetch fails, return the stale
-      // cached response unless it has a cache-control
-      // of 'must-revalidate'
-      if (!this.policy.mustRevalidate) {
-        return this.respond(request.method, options, 'stale')
-      }
-
-      throw err
-    }
-
-    if (this.policy.revalidated(revalidateRequest, response)) {
-      // we got a 304, write a new index to the cache and respond from cache
-      const metadata = getMetadata(request, response, options)
-      // 304 responses do not include headers that are specific to the response data
-      // since they do not include a body, so we copy values for headers that were
-      // in the old cache entry to the new one, if the new metadata does not already
-      // include that header
-      for (const name of KEEP_RESPONSE_HEADERS) {
-        if (
-          !hasOwnProperty(metadata.resHeaders, name) &&
-          hasOwnProperty(this.entry.metadata.resHeaders, name)
-        ) {
-          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
-        }
-      }
-
-      for (const name of options.cacheAdditionalHeaders) {
-        const inMeta = hasOwnProperty(metadata.resHeaders, name)
-        const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
-        const inPolicy = hasOwnProperty(this.policy.response.headers, name)
-
-        // if the header is in the existing entry, but it is not in the metadata
-        // then we need to write it to the metadata as this will refresh the on-disk cache
-        if (!inMeta && inEntry) {
-          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
-        }
-        // if the header is in the metadata, but not in the policy, then we need to set
-        // it in the policy so that it's included in the immediate response. future
-        // responses will load a new cache entry, so we don't need to change that
-        if (!inPolicy && inMeta) {
-          this.policy.response.headers[name] = metadata.resHeaders[name]
-        }
-      }
-
-      try {
-        await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
-          size: this.entry.size,
-          metadata,
-        })
-      } catch (err) {
-        // if updating the cache index fails, we ignore it and
-        // respond anyway
-      }
-      return this.respond(request.method, options, 'revalidated')
-    }
-
-    // if we got a modified response, create a new entry based on it
-    const newEntry = new CacheEntry({
-      request,
-      response,
-      options,
-    })
-
-    // respond with the new entry while writing it to the cache
-    return newEntry.store('updated')
-  }
-}
-
-module.exports = CacheEntry
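
The subtle part of getMetadata() above is the Vary capture: any request header named by the response's Vary header is persisted into the index, so a later lookup can prove a cached body is reusable. A minimal sketch of just that step, assuming lowercased plain-object headers rather than the fetch Headers class used above:

const captureVaryHeaders = (reqHeaders, resHeaders) => {
  const saved = {}
  const vary = resHeaders.vary
  // 'Vary: *' means no set of request headers can prove a match, so nothing
  // extra is stored and the freshness check will always fail
  if (vary && vary !== '*') {
    for (const name of vary.trim().toLowerCase().split(/\s*,\s*/)) {
      if (reqHeaders[name] !== undefined) {
        saved[name] = reqHeaders[name]
      }
    }
  }
  return saved
}

// captureVaryHeaders({ 'accept-encoding': 'gzip' }, { vary: 'Accept-Encoding' })
//   => { 'accept-encoding': 'gzip' }
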
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/errors.js
deleted file mode 100644
index 67a66573bebe6..0000000000000
--- a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/errors.js
+++ /dev/null
@@ -1,11 +0,0 @@
-class NotCachedError extends Error {
-  constructor (url) {
-    /* eslint-disable-next-line max-len */
-    super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
-    this.code = 'ENOTCACHED'
-  }
-}
-
-module.exports = {
-  NotCachedError,
-}
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/index.js
deleted file mode 100644
index 0de49d23fb933..0000000000000
--- a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/index.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const { NotCachedError } = require('./errors.js')
-const CacheEntry = require('./entry.js')
-const remote = require('../remote.js')
-
-// do whatever is necessary to get a Response and return it
-const cacheFetch = async (request, options) => {
-  // try to find a cached entry that satisfies this request
-  const entry = await CacheEntry.find(request, options)
-  if (!entry) {
-    // no cached result, if the cache mode is 'only-if-cached' that's a failure
-    if (options.cache === 'only-if-cached') {
-      throw new NotCachedError(request.url)
-    }
-
-    // otherwise, we make a request, store it and return it
-    const response = await remote(request, options)
-    const newEntry = new CacheEntry({ request, response, options })
-    return newEntry.store('miss')
-  }
-
-  // we have a cached response that satisfies this request, however if the cache
-  // mode is 'no-cache' then we send the revalidation request no matter what
-  if (options.cache === 'no-cache') {
-    return entry.revalidate(request, options)
-  }
-
-  // if the cached entry is not stale, or if the cache mode is 'force-cache' or
-  // 'only-if-cached' we can respond with the cached entry. set the status
-  // based on the result of needsRevalidation and respond
-  const _needsRevalidation = entry.policy.needsRevalidation(request)
-  if (options.cache === 'force-cache' ||
-      options.cache === 'only-if-cached' ||
-      !_needsRevalidation) {
-    return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
-  }
-
-  // if we got here, the cache entry is stale so revalidate it
-  return entry.revalidate(request, options)
-}
-
-cacheFetch.invalidate = async (request, options) => {
-  if (!options.cachePath) {
-    return
-  }
-
-  return CacheEntry.invalidate(request, options)
-}
-
-module.exports = cacheFetch
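
The branching in cacheFetch() above reduces to a small decision table over options.cache and staleness. A condensed restatement (decideAction is illustrative only; note that 'reload' never yields an entry because CacheEntry.find() returns undefined for it, so it lands in the miss branch):

const decideAction = (entry, cacheMode, isStale) => {
  if (!entry) {
    return cacheMode === 'only-if-cached' ? 'throw ENOTCACHED' : 'fetch, store as miss'
  }
  if (cacheMode === 'no-cache') {
    return 'revalidate'
  }
  if (cacheMode === 'force-cache' || cacheMode === 'only-if-cached' || !isStale) {
    return isStale ? 'respond from cache (stale)' : 'respond from cache (hit)'
  }
  return 'revalidate'
}
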
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/key.js
deleted file mode 100644
index f7684d562b7fa..0000000000000
--- a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/key.js
+++ /dev/null
@@ -1,17 +0,0 @@
-const { URL, format } = require('url')
-
-// options passed to url.format() when generating a key
-const formatOptions = {
-  auth: false,
-  fragment: false,
-  search: true,
-  unicode: false,
-}
-
-// returns a string to be used as the cache key for the Request
-const cacheKey = (request) => {
-  const parsed = new URL(request.url)
-  return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
-}
-
-module.exports = cacheKey
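
With those format options, credentials and fragments are stripped from the key while the query string is kept. A runnable example of what the deleted cacheKey() above produces (URL is illustrative):

const { URL, format } = require('url')

// same options object as the deleted key.js above
const formatOptions = { auth: false, fragment: false, search: true, unicode: false }

const key = `make-fetch-happen:request-cache:${format(
  new URL('https://user:pass@registry.npmjs.org/npm?write=true#frag'),
  formatOptions
)}`
console.log(key)
// => make-fetch-happen:request-cache:https://registry.npmjs.org/npm?write=true
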
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/policy.js
deleted file mode 100644
index ada3c8600dae9..0000000000000
--- a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/policy.js
+++ /dev/null
@@ -1,161 +0,0 @@
-const CacheSemantics = require('http-cache-semantics')
-const Negotiator = require('negotiator')
-const ssri = require('ssri')
-
-// options passed to http-cache-semantics constructor
-const policyOptions = {
-  shared: false,
-  ignoreCargoCult: true,
-}
-
-// a fake empty response, used when only testing the
-// request for storability
-const emptyResponse = { status: 200, headers: {} }
-
-// returns a plain object representation of the Request
-const requestObject = (request) => {
-  const _obj = {
-    method: request.method,
-    url: request.url,
-    headers: {},
-    compress: request.compress,
-  }
-
-  request.headers.forEach((value, key) => {
-    _obj.headers[key] = value
-  })
-
-  return _obj
-}
-
-// returns a plain object representation of the Response
-const responseObject = (response) => {
-  const _obj = {
-    status: response.status,
-    headers: {},
-  }
-
-  response.headers.forEach((value, key) => {
-    _obj.headers[key] = value
-  })
-
-  return _obj
-}
-
-class CachePolicy {
-  constructor ({ entry, request, response, options }) {
-    this.entry = entry
-    this.request = requestObject(request)
-    this.response = responseObject(response)
-    this.options = options
-    this.policy = new CacheSemantics(this.request, this.response, policyOptions)
-
-    if (this.entry) {
-      // if we have an entry, copy the timestamp to the _responseTime
-      // this is necessary because the CacheSemantics constructor forces
-      // the value to Date.now() which means a policy created from a
-      // cache entry is likely to always identify itself as stale
-      this.policy._responseTime = this.entry.metadata.time
-    }
-  }
-
-  // static method to quickly determine if a request alone is storable
-  static storable (request, options) {
-    // no cachePath means no caching
-    if (!options.cachePath) {
-      return false
-    }
-
-    // user explicitly asked not to cache
-    if (options.cache === 'no-store') {
-      return false
-    }
-
-    // we only cache GET and HEAD requests
-    if (!['GET', 'HEAD'].includes(request.method)) {
-      return false
-    }
-
-    // otherwise, let http-cache-semantics make the decision
-    // based on the request's headers
-    const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
-    return policy.storable()
-  }
-
-  // returns true if the policy satisfies the request
-  satisfies (request) {
-    const _req = requestObject(request)
-    if (this.request.headers.host !== _req.headers.host) {
-      return false
-    }
-
-    if (this.request.compress !== _req.compress) {
-      return false
-    }
-
-    const negotiatorA = new Negotiator(this.request)
-    const negotiatorB = new Negotiator(_req)
-
-    if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
-      return false
-    }
-
-    if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
-      return false
-    }
-
-    if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
-      return false
-    }
-
-    if (this.options.integrity) {
-      return ssri.parse(this.options.integrity).match(this.entry.integrity)
-    }
-
-    return true
-  }
-
-  // returns true if the request and response allow caching
-  storable () {
-    return this.policy.storable()
-  }
-
-  // NOTE: this is a hack to avoid parsing the cache-control
-  // header ourselves, it returns true if the response's
-  // cache-control contains must-revalidate
-  get mustRevalidate () {
-    return !!this.policy._rescc['must-revalidate']
-  }
-
-  // returns true if the cached response requires revalidation
-  // for the given request
-  needsRevalidation (request) {
-    const _req = requestObject(request)
-    // force method to GET because we only cache GETs
-    // but can serve a HEAD from a cached GET
-    _req.method = 'GET'
-    return !this.policy.satisfiesWithoutRevalidation(_req)
-  }
-
-  responseHeaders () {
-    return this.policy.responseHeaders()
-  }
-
-  // returns a new object containing the appropriate headers
-  // to send a revalidation request
-  revalidationHeaders (request) {
-    const _req = requestObject(request)
-    return this.policy.revalidationHeaders(_req)
-  }
-
-  // returns true if the request/response was revalidated
-  // successfully. returns false if a new response was received
-  revalidated (request, response) {
-    const _req = requestObject(request)
-    const _res = responseObject(response)
-    const policy = this.policy.revalidatedPolicy(_req, _res)
-    return !policy.modified
-  }
-}
-
-module.exports = CachePolicy
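
satisfies() above treats two requests as interchangeable only when their negotiated media types, languages, and encodings agree, using negotiator (a dependency declared in the deleted package.json later in this patch) to normalize the Accept-* headers first. A standalone sketch of that comparison (sameNegotiation is a hypothetical helper):

const Negotiator = require('negotiator')

const sameNegotiation = (reqA, reqB) => {
  const a = new Negotiator(reqA)
  const b = new Negotiator(reqB)
  // order matters, hence the JSON.stringify comparison, as above
  return ['mediaTypes', 'languages', 'encodings'].every(
    (m) => JSON.stringify(a[m]()) === JSON.stringify(b[m]())
  )
}

// requests are plain { headers } objects:
// sameNegotiation({ headers: { accept: 'application/json' } },
//                 { headers: { accept: 'application/json' } }) // => true
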
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/dns.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/dns.js
deleted file mode 100644
index 13102b57c4aa0..0000000000000
--- a/node_modules/sigstore/node_modules/make-fetch-happen/lib/dns.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const LRUCache = require('lru-cache')
-const dns = require('dns')
-
-const defaultOptions = exports.defaultOptions = {
-  family: undefined,
-  hints: dns.ADDRCONFIG,
-  all: false,
-  verbatim: undefined,
-}
-
-const lookupCache = exports.lookupCache = new LRUCache({ max: 50 })
-
-// this is a factory so that each request can have its own opts (i.e. ttl)
-// while still sharing the cache across all requests
-exports.getLookup = (dnsOptions) => {
-  return (hostname, options, callback) => {
-    if (typeof options === 'function') {
-      callback = options
-      options = null
-    } else if (typeof options === 'number') {
-      options = { family: options }
-    }
-
-    options = { ...defaultOptions, ...options }
-
-    const key = JSON.stringify({
-      hostname,
-      family: options.family,
-      hints: options.hints,
-      all: options.all,
-      verbatim: options.verbatim,
-    })
-
-    if (lookupCache.has(key)) {
-      const [address, family] = lookupCache.get(key)
-      process.nextTick(callback, null, address, family)
-      return
-    }
-
-    dnsOptions.lookup(hostname, options, (err, address, family) => {
-      if (err) {
-        return callback(err)
-      }
-
-      lookupCache.set(key, [address, family], { ttl: dnsOptions.ttl })
-      return callback(null, address, family)
-    })
-  }
-}
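
The factory above returns a dns.lookup-compatible function, which is what lets it be handed to an http.Agent. A simplified, self-contained sketch of the same pattern (the deleted code additionally keys the cache on family/hints/all/verbatim and uses lru-cache with max: 50):

const dns = require('dns')
const http = require('http')

const cache = new Map() // stand-in for the LRU cache above

const cachedLookup = (hostname, options, callback) => {
  if (typeof options === 'function') {
    callback = options
    options = {}
  }
  const hit = cache.get(hostname)
  if (hit) {
    // deliver asynchronously, like the real dns.lookup
    return process.nextTick(callback, null, hit.address, hit.family)
  }
  dns.lookup(hostname, options, (err, address, family) => {
    if (err) {
      return callback(err)
    }
    cache.set(hostname, { address, family })
    callback(null, address, family)
  })
}

const agent = new http.Agent({ lookup: cachedLookup })
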
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/fetch.js
deleted file mode 100644
index 233ba67e16550..0000000000000
--- a/node_modules/sigstore/node_modules/make-fetch-happen/lib/fetch.js
+++ /dev/null
@@ -1,118 +0,0 @@
-'use strict'
-
-const { FetchError, Request, isRedirect } = require('minipass-fetch')
-const url = require('url')
-
-const CachePolicy = require('./cache/policy.js')
-const cache = require('./cache/index.js')
-const remote = require('./remote.js')
-
-// given a Request, a Response and user options
-// return true if the response is a redirect that
-// can be followed. we throw errors that will result
-// in the fetch being rejected if the redirect is
-// possible but invalid for some reason
-const canFollowRedirect = (request, response, options) => {
-  if (!isRedirect(response.status)) {
-    return false
-  }
-
-  if (options.redirect === 'manual') {
-    return false
-  }
-
-  if (options.redirect === 'error') {
-    throw new FetchError(`redirect mode is set to error: ${request.url}`,
-      'no-redirect', { code: 'ENOREDIRECT' })
-  }
-
-  if (!response.headers.has('location')) {
-    throw new FetchError(`redirect location header missing for: ${request.url}`,
-      'no-location', { code: 'EINVALIDREDIRECT' })
-  }
-
-  if (request.counter >= request.follow) {
-    throw new FetchError(`maximum redirect reached at: ${request.url}`,
-      'max-redirect', { code: 'EMAXREDIRECT' })
-  }
-
-  return true
-}
-
-// given a Request, a Response, and the user's options return an object
-// with a new Request and a new options object that will be used for
-// following the redirect
-const getRedirect = (request, response, options) => {
-  const _opts = { ...options }
-  const location = response.headers.get('location')
-  const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
-  // Comment below is used under the following license:
-  /**
-   * @license
-   * Copyright (c) 2010-2012 Mikeal Rogers
-   * Licensed under the Apache License, Version 2.0 (the "License");
-   * you may not use this file except in compliance with the License.
-   * You may obtain a copy of the License at
-   * http://www.apache.org/licenses/LICENSE-2.0
-   * Unless required by applicable law or agreed to in writing,
-   * software distributed under the License is distributed on an "AS
-   * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-   * express or implied. See the License for the specific language
-   * governing permissions and limitations under the License.
-   */
-
-  // Remove authorization if changing hostnames (but not if just
-  // changing ports or protocols).  This matches the behavior of request:
-  // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
-  if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
-    request.headers.delete('authorization')
-    request.headers.delete('cookie')
-  }
-
-  // for POST request with 301/302 response, or any request with 303 response,
-  // use GET when following redirect
-  if (
-    response.status === 303 ||
-    (request.method === 'POST' && [301, 302].includes(response.status))
-  ) {
-    _opts.method = 'GET'
-    _opts.body = null
-    request.headers.delete('content-length')
-  }
-
-  _opts.headers = {}
-  request.headers.forEach((value, key) => {
-    _opts.headers[key] = value
-  })
-
-  _opts.counter = ++request.counter
-  const redirectReq = new Request(url.format(redirectUrl), _opts)
-  return {
-    request: redirectReq,
-    options: _opts,
-  }
-}
-
-const fetch = async (request, options) => {
-  const response = CachePolicy.storable(request, options)
-    ? await cache(request, options)
-    : await remote(request, options)
-
-  // if the request wasn't a GET or HEAD, and the response
-  // status is between 200 and 399 inclusive, invalidate the
-  // request url
-  if (!['GET', 'HEAD'].includes(request.method) &&
-      response.status >= 200 &&
-      response.status <= 399) {
-    await cache.invalidate(request, options)
-  }
-
-  if (!canFollowRedirect(request, response, options)) {
-    return response
-  }
-
-  const redirect = getRedirect(request, response, options)
-  return fetch(redirect.request, redirect.options)
-}
-
-module.exports = fetch
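
The hostname comparison in getRedirect() above is what decides whether credentials survive a redirect: authorization and cookie are dropped only when the hostname changes, not on port or protocol changes. A standalone sketch (dropsCredentials is a hypothetical helper):

const { URL } = require('url')

const dropsCredentials = (fromUrl, location) => {
  // relative Location headers resolve against the original request URL
  const redirectUrl = new URL(location, /^https?:/.test(location) ? undefined : fromUrl)
  return new URL(fromUrl).hostname !== redirectUrl.hostname
}

// dropsCredentials('https://a.example/x', 'https://b.example/y')     => true
// dropsCredentials('https://a.example/x', 'http://a.example:8080/y') => false
// dropsCredentials('https://a.example/x', '/relative')               => false
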
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/index.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/index.js
deleted file mode 100644
index 2f12e8e1b6113..0000000000000
--- a/node_modules/sigstore/node_modules/make-fetch-happen/lib/index.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const { FetchError, Headers, Request, Response } = require('minipass-fetch')
-
-const configureOptions = require('./options.js')
-const fetch = require('./fetch.js')
-
-const makeFetchHappen = (url, opts) => {
-  const options = configureOptions(opts)
-
-  const request = new Request(url, options)
-  return fetch(request, options)
-}
-
-makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
-  if (typeof defaultUrl === 'object') {
-    defaultOptions = defaultUrl
-    defaultUrl = null
-  }
-
-  const defaultedFetch = (url, options = {}) => {
-    const finalUrl = url || defaultUrl
-    const finalOptions = {
-      ...defaultOptions,
-      ...options,
-      headers: {
-        ...defaultOptions.headers,
-        ...options.headers,
-      },
-    }
-    return wrappedFetch(finalUrl, finalOptions)
-  }
-
-  defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
-    makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
-  return defaultedFetch
-}
-
-module.exports = makeFetchHappen
-module.exports.FetchError = FetchError
-module.exports.Headers = Headers
-module.exports.Request = Request
-module.exports.Response = Response
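
A usage sketch for the defaults() chaining above. Options merge shallowly except headers, which merge one level deep; the cachePath and URL are illustrative, and this assumes a make-fetch-happen copy remains resolvable in the tree (this hunk only removes sigstore's nested copy):

const makeFetchHappen = require('make-fetch-happen')

const myFetch = makeFetchHappen.defaults({
  cachePath: '/tmp/demo-cache', // illustrative path, not an npm default
  headers: { 'user-agent': 'demo/1.0.0' },
})

// per-call headers are merged with, and can override, the defaults
myFetch('https://registry.npmjs.org/npm', {
  headers: { accept: 'application/json' },
}).then((res) => console.log(res.status))
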
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/options.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/options.js
deleted file mode 100644
index f77511279f831..0000000000000
--- a/node_modules/sigstore/node_modules/make-fetch-happen/lib/options.js
+++ /dev/null
@@ -1,54 +0,0 @@
-const dns = require('dns')
-
-const conditionalHeaders = [
-  'if-modified-since',
-  'if-none-match',
-  'if-unmodified-since',
-  'if-match',
-  'if-range',
-]
-
-const configureOptions = (opts) => {
-  const { strictSSL, ...options } = { ...opts }
-  options.method = options.method ? options.method.toUpperCase() : 'GET'
-  options.rejectUnauthorized = strictSSL !== false
-
-  if (!options.retry) {
-    options.retry = { retries: 0 }
-  } else if (typeof options.retry === 'string') {
-    const retries = parseInt(options.retry, 10)
-    if (isFinite(retries)) {
-      options.retry = { retries }
-    } else {
-      options.retry = { retries: 0 }
-    }
-  } else if (typeof options.retry === 'number') {
-    options.retry = { retries: options.retry }
-  } else {
-    options.retry = { retries: 0, ...options.retry }
-  }
-
-  options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
-
-  options.cache = options.cache || 'default'
-  if (options.cache === 'default') {
-    const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
-      return conditionalHeaders.includes(name.toLowerCase())
-    })
-    if (hasConditionalHeader) {
-      options.cache = 'no-store'
-    }
-  }
-
-  options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
-
-  // cacheManager is deprecated, but if it's set and
-  // cachePath is not we should copy it to the new field
-  if (options.cacheManager && !options.cachePath) {
-    options.cachePath = options.cacheManager
-  }
-
-  return options
-}
-
-module.exports = configureOptions
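
The retry normalization above folds four input shapes into one object. A standalone restatement (normalizeRetry is illustrative; the deleted configureOptions() above is authoritative):

const normalizeRetry = (retry) => {
  if (!retry) {
    return { retries: 0 }
  }
  if (typeof retry === 'string') {
    const retries = parseInt(retry, 10)
    return isFinite(retries) ? { retries } : { retries: 0 }
  }
  if (typeof retry === 'number') {
    return { retries: retry }
  }
  return { retries: 0, ...retry }
}

// normalizeRetry('3')                => { retries: 3 }
// normalizeRetry('nope')             => { retries: 0 }
// normalizeRetry(4)                  => { retries: 4 }
// normalizeRetry({ minTimeout: 10 }) => { retries: 0, minTimeout: 10 }
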
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/pipeline.js
deleted file mode 100644
index b1d221b2d0ce3..0000000000000
--- a/node_modules/sigstore/node_modules/make-fetch-happen/lib/pipeline.js
+++ /dev/null
@@ -1,41 +0,0 @@
-'use strict'
-
-const MinipassPipeline = require('minipass-pipeline')
-
-class CachingMinipassPipeline extends MinipassPipeline {
-  #events = []
-  #data = new Map()
-
-  constructor (opts, ...streams) {
-    // CRITICAL: do NOT pass the streams to the call to super(), this will start
-    // the flow of data and potentially cause the events we need to catch to emit
-    // before we've finished our own setup. instead we call super() with no args,
-    // finish our setup, and then push the streams into ourselves to start the
-    // data flow
-    super()
-    this.#events = opts.events
-
-    /* istanbul ignore next - coverage disabled because this is pointless to test here */
-    if (streams.length) {
-      this.push(...streams)
-    }
-  }
-
-  on (event, handler) {
-    if (this.#events.includes(event) && this.#data.has(event)) {
-      return handler(...this.#data.get(event))
-    }
-
-    return super.on(event, handler)
-  }
-
-  emit (event, ...data) {
-    if (this.#events.includes(event)) {
-      this.#data.set(event, data)
-    }
-
-    return super.emit(event, ...data)
-  }
-}
-
-module.exports = CachingMinipassPipeline
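
What the event caching above buys: a listener attached after one of the named events has fired is still invoked, synchronously, with the cached payload. A sketch, assuming CachingMinipassPipeline were pulled out of the deleted file (it is module-local and not exported anywhere public):

const pipeline = new CachingMinipassPipeline({ events: ['integrity', 'size'] })

pipeline.emit('integrity', 'sha512-...') // fires before any listener exists

// a late listener still sees the event, replayed from the #data map above
pipeline.on('integrity', (i) => console.log('integrity:', i))
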
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/remote.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/remote.js
deleted file mode 100644
index bdbcc79cad908..0000000000000
--- a/node_modules/sigstore/node_modules/make-fetch-happen/lib/remote.js
+++ /dev/null
@@ -1,121 +0,0 @@
-const { Minipass } = require('minipass')
-const fetch = require('minipass-fetch')
-const promiseRetry = require('promise-retry')
-const ssri = require('ssri')
-
-const CachingMinipassPipeline = require('./pipeline.js')
-const getAgent = require('./agent.js')
-const pkg = require('../package.json')
-
-const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
-
-const RETRY_ERRORS = [
-  'ECONNRESET', // remote socket closed on us
-  'ECONNREFUSED', // remote host refused to open connection
-  'EADDRINUSE', // failed to bind to a local port (proxy?)
-  'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
-  'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive
-  // Known codes we do NOT retry on:
-  // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
-]
-
-const RETRY_TYPES = [
-  'request-timeout',
-]
-
-// make a request directly to the remote source,
-// retrying certain classes of errors as well as
-// following redirects (through the cache if necessary)
-// and verifying response integrity
-const remoteFetch = (request, options) => {
-  const agent = getAgent(request.url, options)
-  if (!request.headers.has('connection')) {
-    request.headers.set('connection', agent ? 'keep-alive' : 'close')
-  }
-
-  if (!request.headers.has('user-agent')) {
-    request.headers.set('user-agent', USER_AGENT)
-  }
-
-  // keep our own options since we're overriding the agent
-  // and the redirect mode
-  const _opts = {
-    ...options,
-    agent,
-    redirect: 'manual',
-  }
-
-  return promiseRetry(async (retryHandler, attemptNum) => {
-    const req = new fetch.Request(request, _opts)
-    try {
-      let res = await fetch(req, _opts)
-      if (_opts.integrity && res.status === 200) {
-        // we got a 200 response and the user has specified an expected
-        // integrity value, so wrap the response in an ssri stream to verify it
-        const integrityStream = ssri.integrityStream({
-          algorithms: _opts.algorithms,
-          integrity: _opts.integrity,
-          size: _opts.size,
-        })
-        const pipeline = new CachingMinipassPipeline({
-          events: ['integrity', 'size'],
-        }, res.body, integrityStream)
-        // we also propagate the integrity and size events out to the pipeline so we can use
-        // this new response body as an integrityEmitter for cacache
-        integrityStream.on('integrity', i => pipeline.emit('integrity', i))
-        integrityStream.on('size', s => pipeline.emit('size', s))
-        res = new fetch.Response(pipeline, res)
-        // set an explicit flag so we know if our response body will emit integrity and size
-        res.body.hasIntegrityEmitter = true
-      }
-
-      res.headers.set('x-fetch-attempts', attemptNum)
-
-      // do not retry POST requests, or requests with a streaming body
-      // do retry requests with a 408, 420, 429 or 500+ status in the response
-      const isStream = Minipass.isStream(req.body)
-      const isRetriable = req.method !== 'POST' &&
-          !isStream &&
-          ([408, 420, 429].includes(res.status) || res.status >= 500)
-
-      if (isRetriable) {
-        if (typeof options.onRetry === 'function') {
-          options.onRetry(res)
-        }
-
-        return retryHandler(res)
-      }
-
-      return res
-    } catch (err) {
-      const code = (err.code === 'EPROMISERETRY')
-        ? err.retried.code
-        : err.code
-
-      // err.retried will be the thing that was thrown from above
-      // if it's a response, we just got a bad status code and we
-      // can re-throw to allow the retry
-      const isRetryError = err.retried instanceof fetch.Response ||
-        (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
-
-      if (req.method === 'POST' || isRetryError) {
-        throw err
-      }
-
-      if (typeof options.onRetry === 'function') {
-        options.onRetry(err)
-      }
-
-      return retryHandler(err)
-    }
-  }, options.retry).catch((err) => {
-    // don't reject for http errors, just return them
-    if (err.status >= 400 && err.type !== 'system') {
-      return err
-    }
-
-    throw err
-  })
-}
-
-module.exports = remoteFetch
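
The status-based half of the retry decision above is easy to restate: never retry POSTs or streaming bodies, and retry on 408, 420, 429, or any 5xx. A condensed helper (shouldRetryStatus is illustrative only):

const shouldRetryStatus = (method, bodyIsStream, status) =>
  method !== 'POST' &&
  !bodyIsStream &&
  ([408, 420, 429].includes(status) || status >= 500)

// shouldRetryStatus('GET', false, 503)  => true
// shouldRetryStatus('POST', false, 503) => false
// shouldRetryStatus('GET', true, 429)   => false
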
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/package.json b/node_modules/sigstore/node_modules/make-fetch-happen/package.json
deleted file mode 100644
index fd415dc9966fa..0000000000000
--- a/node_modules/sigstore/node_modules/make-fetch-happen/package.json
+++ /dev/null
@@ -1,78 +0,0 @@
-{
-  "name": "make-fetch-happen",
-  "version": "11.1.1",
-  "description": "Opinionated, caching, retrying fetch client",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "test": "tap",
-    "posttest": "npm run lint",
-    "eslint": "eslint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/make-fetch-happen.git"
-  },
-  "keywords": [
-    "http",
-    "request",
-    "fetch",
-    "mean girls",
-    "caching",
-    "cache",
-    "subresource integrity"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "dependencies": {
-    "agentkeepalive": "^4.2.1",
-    "cacache": "^17.0.0",
-    "http-cache-semantics": "^4.1.1",
-    "http-proxy-agent": "^5.0.0",
-    "https-proxy-agent": "^5.0.0",
-    "is-lambda": "^1.0.1",
-    "lru-cache": "^7.7.1",
-    "minipass": "^5.0.0",
-    "minipass-fetch": "^3.0.0",
-    "minipass-flush": "^1.0.5",
-    "minipass-pipeline": "^1.2.4",
-    "negotiator": "^0.6.3",
-    "promise-retry": "^2.0.1",
-    "socks-proxy-agent": "^7.0.0",
-    "ssri": "^10.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.14.1",
-    "nock": "^13.2.4",
-    "safe-buffer": "^5.2.1",
-    "standard-version": "^9.3.2",
-    "tap": "^16.0.0"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "tap": {
-    "color": 1,
-    "files": "test/*.js",
-    "check-coverage": true,
-    "timeout": 60,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.14.1",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/sigstore/node_modules/minipass/LICENSE b/node_modules/sigstore/node_modules/minipass/LICENSE
deleted file mode 100644
index 97f8e32ed82e4..0000000000000
--- a/node_modules/sigstore/node_modules/minipass/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/minipass/index.js b/node_modules/sigstore/node_modules/minipass/index.js
deleted file mode 100644
index ed07c17acd97b..0000000000000
--- a/node_modules/sigstore/node_modules/minipass/index.js
+++ /dev/null
@@ -1,702 +0,0 @@
-'use strict'
-const proc =
-  typeof process === 'object' && process
-    ? process
-    : {
-        stdout: null,
-        stderr: null,
-      }
-const EE = require('events')
-const Stream = require('stream')
-const stringdecoder = require('string_decoder')
-const SD = stringdecoder.StringDecoder
-
-const EOF = Symbol('EOF')
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
-const EMITTED_END = Symbol('emittedEnd')
-const EMITTING_END = Symbol('emittingEnd')
-const EMITTED_ERROR = Symbol('emittedError')
-const CLOSED = Symbol('closed')
-const READ = Symbol('read')
-const FLUSH = Symbol('flush')
-const FLUSHCHUNK = Symbol('flushChunk')
-const ENCODING = Symbol('encoding')
-const DECODER = Symbol('decoder')
-const FLOWING = Symbol('flowing')
-const PAUSED = Symbol('paused')
-const RESUME = Symbol('resume')
-const BUFFER = Symbol('buffer')
-const PIPES = Symbol('pipes')
-const BUFFERLENGTH = Symbol('bufferLength')
-const BUFFERPUSH = Symbol('bufferPush')
-const BUFFERSHIFT = Symbol('bufferShift')
-const OBJECTMODE = Symbol('objectMode')
-// internal event when stream is destroyed
-const DESTROYED = Symbol('destroyed')
-// internal event when stream has an error
-const ERROR = Symbol('error')
-const EMITDATA = Symbol('emitData')
-const EMITEND = Symbol('emitEnd')
-const EMITEND2 = Symbol('emitEnd2')
-const ASYNC = Symbol('async')
-const ABORT = Symbol('abort')
-const ABORTED = Symbol('aborted')
-const SIGNAL = Symbol('signal')
-
-const defer = fn => Promise.resolve().then(fn)
-
-// TODO remove when Node v8 support drops
-const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
-const ASYNCITERATOR =
-  (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
-const ITERATOR =
-  (doIter && Symbol.iterator) || Symbol('iterator not implemented')
-
-// events that mean 'the stream is over'
-// these are treated specially, and re-emitted
-// if they are listened for after emitting.
-const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
-
-const isArrayBuffer = b =>
-  b instanceof ArrayBuffer ||
-  (typeof b === 'object' &&
-    b.constructor &&
-    b.constructor.name === 'ArrayBuffer' &&
-    b.byteLength >= 0)
-
-const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
-
-class Pipe {
-  constructor(src, dest, opts) {
-    this.src = src
-    this.dest = dest
-    this.opts = opts
-    this.ondrain = () => src[RESUME]()
-    dest.on('drain', this.ondrain)
-  }
-  unpipe() {
-    this.dest.removeListener('drain', this.ondrain)
-  }
-  // istanbul ignore next - only here for the prototype
-  proxyErrors() {}
-  end() {
-    this.unpipe()
-    if (this.opts.end) this.dest.end()
-  }
-}
-
-class PipeProxyErrors extends Pipe {
-  unpipe() {
-    this.src.removeListener('error', this.proxyErrors)
-    super.unpipe()
-  }
-  constructor(src, dest, opts) {
-    super(src, dest, opts)
-    this.proxyErrors = er => dest.emit('error', er)
-    src.on('error', this.proxyErrors)
-  }
-}
-
-class Minipass extends Stream {
-  constructor(options) {
-    super()
-    this[FLOWING] = false
-    // whether we're explicitly paused
-    this[PAUSED] = false
-    this[PIPES] = []
-    this[BUFFER] = []
-    this[OBJECTMODE] = (options && options.objectMode) || false
-    if (this[OBJECTMODE]) this[ENCODING] = null
-    else this[ENCODING] = (options && options.encoding) || null
-    if (this[ENCODING] === 'buffer') this[ENCODING] = null
-    this[ASYNC] = (options && !!options.async) || false
-    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
-    this[EOF] = false
-    this[EMITTED_END] = false
-    this[EMITTING_END] = false
-    this[CLOSED] = false
-    this[EMITTED_ERROR] = null
-    this.writable = true
-    this.readable = true
-    this[BUFFERLENGTH] = 0
-    this[DESTROYED] = false
-    if (options && options.debugExposeBuffer === true) {
-      Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })
-    }
-    if (options && options.debugExposePipes === true) {
-      Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
-    }
-    this[SIGNAL] = options && options.signal
-    this[ABORTED] = false
-    if (this[SIGNAL]) {
-      this[SIGNAL].addEventListener('abort', () => this[ABORT]())
-      if (this[SIGNAL].aborted) {
-        this[ABORT]()
-      }
-    }
-  }
-
-  get bufferLength() {
-    return this[BUFFERLENGTH]
-  }
-
-  get encoding() {
-    return this[ENCODING]
-  }
-  set encoding(enc) {
-    if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
-
-    if (
-      this[ENCODING] &&
-      enc !== this[ENCODING] &&
-      ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
-    )
-      throw new Error('cannot change encoding')
-
-    if (this[ENCODING] !== enc) {
-      this[DECODER] = enc ? new SD(enc) : null
-      if (this[BUFFER].length)
-        this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk))
-    }
-
-    this[ENCODING] = enc
-  }
-
-  setEncoding(enc) {
-    this.encoding = enc
-  }
-
-  get objectMode() {
-    return this[OBJECTMODE]
-  }
-  set objectMode(om) {
-    this[OBJECTMODE] = this[OBJECTMODE] || !!om
-  }
-
-  get ['async']() {
-    return this[ASYNC]
-  }
-  set ['async'](a) {
-    this[ASYNC] = this[ASYNC] || !!a
-  }
-
-  // drop everything and get out of the flow completely
-  [ABORT]() {
-    this[ABORTED] = true
-    this.emit('abort', this[SIGNAL].reason)
-    this.destroy(this[SIGNAL].reason)
-  }
-
-  get aborted() {
-    return this[ABORTED]
-  }
-  set aborted(_) {}
-
-  write(chunk, encoding, cb) {
-    if (this[ABORTED]) return false
-    if (this[EOF]) throw new Error('write after end')
-
-    if (this[DESTROYED]) {
-      this.emit(
-        'error',
-        Object.assign(
-          new Error('Cannot call write after a stream was destroyed'),
-          { code: 'ERR_STREAM_DESTROYED' }
-        )
-      )
-      return true
-    }
-
-    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
-
-    if (!encoding) encoding = 'utf8'
-
-    const fn = this[ASYNC] ? defer : f => f()
-
-    // convert array buffers and typed array views into buffers
-    // at some point in the future, we may want to do the opposite!
-    // leave strings and buffers as-is
-    // anything else switches us into object mode
-    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
-      if (isArrayBufferView(chunk))
-        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
-      else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
-      else if (typeof chunk !== 'string')
-        // use the setter so we throw if we have encoding set
-        this.objectMode = true
-    }
-
-    // handle object mode up front, since it's simpler
-    // this yields better performance, fewer checks later.
-    if (this[OBJECTMODE]) {
-      /* istanbul ignore if - maybe impossible? */
-      if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
-
-      if (this.flowing) this.emit('data', chunk)
-      else this[BUFFERPUSH](chunk)
-
-      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-
-      if (cb) fn(cb)
-
-      return this.flowing
-    }
-
-    // at this point the chunk is a buffer or string
-    // don't buffer it up or send it to the decoder
-    if (!chunk.length) {
-      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-      if (cb) fn(cb)
-      return this.flowing
-    }
-
-    // fast-path writing strings of same encoding to a stream with
-    // an empty buffer, skipping the buffer/decoder dance
-    if (
-      typeof chunk === 'string' &&
-      // unless it is a string already ready for us to use
-      !(encoding === this[ENCODING] && !this[DECODER].lastNeed)
-    ) {
-      chunk = Buffer.from(chunk, encoding)
-    }
-
-    if (Buffer.isBuffer(chunk) && this[ENCODING])
-      chunk = this[DECODER].write(chunk)
-
-    // Note: flushing CAN potentially switch us into not-flowing mode
-    if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
-
-    if (this.flowing) this.emit('data', chunk)
-    else this[BUFFERPUSH](chunk)
-
-    if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-
-    if (cb) fn(cb)
-
-    return this.flowing
-  }
-
-  read(n) {
-    if (this[DESTROYED]) return null
-
-    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
-      this[MAYBE_EMIT_END]()
-      return null
-    }
-
-    if (this[OBJECTMODE]) n = null
-
-    if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
-      if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
-      else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
-    }
-
-    const ret = this[READ](n || null, this[BUFFER][0])
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [READ](n, chunk) {
-    if (n === chunk.length || n === null) this[BUFFERSHIFT]()
-    else {
-      this[BUFFER][0] = chunk.slice(n)
-      chunk = chunk.slice(0, n)
-      this[BUFFERLENGTH] -= n
-    }
-
-    this.emit('data', chunk)
-
-    if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
-
-    return chunk
-  }
-
-  end(chunk, encoding, cb) {
-    if (typeof chunk === 'function') (cb = chunk), (chunk = null)
-    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
-    if (chunk) this.write(chunk, encoding)
-    if (cb) this.once('end', cb)
-    this[EOF] = true
-    this.writable = false
-
-    // if we haven't written anything, then go ahead and emit,
-    // even if we're not reading.
-    // we'll re-emit if a new 'end' listener is added anyway.
-    // This makes MP more suitable to write-only use cases.
-    if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
-    return this
-  }
-
-  // don't let the internal resume be overwritten
-  [RESUME]() {
-    if (this[DESTROYED]) return
-
-    this[PAUSED] = false
-    this[FLOWING] = true
-    this.emit('resume')
-    if (this[BUFFER].length) this[FLUSH]()
-    else if (this[EOF]) this[MAYBE_EMIT_END]()
-    else this.emit('drain')
-  }
-
-  resume() {
-    return this[RESUME]()
-  }
-
-  pause() {
-    this[FLOWING] = false
-    this[PAUSED] = true
-  }
-
-  get destroyed() {
-    return this[DESTROYED]
-  }
-
-  get flowing() {
-    return this[FLOWING]
-  }
-
-  get paused() {
-    return this[PAUSED]
-  }
-
-  [BUFFERPUSH](chunk) {
-    if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
-    else this[BUFFERLENGTH] += chunk.length
-    this[BUFFER].push(chunk)
-  }
-
-  [BUFFERSHIFT]() {
-    if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
-    else this[BUFFERLENGTH] -= this[BUFFER][0].length
-    return this[BUFFER].shift()
-  }
-
-  [FLUSH](noDrain) {
-    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
-
-    if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
-  }
-
-  [FLUSHCHUNK](chunk) {
-    this.emit('data', chunk)
-    return this.flowing
-  }
-
-  pipe(dest, opts) {
-    if (this[DESTROYED]) return
-
-    const ended = this[EMITTED_END]
-    opts = opts || {}
-    if (dest === proc.stdout || dest === proc.stderr) opts.end = false
-    else opts.end = opts.end !== false
-    opts.proxyErrors = !!opts.proxyErrors
-
-    // piping an ended stream ends immediately
-    if (ended) {
-      if (opts.end) dest.end()
-    } else {
-      this[PIPES].push(
-        !opts.proxyErrors
-          ? new Pipe(this, dest, opts)
-          : new PipeProxyErrors(this, dest, opts)
-      )
-      if (this[ASYNC]) defer(() => this[RESUME]())
-      else this[RESUME]()
-    }
-
-    return dest
-  }
-
-  unpipe(dest) {
-    const p = this[PIPES].find(p => p.dest === dest)
-    if (p) {
-      this[PIPES].splice(this[PIPES].indexOf(p), 1)
-      p.unpipe()
-    }
-  }
-
-  addListener(ev, fn) {
-    return this.on(ev, fn)
-  }
-
-  on(ev, fn) {
-    const ret = super.on(ev, fn)
-    if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
-    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
-      super.emit('readable')
-    else if (isEndish(ev) && this[EMITTED_END]) {
-      super.emit(ev)
-      this.removeAllListeners(ev)
-    } else if (ev === 'error' && this[EMITTED_ERROR]) {
-      if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
-      else fn.call(this, this[EMITTED_ERROR])
-    }
-    return ret
-  }
-
-  get emittedEnd() {
-    return this[EMITTED_END]
-  }
-
-  [MAYBE_EMIT_END]() {
-    if (
-      !this[EMITTING_END] &&
-      !this[EMITTED_END] &&
-      !this[DESTROYED] &&
-      this[BUFFER].length === 0 &&
-      this[EOF]
-    ) {
-      this[EMITTING_END] = true
-      this.emit('end')
-      this.emit('prefinish')
-      this.emit('finish')
-      if (this[CLOSED]) this.emit('close')
-      this[EMITTING_END] = false
-    }
-  }
-
-  emit(ev, data, ...extra) {
-    // error and close are only events allowed after calling destroy()
-    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
-      return
-    else if (ev === 'data') {
-      return !this[OBJECTMODE] && !data
-        ? false
-        : this[ASYNC]
-        ? defer(() => this[EMITDATA](data))
-        : this[EMITDATA](data)
-    } else if (ev === 'end') {
-      return this[EMITEND]()
-    } else if (ev === 'close') {
-      this[CLOSED] = true
-      // don't emit close before 'end' and 'finish'
-      if (!this[EMITTED_END] && !this[DESTROYED]) return
-      const ret = super.emit('close')
-      this.removeAllListeners('close')
-      return ret
-    } else if (ev === 'error') {
-      this[EMITTED_ERROR] = data
-      super.emit(ERROR, data)
-      const ret =
-        !this[SIGNAL] || this.listeners('error').length
-          ? super.emit('error', data)
-          : false
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'resume') {
-      const ret = super.emit('resume')
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'finish' || ev === 'prefinish') {
-      const ret = super.emit(ev)
-      this.removeAllListeners(ev)
-      return ret
-    }
-
-    // Some other unknown event
-    const ret = super.emit(ev, data, ...extra)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITDATA](data) {
-    for (const p of this[PIPES]) {
-      if (p.dest.write(data) === false) this.pause()
-    }
-    const ret = super.emit('data', data)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITEND]() {
-    if (this[EMITTED_END]) return
-
-    this[EMITTED_END] = true
-    this.readable = false
-    if (this[ASYNC]) defer(() => this[EMITEND2]())
-    else this[EMITEND2]()
-  }
-
-  [EMITEND2]() {
-    if (this[DECODER]) {
-      const data = this[DECODER].end()
-      if (data) {
-        for (const p of this[PIPES]) {
-          p.dest.write(data)
-        }
-        super.emit('data', data)
-      }
-    }
-
-    for (const p of this[PIPES]) {
-      p.end()
-    }
-    const ret = super.emit('end')
-    this.removeAllListeners('end')
-    return ret
-  }
-
-  // const all = await stream.collect()
-  collect() {
-    const buf = []
-    if (!this[OBJECTMODE]) buf.dataLength = 0
-    // set the promise first, in case an error is raised
-    // by triggering the flow here.
-    const p = this.promise()
-    this.on('data', c => {
-      buf.push(c)
-      if (!this[OBJECTMODE]) buf.dataLength += c.length
-    })
-    return p.then(() => buf)
-  }
-
-  // const data = await stream.concat()
-  concat() {
-    return this[OBJECTMODE]
-      ? Promise.reject(new Error('cannot concat in objectMode'))
-      : this.collect().then(buf =>
-          this[OBJECTMODE]
-            ? Promise.reject(new Error('cannot concat in objectMode'))
-            : this[ENCODING]
-            ? buf.join('')
-            : Buffer.concat(buf, buf.dataLength)
-        )
-  }
-
-  // stream.promise().then(() => done, er => emitted error)
-  promise() {
-    return new Promise((resolve, reject) => {
-      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
-      this.on('error', er => reject(er))
-      this.on('end', () => resolve())
-    })
-  }
-
-  // for await (let chunk of stream)
-  [ASYNCITERATOR]() {
-    let stopped = false
-    const stop = () => {
-      this.pause()
-      stopped = true
-      return Promise.resolve({ done: true })
-    }
-    const next = () => {
-      if (stopped) return stop()
-      const res = this.read()
-      if (res !== null) return Promise.resolve({ done: false, value: res })
-
-      if (this[EOF]) return stop()
-
-      let resolve = null
-      let reject = null
-      const onerr = er => {
-        this.removeListener('data', ondata)
-        this.removeListener('end', onend)
-        this.removeListener(DESTROYED, ondestroy)
-        stop()
-        reject(er)
-      }
-      const ondata = value => {
-        this.removeListener('error', onerr)
-        this.removeListener('end', onend)
-        this.removeListener(DESTROYED, ondestroy)
-        this.pause()
-        resolve({ value: value, done: !!this[EOF] })
-      }
-      const onend = () => {
-        this.removeListener('error', onerr)
-        this.removeListener('data', ondata)
-        this.removeListener(DESTROYED, ondestroy)
-        stop()
-        resolve({ done: true })
-      }
-      const ondestroy = () => onerr(new Error('stream destroyed'))
-      return new Promise((res, rej) => {
-        reject = rej
-        resolve = res
-        this.once(DESTROYED, ondestroy)
-        this.once('error', onerr)
-        this.once('end', onend)
-        this.once('data', ondata)
-      })
-    }
-
-    return {
-      next,
-      throw: stop,
-      return: stop,
-      [ASYNCITERATOR]() {
-        return this
-      },
-    }
-  }
-
-  // for (let chunk of stream)
-  [ITERATOR]() {
-    let stopped = false
-    const stop = () => {
-      this.pause()
-      this.removeListener(ERROR, stop)
-      this.removeListener(DESTROYED, stop)
-      this.removeListener('end', stop)
-      stopped = true
-      return { done: true }
-    }
-
-    const next = () => {
-      if (stopped) return stop()
-      const value = this.read()
-      return value === null ? stop() : { value }
-    }
-    this.once('end', stop)
-    this.once(ERROR, stop)
-    this.once(DESTROYED, stop)
-
-    return {
-      next,
-      throw: stop,
-      return: stop,
-      [ITERATOR]() {
-        return this
-      },
-    }
-  }
-
-  destroy(er) {
-    if (this[DESTROYED]) {
-      if (er) this.emit('error', er)
-      else this.emit(DESTROYED)
-      return this
-    }
-
-    this[DESTROYED] = true
-
-    // throw away all buffered data, it's never coming out
-    this[BUFFER].length = 0
-    this[BUFFERLENGTH] = 0
-
-    if (typeof this.close === 'function' && !this[CLOSED]) this.close()
-
-    if (er) this.emit('error', er)
-    // if no error to emit, still reject pending promises
-    else this.emit(DESTROYED)
-
-    return this
-  }
-
-  static isStream(s) {
-    return (
-      !!s &&
-      (s instanceof Minipass ||
-        s instanceof Stream ||
-        (s instanceof EE &&
-          // readable
-          (typeof s.pipe === 'function' ||
-            // writable
-            (typeof s.write === 'function' && typeof s.end === 'function'))))
-    )
-  }
-}
-
-exports.Minipass = Minipass
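
Note: a minimal sketch of the consumer-facing API defined in the deleted CJS build above (write/end, concat(), and async iteration), assuming minipass@5 is installed locally; illustration only.

    const { Minipass } = require('minipass')

    async function demo () {
      // with an encoding set, chunks come out as decoded strings
      const mp = new Minipass({ encoding: 'utf8' })
      mp.write('hello, ')
      mp.end('world')
      // concat() joins string chunks once 'end' fires (rejects in objectMode)
      console.log(await mp.concat()) // 'hello, world'

      // async iteration is backed by the Symbol.asyncIterator method above
      const mp2 = new Minipass({ encoding: 'utf8' })
      mp2.end('chunk')
      for await (const c of mp2) console.log(c) // 'chunk'
    }

    demo()
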
diff --git a/node_modules/sigstore/node_modules/minipass/index.mjs b/node_modules/sigstore/node_modules/minipass/index.mjs
deleted file mode 100644
index 6ef6cd8cf0703..0000000000000
--- a/node_modules/sigstore/node_modules/minipass/index.mjs
+++ /dev/null
@@ -1,702 +0,0 @@
-'use strict'
-const proc =
-  typeof process === 'object' && process
-    ? process
-    : {
-        stdout: null,
-        stderr: null,
-      }
-import EE from 'events'
-import Stream from 'stream'
-import stringdecoder from 'string_decoder'
-const SD = stringdecoder.StringDecoder
-
-const EOF = Symbol('EOF')
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
-const EMITTED_END = Symbol('emittedEnd')
-const EMITTING_END = Symbol('emittingEnd')
-const EMITTED_ERROR = Symbol('emittedError')
-const CLOSED = Symbol('closed')
-const READ = Symbol('read')
-const FLUSH = Symbol('flush')
-const FLUSHCHUNK = Symbol('flushChunk')
-const ENCODING = Symbol('encoding')
-const DECODER = Symbol('decoder')
-const FLOWING = Symbol('flowing')
-const PAUSED = Symbol('paused')
-const RESUME = Symbol('resume')
-const BUFFER = Symbol('buffer')
-const PIPES = Symbol('pipes')
-const BUFFERLENGTH = Symbol('bufferLength')
-const BUFFERPUSH = Symbol('bufferPush')
-const BUFFERSHIFT = Symbol('bufferShift')
-const OBJECTMODE = Symbol('objectMode')
-// internal event when stream is destroyed
-const DESTROYED = Symbol('destroyed')
-// internal event when stream has an error
-const ERROR = Symbol('error')
-const EMITDATA = Symbol('emitData')
-const EMITEND = Symbol('emitEnd')
-const EMITEND2 = Symbol('emitEnd2')
-const ASYNC = Symbol('async')
-const ABORT = Symbol('abort')
-const ABORTED = Symbol('aborted')
-const SIGNAL = Symbol('signal')
-
-const defer = fn => Promise.resolve().then(fn)
-
-// TODO remove when Node v8 support drops
-const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
-const ASYNCITERATOR =
-  (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
-const ITERATOR =
-  (doIter && Symbol.iterator) || Symbol('iterator not implemented')
-
-// events that mean 'the stream is over'
-// these are treated specially, and re-emitted
-// if they are listened for after emitting.
-const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
-
-const isArrayBuffer = b =>
-  b instanceof ArrayBuffer ||
-  (typeof b === 'object' &&
-    b.constructor &&
-    b.constructor.name === 'ArrayBuffer' &&
-    b.byteLength >= 0)
-
-const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
-
-class Pipe {
-  constructor(src, dest, opts) {
-    this.src = src
-    this.dest = dest
-    this.opts = opts
-    this.ondrain = () => src[RESUME]()
-    dest.on('drain', this.ondrain)
-  }
-  unpipe() {
-    this.dest.removeListener('drain', this.ondrain)
-  }
-  // istanbul ignore next - only here for the prototype
-  proxyErrors() {}
-  end() {
-    this.unpipe()
-    if (this.opts.end) this.dest.end()
-  }
-}
-
-class PipeProxyErrors extends Pipe {
-  unpipe() {
-    this.src.removeListener('error', this.proxyErrors)
-    super.unpipe()
-  }
-  constructor(src, dest, opts) {
-    super(src, dest, opts)
-    this.proxyErrors = er => dest.emit('error', er)
-    src.on('error', this.proxyErrors)
-  }
-}
-
-export class Minipass extends Stream {
-  constructor(options) {
-    super()
-    this[FLOWING] = false
-    // whether we're explicitly paused
-    this[PAUSED] = false
-    this[PIPES] = []
-    this[BUFFER] = []
-    this[OBJECTMODE] = (options && options.objectMode) || false
-    if (this[OBJECTMODE]) this[ENCODING] = null
-    else this[ENCODING] = (options && options.encoding) || null
-    if (this[ENCODING] === 'buffer') this[ENCODING] = null
-    this[ASYNC] = (options && !!options.async) || false
-    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
-    this[EOF] = false
-    this[EMITTED_END] = false
-    this[EMITTING_END] = false
-    this[CLOSED] = false
-    this[EMITTED_ERROR] = null
-    this.writable = true
-    this.readable = true
-    this[BUFFERLENGTH] = 0
-    this[DESTROYED] = false
-    if (options && options.debugExposeBuffer === true) {
-      Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })
-    }
-    if (options && options.debugExposePipes === true) {
-      Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
-    }
-    this[SIGNAL] = options && options.signal
-    this[ABORTED] = false
-    if (this[SIGNAL]) {
-      this[SIGNAL].addEventListener('abort', () => this[ABORT]())
-      if (this[SIGNAL].aborted) {
-        this[ABORT]()
-      }
-    }
-  }
-
-  get bufferLength() {
-    return this[BUFFERLENGTH]
-  }
-
-  get encoding() {
-    return this[ENCODING]
-  }
-  set encoding(enc) {
-    if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
-
-    if (
-      this[ENCODING] &&
-      enc !== this[ENCODING] &&
-      ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
-    )
-      throw new Error('cannot change encoding')
-
-    if (this[ENCODING] !== enc) {
-      this[DECODER] = enc ? new SD(enc) : null
-      if (this[BUFFER].length)
-        this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk))
-    }
-
-    this[ENCODING] = enc
-  }
-
-  setEncoding(enc) {
-    this.encoding = enc
-  }
-
-  get objectMode() {
-    return this[OBJECTMODE]
-  }
-  set objectMode(om) {
-    this[OBJECTMODE] = this[OBJECTMODE] || !!om
-  }
-
-  get ['async']() {
-    return this[ASYNC]
-  }
-  set ['async'](a) {
-    this[ASYNC] = this[ASYNC] || !!a
-  }
-
-  // drop everything and get out of the flow completely
-  [ABORT]() {
-    this[ABORTED] = true
-    this.emit('abort', this[SIGNAL].reason)
-    this.destroy(this[SIGNAL].reason)
-  }
-
-  get aborted() {
-    return this[ABORTED]
-  }
-  set aborted(_) {}
-
-  write(chunk, encoding, cb) {
-    if (this[ABORTED]) return false
-    if (this[EOF]) throw new Error('write after end')
-
-    if (this[DESTROYED]) {
-      this.emit(
-        'error',
-        Object.assign(
-          new Error('Cannot call write after a stream was destroyed'),
-          { code: 'ERR_STREAM_DESTROYED' }
-        )
-      )
-      return true
-    }
-
-    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
-
-    if (!encoding) encoding = 'utf8'
-
-    const fn = this[ASYNC] ? defer : f => f()
-
-    // convert array buffers and typed array views into buffers
-    // at some point in the future, we may want to do the opposite!
-    // leave strings and buffers as-is
-    // anything else switches us into object mode
-    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
-      if (isArrayBufferView(chunk))
-        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
-      else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
-      else if (typeof chunk !== 'string')
-        // use the setter so we throw if we have encoding set
-        this.objectMode = true
-    }
-
-    // handle object mode up front, since it's simpler
-    // this yields better performance, fewer checks later.
-    if (this[OBJECTMODE]) {
-      /* istanbul ignore if - maybe impossible? */
-      if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
-
-      if (this.flowing) this.emit('data', chunk)
-      else this[BUFFERPUSH](chunk)
-
-      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-
-      if (cb) fn(cb)
-
-      return this.flowing
-    }
-
-    // at this point the chunk is a buffer or string
-    // don't buffer it up or send it to the decoder
-    if (!chunk.length) {
-      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-      if (cb) fn(cb)
-      return this.flowing
-    }
-
-    // fast-path writing strings of same encoding to a stream with
-    // an empty buffer, skipping the buffer/decoder dance
-    if (
-      typeof chunk === 'string' &&
-      // unless it is a string already ready for us to use
-      !(encoding === this[ENCODING] && !this[DECODER].lastNeed)
-    ) {
-      chunk = Buffer.from(chunk, encoding)
-    }
-
-    if (Buffer.isBuffer(chunk) && this[ENCODING])
-      chunk = this[DECODER].write(chunk)
-
-    // Note: flushing CAN potentially switch us into not-flowing mode
-    if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
-
-    if (this.flowing) this.emit('data', chunk)
-    else this[BUFFERPUSH](chunk)
-
-    if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-
-    if (cb) fn(cb)
-
-    return this.flowing
-  }
-
-  read(n) {
-    if (this[DESTROYED]) return null
-
-    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
-      this[MAYBE_EMIT_END]()
-      return null
-    }
-
-    if (this[OBJECTMODE]) n = null
-
-    if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
-      if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
-      else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
-    }
-
-    const ret = this[READ](n || null, this[BUFFER][0])
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [READ](n, chunk) {
-    if (n === chunk.length || n === null) this[BUFFERSHIFT]()
-    else {
-      this[BUFFER][0] = chunk.slice(n)
-      chunk = chunk.slice(0, n)
-      this[BUFFERLENGTH] -= n
-    }
-
-    this.emit('data', chunk)
-
-    if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
-
-    return chunk
-  }
-
-  end(chunk, encoding, cb) {
-    if (typeof chunk === 'function') (cb = chunk), (chunk = null)
-    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
-    if (chunk) this.write(chunk, encoding)
-    if (cb) this.once('end', cb)
-    this[EOF] = true
-    this.writable = false
-
-    // if we haven't written anything, then go ahead and emit,
-    // even if we're not reading.
-    // we'll re-emit if a new 'end' listener is added anyway.
-    // This makes MP more suitable to write-only use cases.
-    if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
-    return this
-  }
-
-  // don't let the internal resume be overwritten
-  [RESUME]() {
-    if (this[DESTROYED]) return
-
-    this[PAUSED] = false
-    this[FLOWING] = true
-    this.emit('resume')
-    if (this[BUFFER].length) this[FLUSH]()
-    else if (this[EOF]) this[MAYBE_EMIT_END]()
-    else this.emit('drain')
-  }
-
-  resume() {
-    return this[RESUME]()
-  }
-
-  pause() {
-    this[FLOWING] = false
-    this[PAUSED] = true
-  }
-
-  get destroyed() {
-    return this[DESTROYED]
-  }
-
-  get flowing() {
-    return this[FLOWING]
-  }
-
-  get paused() {
-    return this[PAUSED]
-  }
-
-  [BUFFERPUSH](chunk) {
-    if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
-    else this[BUFFERLENGTH] += chunk.length
-    this[BUFFER].push(chunk)
-  }
-
-  [BUFFERSHIFT]() {
-    if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
-    else this[BUFFERLENGTH] -= this[BUFFER][0].length
-    return this[BUFFER].shift()
-  }
-
-  [FLUSH](noDrain) {
-    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
-
-    if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
-  }
-
-  [FLUSHCHUNK](chunk) {
-    this.emit('data', chunk)
-    return this.flowing
-  }
-
-  pipe(dest, opts) {
-    if (this[DESTROYED]) return
-
-    const ended = this[EMITTED_END]
-    opts = opts || {}
-    if (dest === proc.stdout || dest === proc.stderr) opts.end = false
-    else opts.end = opts.end !== false
-    opts.proxyErrors = !!opts.proxyErrors
-
-    // piping an ended stream ends immediately
-    if (ended) {
-      if (opts.end) dest.end()
-    } else {
-      this[PIPES].push(
-        !opts.proxyErrors
-          ? new Pipe(this, dest, opts)
-          : new PipeProxyErrors(this, dest, opts)
-      )
-      if (this[ASYNC]) defer(() => this[RESUME]())
-      else this[RESUME]()
-    }
-
-    return dest
-  }
-
-  unpipe(dest) {
-    const p = this[PIPES].find(p => p.dest === dest)
-    if (p) {
-      this[PIPES].splice(this[PIPES].indexOf(p), 1)
-      p.unpipe()
-    }
-  }
-
-  addListener(ev, fn) {
-    return this.on(ev, fn)
-  }
-
-  on(ev, fn) {
-    const ret = super.on(ev, fn)
-    if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
-    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
-      super.emit('readable')
-    else if (isEndish(ev) && this[EMITTED_END]) {
-      super.emit(ev)
-      this.removeAllListeners(ev)
-    } else if (ev === 'error' && this[EMITTED_ERROR]) {
-      if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
-      else fn.call(this, this[EMITTED_ERROR])
-    }
-    return ret
-  }
-
-  get emittedEnd() {
-    return this[EMITTED_END]
-  }
-
-  [MAYBE_EMIT_END]() {
-    if (
-      !this[EMITTING_END] &&
-      !this[EMITTED_END] &&
-      !this[DESTROYED] &&
-      this[BUFFER].length === 0 &&
-      this[EOF]
-    ) {
-      this[EMITTING_END] = true
-      this.emit('end')
-      this.emit('prefinish')
-      this.emit('finish')
-      if (this[CLOSED]) this.emit('close')
-      this[EMITTING_END] = false
-    }
-  }
-
-  emit(ev, data, ...extra) {
-    // error and close are only events allowed after calling destroy()
-    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
-      return
-    else if (ev === 'data') {
-      return !this[OBJECTMODE] && !data
-        ? false
-        : this[ASYNC]
-        ? defer(() => this[EMITDATA](data))
-        : this[EMITDATA](data)
-    } else if (ev === 'end') {
-      return this[EMITEND]()
-    } else if (ev === 'close') {
-      this[CLOSED] = true
-      // don't emit close before 'end' and 'finish'
-      if (!this[EMITTED_END] && !this[DESTROYED]) return
-      const ret = super.emit('close')
-      this.removeAllListeners('close')
-      return ret
-    } else if (ev === 'error') {
-      this[EMITTED_ERROR] = data
-      super.emit(ERROR, data)
-      const ret =
-        !this[SIGNAL] || this.listeners('error').length
-          ? super.emit('error', data)
-          : false
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'resume') {
-      const ret = super.emit('resume')
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'finish' || ev === 'prefinish') {
-      const ret = super.emit(ev)
-      this.removeAllListeners(ev)
-      return ret
-    }
-
-    // Some other unknown event
-    const ret = super.emit(ev, data, ...extra)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITDATA](data) {
-    for (const p of this[PIPES]) {
-      if (p.dest.write(data) === false) this.pause()
-    }
-    const ret = super.emit('data', data)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITEND]() {
-    if (this[EMITTED_END]) return
-
-    this[EMITTED_END] = true
-    this.readable = false
-    if (this[ASYNC]) defer(() => this[EMITEND2]())
-    else this[EMITEND2]()
-  }
-
-  [EMITEND2]() {
-    if (this[DECODER]) {
-      const data = this[DECODER].end()
-      if (data) {
-        for (const p of this[PIPES]) {
-          p.dest.write(data)
-        }
-        super.emit('data', data)
-      }
-    }
-
-    for (const p of this[PIPES]) {
-      p.end()
-    }
-    const ret = super.emit('end')
-    this.removeAllListeners('end')
-    return ret
-  }
-
-  // const all = await stream.collect()
-  collect() {
-    const buf = []
-    if (!this[OBJECTMODE]) buf.dataLength = 0
-    // set the promise first, in case an error is raised
-    // by triggering the flow here.
-    const p = this.promise()
-    this.on('data', c => {
-      buf.push(c)
-      if (!this[OBJECTMODE]) buf.dataLength += c.length
-    })
-    return p.then(() => buf)
-  }
-
-  // const data = await stream.concat()
-  concat() {
-    return this[OBJECTMODE]
-      ? Promise.reject(new Error('cannot concat in objectMode'))
-      : this.collect().then(buf =>
-          this[OBJECTMODE]
-            ? Promise.reject(new Error('cannot concat in objectMode'))
-            : this[ENCODING]
-            ? buf.join('')
-            : Buffer.concat(buf, buf.dataLength)
-        )
-  }
-
-  // stream.promise().then(() => done, er => emitted error)
-  promise() {
-    return new Promise((resolve, reject) => {
-      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
-      this.on('error', er => reject(er))
-      this.on('end', () => resolve())
-    })
-  }
-
-  // for await (let chunk of stream)
-  [ASYNCITERATOR]() {
-    let stopped = false
-    const stop = () => {
-      this.pause()
-      stopped = true
-      return Promise.resolve({ done: true })
-    }
-    const next = () => {
-      if (stopped) return stop()
-      const res = this.read()
-      if (res !== null) return Promise.resolve({ done: false, value: res })
-
-      if (this[EOF]) return stop()
-
-      let resolve = null
-      let reject = null
-      const onerr = er => {
-        this.removeListener('data', ondata)
-        this.removeListener('end', onend)
-        this.removeListener(DESTROYED, ondestroy)
-        stop()
-        reject(er)
-      }
-      const ondata = value => {
-        this.removeListener('error', onerr)
-        this.removeListener('end', onend)
-        this.removeListener(DESTROYED, ondestroy)
-        this.pause()
-        resolve({ value: value, done: !!this[EOF] })
-      }
-      const onend = () => {
-        this.removeListener('error', onerr)
-        this.removeListener('data', ondata)
-        this.removeListener(DESTROYED, ondestroy)
-        stop()
-        resolve({ done: true })
-      }
-      const ondestroy = () => onerr(new Error('stream destroyed'))
-      return new Promise((res, rej) => {
-        reject = rej
-        resolve = res
-        this.once(DESTROYED, ondestroy)
-        this.once('error', onerr)
-        this.once('end', onend)
-        this.once('data', ondata)
-      })
-    }
-
-    return {
-      next,
-      throw: stop,
-      return: stop,
-      [ASYNCITERATOR]() {
-        return this
-      },
-    }
-  }
-
-  // for (let chunk of stream)
-  [ITERATOR]() {
-    let stopped = false
-    const stop = () => {
-      this.pause()
-      this.removeListener(ERROR, stop)
-      this.removeListener(DESTROYED, stop)
-      this.removeListener('end', stop)
-      stopped = true
-      return { done: true }
-    }
-
-    const next = () => {
-      if (stopped) return stop()
-      const value = this.read()
-      return value === null ? stop() : { value }
-    }
-    this.once('end', stop)
-    this.once(ERROR, stop)
-    this.once(DESTROYED, stop)
-
-    return {
-      next,
-      throw: stop,
-      return: stop,
-      [ITERATOR]() {
-        return this
-      },
-    }
-  }
-
-  destroy(er) {
-    if (this[DESTROYED]) {
-      if (er) this.emit('error', er)
-      else this.emit(DESTROYED)
-      return this
-    }
-
-    this[DESTROYED] = true
-
-    // throw away all buffered data, it's never coming out
-    this[BUFFER].length = 0
-    this[BUFFERLENGTH] = 0
-
-    if (typeof this.close === 'function' && !this[CLOSED]) this.close()
-
-    if (er) this.emit('error', er)
-    // if no error to emit, still reject pending promises
-    else this.emit(DESTROYED)
-
-    return this
-  }
-
-  static isStream(s) {
-    return (
-      !!s &&
-      (s instanceof Minipass ||
-        s instanceof Stream ||
-        (s instanceof EE &&
-          // readable
-          (typeof s.pipe === 'function' ||
-            // writable
-            (typeof s.write === 'function' && typeof s.end === 'function'))))
-    )
-  }
-}
-
-
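
Note: the ESM build deleted above mirrors the CJS file except for the `export class Minipass` named export. A minimal sketch of using it from an ES module, including the proxyErrors pipe option implemented by the PipeProxyErrors class above; the output path is hypothetical.

    import { Minipass } from 'minipass'
    import { createWriteStream } from 'fs'

    const src = new Minipass()
    // proxyErrors forwards 'error' events from src into the destination
    src.pipe(createWriteStream('/tmp/out.txt'), { proxyErrors: true })
    src.write(Buffer.from('some data\n'))
    src.end()
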
diff --git a/node_modules/sigstore/node_modules/minipass/package.json b/node_modules/sigstore/node_modules/minipass/package.json
deleted file mode 100644
index 0e20e988047f2..0000000000000
--- a/node_modules/sigstore/node_modules/minipass/package.json
+++ /dev/null
@@ -1,76 +0,0 @@
-{
-  "name": "minipass",
-  "version": "5.0.0",
-  "description": "minimal implementation of a PassThrough stream",
-  "main": "./index.js",
-  "module": "./index.mjs",
-  "types": "./index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./index.d.ts",
-        "default": "./index.mjs"
-      },
-      "require": {
-        "types": "./index.d.ts",
-        "default": "./index.js"
-      }
-    },
-    "./package.json": "./package.json"
-  },
-  "devDependencies": {
-    "@types/node": "^17.0.41",
-    "end-of-stream": "^1.4.0",
-    "node-abort-controller": "^3.1.1",
-    "prettier": "^2.6.2",
-    "tap": "^16.2.0",
-    "through2": "^2.0.3",
-    "ts-node": "^10.8.1",
-    "typedoc": "^0.23.24",
-    "typescript": "^4.7.3"
-  },
-  "scripts": {
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "node ./scripts/transpile-to-esm.js",
-    "snap": "tap",
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "postpublish": "git push origin --follow-tags",
-    "typedoc": "typedoc ./index.d.ts",
-    "format": "prettier --write . --loglevel warn"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/minipass.git"
-  },
-  "keywords": [
-    "passthrough",
-    "stream"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "ISC",
-  "files": [
-    "index.d.ts",
-    "index.js",
-    "index.mjs"
-  ],
-  "tap": {
-    "check-coverage": true
-  },
-  "engines": {
-    "node": ">=8"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 80,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  }
-}
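
Note: the conditional "exports" map in the deleted package.json above is what let the same specifier resolve to either build, roughly:

    // 'require' condition -> ./index.js (the CJS build)
    const { Minipass } = require('minipass')
    // 'import' condition -> ./index.mjs (the ESM build), from an ES module:
    //   import { Minipass } from 'minipass'
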
diff --git a/node_modules/sigstore/node_modules/tuf-js/LICENSE b/node_modules/sigstore/node_modules/tuf-js/LICENSE
deleted file mode 100644
index 420700f5d3765..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2022 GitHub and the TUF Contributors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/config.js b/node_modules/sigstore/node_modules/tuf-js/dist/config.js
deleted file mode 100644
index c2d970e256244..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/config.js
+++ /dev/null
@@ -1,14 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.defaultConfig = void 0;
-exports.defaultConfig = {
-    maxRootRotations: 32,
-    maxDelegations: 32,
-    rootMaxLength: 512000,
-    timestampMaxLength: 16384,
-    snapshotMaxLength: 2000000,
-    targetsMaxLength: 5000000,
-    prefixTargetsWithHash: true,
-    fetchTimeout: 100000,
-    fetchRetries: 2,
-};
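
Note: these defaults are shallow-merged under any user-supplied config by the Updater (see updater.js below: `this.config = { ...defaultConfig, ...config }`). A sketch, assuming the dist/ path stays requirable:

    const { defaultConfig } = require('tuf-js/dist/config')

    const config = { ...defaultConfig, fetchRetries: 5 }
    console.log(config.maxRootRotations) // 32, from the defaults
    console.log(config.fetchRetries)     // 5, overridden
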
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/error.js b/node_modules/sigstore/node_modules/tuf-js/dist/error.js
deleted file mode 100644
index f4b10fa202895..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/error.js
+++ /dev/null
@@ -1,48 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DownloadHTTPError = exports.DownloadLengthMismatchError = exports.DownloadError = exports.ExpiredMetadataError = exports.EqualVersionError = exports.BadVersionError = exports.RepositoryError = exports.PersistError = exports.RuntimeError = exports.ValueError = void 0;
-// An error about insufficient values
-class ValueError extends Error {
-}
-exports.ValueError = ValueError;
-class RuntimeError extends Error {
-}
-exports.RuntimeError = RuntimeError;
-class PersistError extends Error {
-}
-exports.PersistError = PersistError;
-// An error with a repository's state, such as a missing file.
-// It covers all exceptions that come from the repository side when
-// viewed from the perspective of users of the metadata API or ngclient.
-class RepositoryError extends Error {
-}
-exports.RepositoryError = RepositoryError;
-// An error for metadata that contains an invalid version number.
-class BadVersionError extends RepositoryError {
-}
-exports.BadVersionError = BadVersionError;
-// An error for metadata containing a previously verified version number.
-class EqualVersionError extends BadVersionError {
-}
-exports.EqualVersionError = EqualVersionError;
-// Indicate that a TUF Metadata file has expired.
-class ExpiredMetadataError extends RepositoryError {
-}
-exports.ExpiredMetadataError = ExpiredMetadataError;
-//----- Download Errors -------------------------------------------------------
-// An error occurred while attempting to download a file.
-class DownloadError extends Error {
-}
-exports.DownloadError = DownloadError;
-// Indicate that a mismatch of lengths was seen while downloading a file
-class DownloadLengthMismatchError extends DownloadError {
-}
-exports.DownloadLengthMismatchError = DownloadLengthMismatchError;
-// Returned by FetcherInterface implementations for HTTP errors.
-class DownloadHTTPError extends DownloadError {
-    constructor(message, statusCode) {
-        super(message);
-        this.statusCode = statusCode;
-    }
-}
-exports.DownloadHTTPError = DownloadHTTPError;
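
Note: because the classes above form a hierarchy (e.g. DownloadHTTPError extends DownloadError), callers can branch with instanceof at whatever granularity they need. A sketch, assuming the dist/ path stays requirable; the thrown error is a stand-in for a failed fetch.

    const { DownloadError, DownloadHTTPError } = require('tuf-js/dist/error')

    try {
      throw new DownloadHTTPError('Failed to download', 404)
    } catch (err) {
      if (err instanceof DownloadHTTPError) {
        console.error('HTTP failure, status:', err.statusCode)
      } else if (err instanceof DownloadError) {
        console.error('download failed:', err.message) // catches subclasses too
      } else {
        throw err
      }
    }
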
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js b/node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js
deleted file mode 100644
index d3dcf53eeb869..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js
+++ /dev/null
@@ -1,84 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DefaultFetcher = exports.BaseFetcher = void 0;
-const debug_1 = __importDefault(require("debug"));
-const fs_1 = __importDefault(require("fs"));
-const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
-const util_1 = __importDefault(require("util"));
-const error_1 = require("./error");
-const tmpfile_1 = require("./utils/tmpfile");
-const log = (0, debug_1.default)('tuf:fetch');
-class BaseFetcher {
-    // Download file from given URL. The file is downloaded to a temporary
-    // location and then passed to the given handler. The handler is responsible
-    // for moving the file to its final location. The temporary file is deleted
-    // after the handler returns.
-    async downloadFile(url, maxLength, handler) {
-        return (0, tmpfile_1.withTempFile)(async (tmpFile) => {
-            const reader = await this.fetch(url);
-            let numberOfBytesReceived = 0;
-            const fileStream = fs_1.default.createWriteStream(tmpFile);
-            // Read the stream a chunk at a time so that we can check
-            // the length of the file as we go
-            try {
-                for await (const chunk of reader) {
-                    const bufferChunk = Buffer.from(chunk);
-                    numberOfBytesReceived += bufferChunk.length;
-                    if (numberOfBytesReceived > maxLength) {
-                        throw new error_1.DownloadLengthMismatchError('Max length reached');
-                    }
-                    await writeBufferToStream(fileStream, bufferChunk);
-                }
-            }
-            finally {
-                // Make sure we always close the stream
-                await util_1.default.promisify(fileStream.close).bind(fileStream)();
-            }
-            return handler(tmpFile);
-        });
-    }
-    // Download bytes from given URL.
-    async downloadBytes(url, maxLength) {
-        return this.downloadFile(url, maxLength, async (file) => {
-            const stream = fs_1.default.createReadStream(file);
-            const chunks = [];
-            for await (const chunk of stream) {
-                chunks.push(chunk);
-            }
-            return Buffer.concat(chunks);
-        });
-    }
-}
-exports.BaseFetcher = BaseFetcher;
-class DefaultFetcher extends BaseFetcher {
-    constructor(options = {}) {
-        super();
-        this.timeout = options.timeout;
-        this.retries = options.retries;
-    }
-    async fetch(url) {
-        log('GET %s', url);
-        const response = await (0, make_fetch_happen_1.default)(url, {
-            timeout: this.timeout,
-            retry: this.retries,
-        });
-        if (!response.ok || !response?.body) {
-            throw new error_1.DownloadHTTPError('Failed to download', response.status);
-        }
-        return response.body;
-    }
-}
-exports.DefaultFetcher = DefaultFetcher;
-const writeBufferToStream = async (stream, buffer) => {
-    return new Promise((resolve, reject) => {
-        stream.write(buffer, (err) => {
-            if (err) {
-                reject(err);
-            }
-            resolve(true);
-        });
-    });
-};
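
Note: BaseFetcher above delegates the transport to a fetch(url) method that must return an async-iterable body; downloadFile() and downloadBytes() then handle length enforcement and temp-file plumbing. A sketch of a custom fetcher built on that contract (the fixture map is hypothetical); an instance can be passed as the `fetcher` option to the Updater shown in updater.js below.

    const { BaseFetcher } = require('tuf-js')

    class StaticFetcher extends BaseFetcher {
      constructor (fixtures) {
        super()
        this.fixtures = fixtures // hypothetical map of url -> Buffer
      }

      async fetch (url) {
        const body = this.fixtures[url]
        if (!body) throw new Error(`no fixture for ${url}`)
        // an array of Buffers satisfies the for await loop in downloadFile
        return [body]
      }
    }
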
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/index.js b/node_modules/sigstore/node_modules/tuf-js/dist/index.js
deleted file mode 100644
index 5a83b91f355d8..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/index.js
+++ /dev/null
@@ -1,9 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Updater = exports.BaseFetcher = exports.TargetFile = void 0;
-var models_1 = require("@tufjs/models");
-Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return models_1.TargetFile; } });
-var fetcher_1 = require("./fetcher");
-Object.defineProperty(exports, "BaseFetcher", { enumerable: true, get: function () { return fetcher_1.BaseFetcher; } });
-var updater_1 = require("./updater");
-Object.defineProperty(exports, "Updater", { enumerable: true, get: function () { return updater_1.Updater; } });
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/store.js b/node_modules/sigstore/node_modules/tuf-js/dist/store.js
deleted file mode 100644
index 8567336108709..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/store.js
+++ /dev/null
@@ -1,208 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TrustedMetadataStore = void 0;
-const models_1 = require("@tufjs/models");
-const error_1 = require("./error");
-class TrustedMetadataStore {
-    constructor(rootData) {
-        this.trustedSet = {};
-        // Client workflow 5.1: record fixed update start time
-        this.referenceTime = new Date();
-        // Client workflow 5.2: load trusted root metadata
-        this.loadTrustedRoot(rootData);
-    }
-    get root() {
-        if (!this.trustedSet.root) {
-            throw new ReferenceError('No trusted root metadata');
-        }
-        return this.trustedSet.root;
-    }
-    get timestamp() {
-        return this.trustedSet.timestamp;
-    }
-    get snapshot() {
-        return this.trustedSet.snapshot;
-    }
-    get targets() {
-        return this.trustedSet.targets;
-    }
-    getRole(name) {
-        return this.trustedSet[name];
-    }
-    updateRoot(bytesBuffer) {
-        const data = JSON.parse(bytesBuffer.toString('utf8'));
-        const newRoot = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data);
-        if (newRoot.signed.type != models_1.MetadataKind.Root) {
-            throw new error_1.RepositoryError(`Expected 'root', got ${newRoot.signed.type}`);
-        }
-        // Client workflow 5.4: check for arbitrary software attack
-        this.root.verifyDelegate(models_1.MetadataKind.Root, newRoot);
-        // Client workflow 5.5: check for rollback attack
-        if (newRoot.signed.version != this.root.signed.version + 1) {
-            throw new error_1.BadVersionError(`Expected version ${this.root.signed.version + 1}, got ${newRoot.signed.version}`);
-        }
-        // Check that new root is signed by self
-        newRoot.verifyDelegate(models_1.MetadataKind.Root, newRoot);
-        // Client workflow 5.7: set new root as trusted root
-        this.trustedSet.root = newRoot;
-        return newRoot;
-    }
-    updateTimestamp(bytesBuffer) {
-        if (this.snapshot) {
-            throw new error_1.RuntimeError('Cannot update timestamp after snapshot');
-        }
-        if (this.root.signed.isExpired(this.referenceTime)) {
-            throw new error_1.ExpiredMetadataError('Final root.json is expired');
-        }
-        const data = JSON.parse(bytesBuffer.toString('utf8'));
-        const newTimestamp = models_1.Metadata.fromJSON(models_1.MetadataKind.Timestamp, data);
-        if (newTimestamp.signed.type != models_1.MetadataKind.Timestamp) {
-            throw new error_1.RepositoryError(`Expected 'timestamp', got ${newTimestamp.signed.type}`);
-        }
-        // Client workflow 5.4.2: check for arbitrary software attack
-        this.root.verifyDelegate(models_1.MetadataKind.Timestamp, newTimestamp);
-        if (this.timestamp) {
-            // Prevent rolling back timestamp version
-            // Client workflow 5.4.3.1: check for rollback attack
-            if (newTimestamp.signed.version < this.timestamp.signed.version) {
-                throw new error_1.BadVersionError(`New timestamp version ${newTimestamp.signed.version} is less than current version ${this.timestamp.signed.version}`);
-            }
-            //  Keep using old timestamp if versions are equal.
-            if (newTimestamp.signed.version === this.timestamp.signed.version) {
-                throw new error_1.EqualVersionError(`New timestamp version ${newTimestamp.signed.version} is equal to current version ${this.timestamp.signed.version}`);
-            }
-            // Prevent rolling back snapshot version
-            // Client workflow 5.4.3.2: check for rollback attack
-            const snapshotMeta = this.timestamp.signed.snapshotMeta;
-            const newSnapshotMeta = newTimestamp.signed.snapshotMeta;
-            if (newSnapshotMeta.version < snapshotMeta.version) {
-                throw new error_1.BadVersionError(`New snapshot version ${newSnapshotMeta.version} is less than current version ${snapshotMeta.version}`);
-            }
-        }
-        // expiry not checked to allow old timestamp to be used for rollback
-        // protection of new timestamp: expiry is checked in update_snapshot
-        this.trustedSet.timestamp = newTimestamp;
-        // Client workflow 5.4.4: check for freeze attack
-        this.checkFinalTimestamp();
-        return newTimestamp;
-    }
-    updateSnapshot(bytesBuffer, trusted = false) {
-        if (!this.timestamp) {
-            throw new error_1.RuntimeError('Cannot update snapshot before timestamp');
-        }
-        if (this.targets) {
-            throw new error_1.RuntimeError('Cannot update snapshot after targets');
-        }
-        // Snapshot cannot be loaded if final timestamp is expired
-        this.checkFinalTimestamp();
-        const snapshotMeta = this.timestamp.signed.snapshotMeta;
-        // Verify non-trusted data against the hashes in timestamp, if any.
-        // Trusted snapshot data has already been verified once.
-        // Client workflow 5.5.2: check against timestamp role's snapshot hash
-        if (!trusted) {
-            snapshotMeta.verify(bytesBuffer);
-        }
-        const data = JSON.parse(bytesBuffer.toString('utf8'));
-        const newSnapshot = models_1.Metadata.fromJSON(models_1.MetadataKind.Snapshot, data);
-        if (newSnapshot.signed.type != models_1.MetadataKind.Snapshot) {
-            throw new error_1.RepositoryError(`Expected 'snapshot', got ${newSnapshot.signed.type}`);
-        }
-        // Client workflow 5.5.3: check for arbitrary software attack
-        this.root.verifyDelegate(models_1.MetadataKind.Snapshot, newSnapshot);
-        // version check against meta version (5.5.4) is deferred to allow old
-        // snapshot to be used in rollback protection
-        // Client workflow 5.5.5: check for rollback attack
-        if (this.snapshot) {
-            Object.entries(this.snapshot.signed.meta).forEach(([fileName, fileInfo]) => {
-                const newFileInfo = newSnapshot.signed.meta[fileName];
-                if (!newFileInfo) {
-                    throw new error_1.RepositoryError(`Missing file ${fileName} in new snapshot`);
-                }
-                if (newFileInfo.version < fileInfo.version) {
-                    throw new error_1.BadVersionError(`New version ${newFileInfo.version} of ${fileName} is less than current version ${fileInfo.version}`);
-                }
-            });
-        }
-        this.trustedSet.snapshot = newSnapshot;
-        // snapshot is loaded, but we raise if it's not a valid _final_ snapshot
-        // Client workflow 5.5.4 & 5.5.6
-        this.checkFinalSnapsnot();
-        return newSnapshot;
-    }
-    updateDelegatedTargets(bytesBuffer, roleName, delegatorName) {
-        if (!this.snapshot) {
-            throw new error_1.RuntimeError('Cannot update delegated targets before snapshot');
-        }
-        // Targets cannot be loaded if final snapshot is expired or its version
-        // does not match meta version in timestamp.
-        this.checkFinalSnapsnot();
-        const delegator = this.trustedSet[delegatorName];
-        if (!delegator) {
-            throw new error_1.RuntimeError(`No trusted ${delegatorName} metadata`);
-        }
-        // Extract metadata for the delegated role from snapshot
-        const meta = this.snapshot.signed.meta?.[`${roleName}.json`];
-        if (!meta) {
-            throw new error_1.RepositoryError(`Missing ${roleName}.json in snapshot`);
-        }
-        // Client workflow 5.6.2: check against snapshot role's targets hash
-        meta.verify(bytesBuffer);
-        const data = JSON.parse(bytesBuffer.toString('utf8'));
-        const newDelegate = models_1.Metadata.fromJSON(models_1.MetadataKind.Targets, data);
-        if (newDelegate.signed.type != models_1.MetadataKind.Targets) {
-            throw new error_1.RepositoryError(`Expected 'targets', got ${newDelegate.signed.type}`);
-        }
-        // Client workflow 5.6.3: check for arbitrary software attack
-        delegator.verifyDelegate(roleName, newDelegate);
-        // Client workflow 5.6.4: Check against snapshot role’s targets version
-        const version = newDelegate.signed.version;
-        if (version != meta.version) {
-            throw new error_1.BadVersionError(`Version ${version} of ${roleName} does not match snapshot version ${meta.version}`);
-        }
-        // Client workflow 5.6.5: check for a freeze attack
-        if (newDelegate.signed.isExpired(this.referenceTime)) {
-            throw new error_1.ExpiredMetadataError(`${roleName}.json is expired`);
-        }
-        this.trustedSet[roleName] = newDelegate;
-    }
-    // Verifies and loads data as trusted root metadata.
-    // Note that an expired initial root is still considered valid.
-    loadTrustedRoot(bytesBuffer) {
-        const data = JSON.parse(bytesBuffer.toString('utf8'));
-        const root = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data);
-        if (root.signed.type != models_1.MetadataKind.Root) {
-            throw new error_1.RepositoryError(`Expected 'root', got ${root.signed.type}`);
-        }
-        root.verifyDelegate(models_1.MetadataKind.Root, root);
-        this.trustedSet['root'] = root;
-    }
-    checkFinalTimestamp() {
-        // Timestamp MUST be loaded
-        if (!this.timestamp) {
-            throw new ReferenceError('No trusted timestamp metadata');
-        }
-        // Client workflow 5.4.4: check for freeze attack
-        if (this.timestamp.signed.isExpired(this.referenceTime)) {
-            throw new error_1.ExpiredMetadataError('Final timestamp.json is expired');
-        }
-    }
-    checkFinalSnapsnot() {
-        // Snapshot and timestamp MUST be loaded
-        if (!this.snapshot) {
-            throw new ReferenceError('No trusted snapshot metadata');
-        }
-        if (!this.timestamp) {
-            throw new ReferenceError('No trusted timestamp metadata');
-        }
-        // Client workflow 5.5.6: check for freeze attack
-        if (this.snapshot.signed.isExpired(this.referenceTime)) {
-            throw new error_1.ExpiredMetadataError('snapshot.json is expired');
-        }
-        // Client workflow 5.5.4: check against timestamp role’s snapshot version
-        const snapshotMeta = this.timestamp.signed.snapshotMeta;
-        if (this.snapshot.signed.version !== snapshotMeta.version) {
-            throw new error_1.BadVersionError("Snapshot version doesn't match timestamp");
-        }
-    }
-}
-exports.TrustedMetadataStore = TrustedMetadataStore;
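
Note: the store above enforces the TUF client-workflow ordering (root, then timestamp, then snapshot, then targets); out-of-order calls throw RuntimeError. A sketch of driving it directly, assuming the dist/ path stays requirable (the store is not re-exported from the package index) and with hypothetical paths to signed metadata files.

    const fs = require('fs')
    const { TrustedMetadataStore } = require('tuf-js/dist/store')

    const store = new TrustedMetadataStore(fs.readFileSync('meta/root.json'))
    store.updateTimestamp(fs.readFileSync('meta/timestamp.json'))    // 5.4
    store.updateSnapshot(fs.readFileSync('meta/snapshot.json'))      // 5.5
    store.updateDelegatedTargets(
      fs.readFileSync('meta/targets.json'), 'targets', 'root')       // 5.6
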
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/updater.js b/node_modules/sigstore/node_modules/tuf-js/dist/updater.js
deleted file mode 100644
index 2aba48d24affd..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/updater.js
+++ /dev/null
@@ -1,320 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Updater = void 0;
-const models_1 = require("@tufjs/models");
-const debug_1 = __importDefault(require("debug"));
-const fs = __importStar(require("fs"));
-const path = __importStar(require("path"));
-const config_1 = require("./config");
-const error_1 = require("./error");
-const fetcher_1 = require("./fetcher");
-const store_1 = require("./store");
-const url = __importStar(require("./utils/url"));
-const log = (0, debug_1.default)('tuf:cache');
-class Updater {
-    constructor(options) {
-        const { metadataDir, metadataBaseUrl, targetDir, targetBaseUrl, fetcher, config, } = options;
-        this.dir = metadataDir;
-        this.metadataBaseUrl = metadataBaseUrl;
-        this.targetDir = targetDir;
-        this.targetBaseUrl = targetBaseUrl;
-        const data = this.loadLocalMetadata(models_1.MetadataKind.Root);
-        this.trustedSet = new store_1.TrustedMetadataStore(data);
-        this.config = { ...config_1.defaultConfig, ...config };
-        this.fetcher =
-            fetcher ||
-                new fetcher_1.DefaultFetcher({
-                    timeout: this.config.fetchTimeout,
-                    retries: this.config.fetchRetries,
-                });
-    }
-    // refresh and load the metadata before downloading the target
-    // refresh should be called once after the client is initialized
-    async refresh() {
-        await this.loadRoot();
-        await this.loadTimestamp();
-        await this.loadSnapshot();
-        await this.loadTargets(models_1.MetadataKind.Targets, models_1.MetadataKind.Root);
-    }
-    // Returns the TargetFile instance with information for the given target path.
-    //
-    // Implicitly calls refresh if it hasn't already been called.
-    async getTargetInfo(targetPath) {
-        if (!this.trustedSet.targets) {
-            await this.refresh();
-        }
-        return this.preorderDepthFirstWalk(targetPath);
-    }
-    async downloadTarget(targetInfo, filePath, targetBaseUrl) {
-        const targetPath = filePath || this.generateTargetPath(targetInfo);
-        if (!targetBaseUrl) {
-            if (!this.targetBaseUrl) {
-                throw new error_1.ValueError('Target base URL not set');
-            }
-            targetBaseUrl = this.targetBaseUrl;
-        }
-        let targetFilePath = targetInfo.path;
-        const consistentSnapshot = this.trustedSet.root.signed.consistentSnapshot;
-        if (consistentSnapshot && this.config.prefixTargetsWithHash) {
-            const hashes = Object.values(targetInfo.hashes);
-            const { dir, base } = path.parse(targetFilePath);
-            const filename = `${hashes[0]}.${base}`;
-            targetFilePath = dir ? `${dir}/${filename}` : filename;
-        }
-        const targetUrl = url.join(targetBaseUrl, targetFilePath);
-        // Client workflow 5.7.3: download target file
-        await this.fetcher.downloadFile(targetUrl, targetInfo.length, async (fileName) => {
-            // Verify hashes and length of downloaded file
-            await targetInfo.verify(fs.createReadStream(fileName));
-            // Copy file to target path
-            log('WRITE %s', targetPath);
-            fs.copyFileSync(fileName, targetPath);
-        });
-        return targetPath;
-    }
-    async findCachedTarget(targetInfo, filePath) {
-        if (!filePath) {
-            filePath = this.generateTargetPath(targetInfo);
-        }
-        try {
-            if (fs.existsSync(filePath)) {
-                await targetInfo.verify(fs.createReadStream(filePath));
-                return filePath;
-            }
-        }
-        catch (error) {
-            return; // File not found
-        }
-        return; // File not found
-    }
-    loadLocalMetadata(fileName) {
-        const filePath = path.join(this.dir, `${fileName}.json`);
-        log('READ %s', filePath);
-        return fs.readFileSync(filePath);
-    }
-    // Sequentially load and persist on local disk every newer root metadata
-    // version available on the remote.
-    // Client workflow 5.3: update root role
-    async loadRoot() {
-        // Client workflow 5.3.2: version of trusted root metadata file
-        const rootVersion = this.trustedSet.root.signed.version;
-        const lowerBound = rootVersion + 1;
-        const upperBound = lowerBound + this.config.maxRootRotations;
-        for (let version = lowerBound; version <= upperBound; version++) {
-            const rootUrl = url.join(this.metadataBaseUrl, `${version}.root.json`);
-            try {
-                // Client workflow 5.3.3: download new root metadata file
-                const bytesData = await this.fetcher.downloadBytes(rootUrl, this.config.rootMaxLength);
-                // Client workflow 5.3.4 - 5.4.7
-                this.trustedSet.updateRoot(bytesData);
-                // Client workflow 5.3.8: persist root metadata file
-                this.persistMetadata(models_1.MetadataKind.Root, bytesData);
-            }
-            catch (error) {
-                break;
-            }
-        }
-    }
-    // Load local and remote timestamp metadata.
-    // Client workflow 5.4: update timestamp role
-    async loadTimestamp() {
-        // Load local and remote timestamp metadata
-        try {
-            const data = this.loadLocalMetadata(models_1.MetadataKind.Timestamp);
-            this.trustedSet.updateTimestamp(data);
-        }
-        catch (error) {
-            // continue
-        }
-        // Load from remote (whether local load succeeded or not)
-        const timestampUrl = url.join(this.metadataBaseUrl, 'timestamp.json');
-        // Client workflow 5.4.1: download timestamp metadata file
-        const bytesData = await this.fetcher.downloadBytes(timestampUrl, this.config.timestampMaxLength);
-        try {
-            // Client workflow 5.4.2 - 5.4.4
-            this.trustedSet.updateTimestamp(bytesData);
-        }
-        catch (error) {
-            // If the new timestamp version is the same as the current one,
-            // discard the new one. This is normal and should NOT raise an error.
-            if (error instanceof error_1.EqualVersionError) {
-                return;
-            }
-            // Re-raise any other error
-            throw error;
-        }
-        // Client workflow 5.4.5: persist timestamp metadata
-        this.persistMetadata(models_1.MetadataKind.Timestamp, bytesData);
-    }
-    // Load local and remote snapshot metadata.
-    // Client workflow 5.5: update snapshot role
-    async loadSnapshot() {
-        // Load local (and, if needed, remote) snapshot metadata
-        try {
-            const data = this.loadLocalMetadata(models_1.MetadataKind.Snapshot);
-            this.trustedSet.updateSnapshot(data, true);
-        }
-        catch (error) {
-            if (!this.trustedSet.timestamp) {
-                throw new ReferenceError('No timestamp metadata');
-            }
-            const snapshotMeta = this.trustedSet.timestamp.signed.snapshotMeta;
-            const maxLength = snapshotMeta.length || this.config.snapshotMaxLength;
-            const version = this.trustedSet.root.signed.consistentSnapshot
-                ? snapshotMeta.version
-                : undefined;
-            const snapshotUrl = url.join(this.metadataBaseUrl, version ? `${version}.snapshot.json` : 'snapshot.json');
-            try {
-                // Client workflow 5.5.1: download snapshot metadata file
-                const bytesData = await this.fetcher.downloadBytes(snapshotUrl, maxLength);
-                // Client workflow 5.5.2 - 5.5.6
-                this.trustedSet.updateSnapshot(bytesData);
-                // Client workflow 5.5.7: persist snapshot metadata file
-                this.persistMetadata(models_1.MetadataKind.Snapshot, bytesData);
-            }
-            catch (error) {
-                throw new error_1.RuntimeError(`Unable to load snapshot metadata: ${error}`);
-            }
-        }
-    }
-    // Load local and remote targets metadata.
-    // Client workflow 5.6: update targets role
-    async loadTargets(role, parentRole) {
-        if (this.trustedSet.getRole(role)) {
-            return this.trustedSet.getRole(role);
-        }
-        try {
-            const buffer = this.loadLocalMetadata(role);
-            this.trustedSet.updateDelegatedTargets(buffer, role, parentRole);
-        }
-        catch (error) {
-            // Local 'role' does not exist or is invalid: update from remote
-            if (!this.trustedSet.snapshot) {
-                throw new ReferenceError('No snapshot metadata');
-            }
-            const metaInfo = this.trustedSet.snapshot.signed.meta[`${role}.json`];
-            // TODO: use length for fetching
-            const maxLength = metaInfo.length || this.config.targetsMaxLength;
-            const version = this.trustedSet.root.signed.consistentSnapshot
-                ? metaInfo.version
-                : undefined;
-            const metadataUrl = url.join(this.metadataBaseUrl, version ? `${version}.${role}.json` : `${role}.json`);
-            try {
-                // Client workflow 5.6.1: download targets metadata file
-                const bytesData = await this.fetcher.downloadBytes(metadataUrl, maxLength);
-                // Client workflow 5.6.2 - 5.6.6
-                this.trustedSet.updateDelegatedTargets(bytesData, role, parentRole);
-                // Client workflow 5.6.7: persist targets metadata file
-                this.persistMetadata(role, bytesData);
-            }
-            catch (error) {
-                throw new error_1.RuntimeError(`Unable to load targets: ${error}`);
-            }
-        }
-        return this.trustedSet.getRole(role);
-    }
-    async preorderDepthFirstWalk(targetPath) {
-        // Interrogates the tree of target delegations in order of appearance
-        // (which implicitly orders them by trustworthiness), and returns the
-        // matching target found in the most trusted role.
-        // List of delegations to be interrogated. A (role, parent role) pair
-        // is needed to load and verify the delegated targets metadata.
-        const delegationsToVisit = [
-            {
-                roleName: models_1.MetadataKind.Targets,
-                parentRoleName: models_1.MetadataKind.Root,
-            },
-        ];
-        const visitedRoleNames = new Set();
-        // Client workflow 5.6.7: preorder depth-first traversal of the graph of
-        // target delegations
-        while (visitedRoleNames.size <= this.config.maxDelegations &&
-            delegationsToVisit.length > 0) {
-            // Pop the role name from the top of the stack.
-            // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
-            const { roleName, parentRoleName } = delegationsToVisit.pop();
-            // Skip the current role if it has already been visited, to prevent cycles.
-            // Client workflow 5.6.7.1: skip already-visited roles
-            if (visitedRoleNames.has(roleName)) {
-                continue;
-            }
-            // The metadata for 'role_name' must be downloaded/updated before
-            // its targets, delegations, and child roles can be inspected.
-            const targets = (await this.loadTargets(roleName, parentRoleName))
-                ?.signed;
-            if (!targets) {
-                continue;
-            }
-            const target = targets.targets?.[targetPath];
-            if (target) {
-                return target;
-            }
-            // After preorder check, add current role to set of visited roles.
-            visitedRoleNames.add(roleName);
-            if (targets.delegations) {
-                const childRolesToVisit = [];
-                // NOTE: This may be a slow operation if there are many delegated roles.
-                const rolesForTarget = targets.delegations.rolesForTarget(targetPath);
-                for (const { role: childName, terminating } of rolesForTarget) {
-                    childRolesToVisit.push({
-                        roleName: childName,
-                        parentRoleName: roleName,
-                    });
-                    // Client workflow 5.6.7.2.1
-                    if (terminating) {
-                        delegationsToVisit.splice(0); // empty the array
-                        break;
-                    }
-                }
-                childRolesToVisit.reverse();
-                delegationsToVisit.push(...childRolesToVisit);
-            }
-        }
-        return; // no matching target found
-    }
-    generateTargetPath(targetInfo) {
-        if (!this.targetDir) {
-            throw new error_1.ValueError('Target directory not set');
-        }
-        // URL encode target path
-        const filePath = encodeURIComponent(targetInfo.path);
-        return path.join(this.targetDir, filePath);
-    }
-    async persistMetadata(metaDataName, bytesData) {
-        try {
-            const filePath = path.join(this.dir, `${metaDataName}.json`);
-            log('WRITE %s', filePath);
-            fs.writeFileSync(filePath, bytesData.toString('utf8'));
-        }
-        catch (error) {
-            throw new error_1.PersistError(`Failed to persist metadata ${metaDataName} error: ${error}`);
-        }
-    }
-}
-exports.Updater = Updater;
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js b/node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js
deleted file mode 100644
index 923eef6044bcc..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js
+++ /dev/null
@@ -1,25 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.withTempFile = void 0;
-const promises_1 = __importDefault(require("fs/promises"));
-const os_1 = __importDefault(require("os"));
-const path_1 = __importDefault(require("path"));
-// Invokes the given handler with the path to a temporary file. The file
-// is deleted after the handler returns.
-const withTempFile = async (handler) => withTempDir(async (dir) => handler(path_1.default.join(dir, 'tempfile')));
-exports.withTempFile = withTempFile;
-// Invokes the given handler with a temporary directory. The directory is
-// deleted after the handler returns.
-const withTempDir = async (handler) => {
-    const tmpDir = await promises_1.default.realpath(os_1.default.tmpdir());
-    const dir = await promises_1.default.mkdtemp(tmpDir + path_1.default.sep);
-    try {
-        return await handler(dir);
-    }
-    finally {
-        await promises_1.default.rm(dir, { force: true, recursive: true, maxRetries: 3 });
-    }
-};
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js b/node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js
deleted file mode 100644
index ce67fe2c23053..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js
+++ /dev/null
@@ -1,14 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.join = void 0;
-const url_1 = require("url");
-function join(base, path) {
-    return new url_1.URL(ensureTrailingSlash(base) + removeLeadingSlash(path)).toString();
-}
-exports.join = join;
-function ensureTrailingSlash(path) {
-    return path.endsWith('/') ? path : path + '/';
-}
-function removeLeadingSlash(path) {
-    return path.startsWith('/') ? path.slice(1) : path;
-}
diff --git a/node_modules/sigstore/node_modules/tuf-js/package.json b/node_modules/sigstore/node_modules/tuf-js/package.json
deleted file mode 100644
index 9187d88083272..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/package.json
+++ /dev/null
@@ -1,46 +0,0 @@
-{
-  "name": "tuf-js",
-  "version": "1.1.7",
-  "description": "JavaScript implementation of The Update Framework (TUF)",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "scripts": {
-    "build": "tsc --build",
-    "clean": "rm -rf dist",
-    "test": "jest"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/theupdateframework/tuf-js.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "keywords": [
-    "tuf",
-    "security",
-    "update"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "MIT",
-  "bugs": {
-    "url": "https://github.com/theupdateframework/tuf-js/issues"
-  },
-  "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme",
-  "devDependencies": {
-    "@tufjs/repo-mock": "1.3.1",
-    "@types/debug": "^4.1.8",
-    "@types/make-fetch-happen": "^10.0.1",
-    "@types/node": "^20.2.5",
-    "nock": "^13.3.1",
-    "typescript": "^5.1.3"
-  },
-  "dependencies": {
-    "@tufjs/models": "1.0.4",
-    "debug": "^4.3.4",
-    "make-fetch-happen": "^11.1.1"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  }
-}
diff --git a/node_modules/sigstore/package.json b/node_modules/sigstore/package.json
index 02655a6c79bc8..26e58edd47af3 100644
--- a/node_modules/sigstore/package.json
+++ b/node_modules/sigstore/package.json
@@ -1,6 +1,6 @@
 {
   "name": "sigstore",
-  "version": "1.7.0",
+  "version": "2.0.0",
   "description": "code-signing for npm packages",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -9,9 +9,6 @@
     "build": "tsc --build",
     "test": "jest"
   },
-  "bin": {
-    "sigstore": "bin/sigstore.js"
-  },
   "files": [
     "dist",
     "store"
@@ -30,17 +27,19 @@
     "provenance": true
   },
   "devDependencies": {
-    "@sigstore/rekor-types": "^1.0.0",
+    "@sigstore/rekor-types": "^2.0.0",
     "@sigstore/jest": "^0.0.0",
-    "@tufjs/repo-mock": "^1.1.0",
+    "@sigstore/mock": "^0.3.0",
+    "@tufjs/repo-mock": "^2.0.0",
     "@types/make-fetch-happen": "^10.0.0"
   },
   "dependencies": {
-    "@sigstore/protobuf-specs": "^0.1.0",
-    "@sigstore/tuf": "^1.0.1",
-    "make-fetch-happen": "^11.0.1"
+    "@sigstore/bundle": "^2.0.0",
+    "@sigstore/protobuf-specs": "^0.2.1",
+    "@sigstore/sign": "^2.0.0",
+    "@sigstore/tuf": "^2.0.0"
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   }
 }
diff --git a/package-lock.json b/package-lock.json
index 43b0ba6054880..3793db08f0bc8 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -139,7 +139,7 @@
         "npm-user-validate": "^2.0.0",
         "npmlog": "^7.0.1",
         "p-map": "^4.0.0",
-        "pacote": "^17.0.1",
+        "pacote": "^17.0.2",
         "parse-conflict-json": "^3.0.1",
         "proc-log": "^3.0.0",
         "qrcode-terminal": "^0.12.0",
@@ -230,7 +230,7 @@
         "json-stringify-safe": "^5.0.1",
         "nock": "^13.3.3",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^17.0.1",
+        "pacote": "^17.0.2",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -2851,6 +2851,7 @@
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.0.0.tgz",
       "integrity": "sha512-EO7D7/kMtUsYn596WP+b5N/txWTgOt7N8vsZ2gyneMsxfrPW4FJHRZtMlZeGKCgBNCcjZhZ8ItyawkZqJC8XiA==",
+      "inBundle": true,
       "dependencies": {
         "@sigstore/protobuf-specs": "^0.2.1"
       },
@@ -2858,18 +2859,10 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs": {
+    "node_modules/@sigstore/protobuf-specs": {
       "version": "0.2.1",
       "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz",
       "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@sigstore/protobuf-specs": {
-      "version": "0.1.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.1.0.tgz",
-      "integrity": "sha512-a31EnjuIDSX8IXBUib3cYLDRlPMU36AWX4xS8ysLaNu4ZzUesDiPt83pgrW2X1YLMe5L2HbDyaKK5BrL4cNKaQ==",
       "inBundle": true,
       "engines": {
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
@@ -2879,6 +2872,7 @@
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.0.0.tgz",
       "integrity": "sha512-f+r1jEDwM5969DTORRln9sDmWjTy1cOQzhU/iisGNzFdbF2TglmwNScbH6aiQ6QH4lc3jOXNMgKP6sec1kSVKA==",
+      "inBundle": true,
       "dependencies": {
         "@sigstore/bundle": "^2.0.0",
         "@sigstore/protobuf-specs": "^0.2.1",
@@ -2888,14 +2882,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs": {
-      "version": "0.2.1",
-      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz",
-      "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/@sigstore/tuf": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-2.0.0.tgz",
@@ -2909,15 +2895,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs": {
-      "version": "0.2.1",
-      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz",
-      "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==",
-      "inBundle": true,
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/@tootallnate/once": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz",
@@ -10402,9 +10379,9 @@
       }
     },
     "node_modules/pacote": {
-      "version": "17.0.1",
-      "resolved": "https://registry.npmjs.org/pacote/-/pacote-17.0.1.tgz",
-      "integrity": "sha512-rZzq8E6l+rputgZnhDd/t9rpp47oVnz4SPaKKBRPb7zmrM/QJLub7iJGge4UglWfpxeRynQLUI2Tj2MYaiRqCg==",
+      "version": "17.0.2",
+      "resolved": "https://registry.npmjs.org/pacote/-/pacote-17.0.2.tgz",
+      "integrity": "sha512-Us2QUwVHu4wwUhGZVOHlFIG9LhQ0Aq8zsv1ZvJ37rQwfxSIe/PaPfskz905hHycEXRfmlGKJ5xoEtrF+y66T6w==",
       "inBundle": true,
       "dependencies": {
         "@npmcli/git": "^5.0.0",
@@ -10422,7 +10399,7 @@
         "promise-retry": "^2.0.1",
         "read-package-json": "^7.0.0",
         "read-package-json-fast": "^3.0.0",
-        "sigstore": "^1.3.0",
+        "sigstore": "^2.0.0",
         "ssri": "^10.0.0",
         "tar": "^6.1.11"
       },
@@ -11756,154 +11733,18 @@
       }
     },
     "node_modules/sigstore": {
-      "version": "1.7.0",
-      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-1.7.0.tgz",
-      "integrity": "sha512-KP7QULhWdlu3hlp+jw2EvgWKlOGOY9McLj/jrchLjHNlNPK0KWIwF919cbmOp6QiKXLmPijR2qH/5KYWlbtG9Q==",
-      "inBundle": true,
-      "dependencies": {
-        "@sigstore/protobuf-specs": "^0.1.0",
-        "@sigstore/tuf": "^1.0.1",
-        "make-fetch-happen": "^11.0.1"
-      },
-      "bin": {
-        "sigstore": "bin/sigstore.js"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/sigstore/node_modules/@sigstore/tuf": {
-      "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-1.0.3.tgz",
-      "integrity": "sha512-2bRovzs0nJZFlCN3rXirE4gwxCn97JNjMmwpecqlbgV9WcxX7WRuIrgzx/X7Ib7MYRbyUTpBYE0s2x6AmZXnlg==",
-      "inBundle": true,
-      "dependencies": {
-        "@sigstore/protobuf-specs": "^0.2.0",
-        "tuf-js": "^1.1.7"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/sigstore/node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs": {
-      "version": "0.2.1",
-      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz",
-      "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==",
-      "inBundle": true,
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/sigstore/node_modules/@tufjs/canonical-json": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-1.0.0.tgz",
-      "integrity": "sha512-QTnf++uxunWvG2z3UFNzAoQPHxnSXOwtaI3iJ+AohhV+5vONuArPjJE7aPXPVXfXJsqrVbZBu9b81AJoSd09IQ==",
-      "inBundle": true,
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/sigstore/node_modules/@tufjs/models": {
-      "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-1.0.4.tgz",
-      "integrity": "sha512-qaGV9ltJP0EO25YfFUPhxRVK0evXFIAGicsVXuRim4Ed9cjPxYhNnNJ49SFmbeLgtxpslIkX317IgpfcHPVj/A==",
-      "inBundle": true,
-      "dependencies": {
-        "@tufjs/canonical-json": "1.0.0",
-        "minimatch": "^9.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/sigstore/node_modules/cacache": {
-      "version": "17.1.4",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
-      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
-      "inBundle": true,
-      "dependencies": {
-        "@npmcli/fs": "^3.1.0",
-        "fs-minipass": "^3.0.0",
-        "glob": "^10.2.2",
-        "lru-cache": "^7.7.1",
-        "minipass": "^7.0.3",
-        "minipass-collect": "^1.0.2",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "p-map": "^4.0.0",
-        "ssri": "^10.0.0",
-        "tar": "^6.1.11",
-        "unique-filename": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/sigstore/node_modules/cacache/node_modules/minipass": {
-      "version": "7.0.3",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz",
-      "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==",
-      "inBundle": true,
-      "engines": {
-        "node": ">=16 || 14 >=14.17"
-      }
-    },
-    "node_modules/sigstore/node_modules/lru-cache": {
-      "version": "7.18.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
-      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
-      "inBundle": true,
-      "engines": {
-        "node": ">=12"
-      }
-    },
-    "node_modules/sigstore/node_modules/make-fetch-happen": {
-      "version": "11.1.1",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz",
-      "integrity": "sha512-rLWS7GCSTcEujjVBs2YqG7Y4643u8ucvCJeSRqiLYhesrDuzeuFIk37xREzAsfQaqzl8b9rNCE4m6J8tvX4Q8w==",
-      "inBundle": true,
-      "dependencies": {
-        "agentkeepalive": "^4.2.1",
-        "cacache": "^17.0.0",
-        "http-cache-semantics": "^4.1.1",
-        "http-proxy-agent": "^5.0.0",
-        "https-proxy-agent": "^5.0.0",
-        "is-lambda": "^1.0.1",
-        "lru-cache": "^7.7.1",
-        "minipass": "^5.0.0",
-        "minipass-fetch": "^3.0.0",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "negotiator": "^0.6.3",
-        "promise-retry": "^2.0.1",
-        "socks-proxy-agent": "^7.0.0",
-        "ssri": "^10.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/sigstore/node_modules/minipass": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz",
-      "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==",
-      "inBundle": true,
-      "engines": {
-        "node": ">=8"
-      }
-    },
-    "node_modules/sigstore/node_modules/tuf-js": {
-      "version": "1.1.7",
-      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-1.1.7.tgz",
-      "integrity": "sha512-i3P9Kgw3ytjELUfpuKVDNBJvk4u5bXL6gskv572mcevPbSKCV3zt3djhmlEQ65yERjIbOSncy7U4cQJaB1CBCg==",
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-2.0.0.tgz",
+      "integrity": "sha512-RtTi90xIdzFmQAAKb9+Ki1nx4IR2Z5c+mFn3dN0xuPHgk3gTt3f7ZqKsZ9UFQP40ZAlm7un8LMyjhwgrTIXNPA==",
       "inBundle": true,
       "dependencies": {
-        "@tufjs/models": "1.0.4",
-        "debug": "^4.3.4",
-        "make-fetch-happen": "^11.1.1"
+        "@sigstore/bundle": "^2.0.0",
+        "@sigstore/protobuf-specs": "^0.2.1",
+        "@sigstore/sign": "^2.0.0",
+        "@sigstore/tuf": "^2.0.0"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/slash": {
@@ -16011,7 +15852,7 @@
         "npm-pick-manifest": "^9.0.0",
         "npm-registry-fetch": "^16.0.0",
         "npmlog": "^7.0.1",
-        "pacote": "^17.0.1",
+        "pacote": "^17.0.2",
         "parse-conflict-json": "^3.0.0",
         "proc-log": "^3.0.0",
         "promise-all-reject-late": "^1.0.0",
@@ -16092,7 +15933,7 @@
         "diff": "^5.1.0",
         "minimatch": "^9.0.0",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^17.0.1",
+        "pacote": "^17.0.2",
         "tar": "^6.1.13"
       },
       "devDependencies": {
@@ -16113,7 +15954,7 @@
         "ci-info": "^3.7.1",
         "npm-package-arg": "^11.0.0",
         "npmlog": "^7.0.1",
-        "pacote": "^17.0.1",
+        "pacote": "^17.0.2",
         "proc-log": "^3.0.0",
         "read": "^2.0.0",
         "read-package-json-fast": "^3.0.2",
@@ -16191,7 +16032,7 @@
         "@npmcli/arborist": "^6.3.0",
         "@npmcli/run-script": "^6.0.0",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^17.0.1"
+        "pacote": "^17.0.2"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
@@ -16229,28 +16070,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "workspaces/libnpmpublish/node_modules/@sigstore/protobuf-specs": {
-      "version": "0.2.1",
-      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.2.1.tgz",
-      "integrity": "sha512-XTWVxnWJu+c1oCshMLwnKvz8ZQJJDVOlciMfgpJBQbThVjKTCG8dwyhgLngBD2KN0ap9F/gOV8rFDEx8uh7R2A==",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "workspaces/libnpmpublish/node_modules/sigstore": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-2.0.0.tgz",
-      "integrity": "sha512-RtTi90xIdzFmQAAKb9+Ki1nx4IR2Z5c+mFn3dN0xuPHgk3gTt3f7ZqKsZ9UFQP40ZAlm7un8LMyjhwgrTIXNPA==",
-      "dependencies": {
-        "@sigstore/bundle": "^2.0.0",
-        "@sigstore/protobuf-specs": "^0.2.1",
-        "@sigstore/sign": "^2.0.0",
-        "@sigstore/tuf": "^2.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
     "workspaces/libnpmsearch": {
       "version": "6.0.2",
       "license": "ISC",
diff --git a/package.json b/package.json
index 303fd1e42c853..f57afaa03a103 100644
--- a/package.json
+++ b/package.json
@@ -104,7 +104,7 @@
     "npm-user-validate": "^2.0.0",
     "npmlog": "^7.0.1",
     "p-map": "^4.0.0",
-    "pacote": "^17.0.1",
+    "pacote": "^17.0.2",
     "parse-conflict-json": "^3.0.1",
     "proc-log": "^3.0.0",
     "qrcode-terminal": "^0.12.0",
diff --git a/test/lib/commands/audit.js b/test/lib/commands/audit.js
index ae6d6c170842f..4a776e89bd9e9 100644
--- a/test/lib/commands/audit.js
+++ b/test/lib/commands/audit.js
@@ -1873,9 +1873,7 @@ t.test('audit signatures', async t => {
       prefixDir: installWithValidAttestations,
       mocks: {
         pacote: t.mock('pacote', {
-          sigstore: {
-            sigstore: { verify: async () => true },
-          },
+          sigstore: { verify: async () => true },
         }),
       },
     })
@@ -1900,9 +1898,7 @@ t.test('audit signatures', async t => {
       prefixDir: installWithMultipleValidAttestations,
       mocks: {
         pacote: t.mock('pacote', {
-          sigstore: {
-            sigstore: { verify: async () => true },
-          },
+          sigstore: { verify: async () => true },
         }),
       },
     })
@@ -1933,10 +1929,8 @@ t.test('audit signatures', async t => {
       mocks: {
         pacote: t.mock('pacote', {
           sigstore: {
-            sigstore: {
-              verify: async () => {
-                throw new Error(`artifact signature verification failed`)
-              },
+            verify: async () => {
+              throw new Error(`artifact signature verification failed`)
             },
           },
         }),
@@ -1970,10 +1964,8 @@ t.test('audit signatures', async t => {
       mocks: {
         pacote: t.mock('pacote', {
           sigstore: {
-            sigstore: {
-              verify: async () => {
-                throw new Error(`artifact signature verification failed`)
-              },
+            verify: async () => {
+              throw new Error(`artifact signature verification failed`)
             },
           },
         }),
@@ -2001,10 +1993,8 @@ t.test('audit signatures', async t => {
       mocks: {
         pacote: t.mock('pacote', {
           sigstore: {
-            sigstore: {
-              verify: async () => {
-                throw new Error(`artifact signature verification failed`)
-              },
+            verify: async () => {
+              throw new Error(`artifact signature verification failed`)
             },
           },
         }),
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 844ecf4b09bac..496b8137e03ce 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -26,7 +26,7 @@
     "npm-pick-manifest": "^9.0.0",
     "npm-registry-fetch": "^16.0.0",
     "npmlog": "^7.0.1",
-    "pacote": "^17.0.1",
+    "pacote": "^17.0.2",
     "parse-conflict-json": "^3.0.0",
     "proc-log": "^3.0.0",
     "promise-all-reject-late": "^1.0.0",
diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json
index 8a9f82a675ce0..b9a6408ee3d6e 100644
--- a/workspaces/libnpmdiff/package.json
+++ b/workspaces/libnpmdiff/package.json
@@ -53,7 +53,7 @@
     "diff": "^5.1.0",
     "minimatch": "^9.0.0",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^17.0.1",
+    "pacote": "^17.0.2",
     "tar": "^6.1.13"
   },
   "templateOSS": {
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index f4d199602cb2d..79f04a170170d 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -64,7 +64,7 @@
     "ci-info": "^3.7.1",
     "npm-package-arg": "^11.0.0",
     "npmlog": "^7.0.1",
-    "pacote": "^17.0.1",
+    "pacote": "^17.0.2",
     "proc-log": "^3.0.0",
     "read": "^2.0.0",
     "read-package-json-fast": "^3.0.2",
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index 439e5deb119f4..a5d1e14c4468e 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -39,7 +39,7 @@
     "@npmcli/arborist": "^6.3.0",
     "@npmcli/run-script": "^6.0.0",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^17.0.1"
+    "pacote": "^17.0.2"
   },
   "engines": {
     "node": "^16.14.0 || >=18.0.0"

From bbbd3f1b0c2101bf524b47ca1ddafd0af14198ce Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Wed, 30 Aug 2023 10:19:29 -0700
Subject: [PATCH 56/68] deps: pacote@17.0.3

---
 mock-registry/package.json                    |  2 +-
 node_modules/.gitignore                       |  4 ++-
 .../node_modules}/npm-packlist/LICENSE        |  0
 .../node_modules}/npm-packlist/lib/index.js   | 34 +++++++++++++++++--
 .../node_modules}/npm-packlist/package.json   |  7 ++--
 node_modules/pacote/package.json              |  4 +--
 package-lock.json                             | 34 +++++++++++++------
 package.json                                  |  2 +-
 workspaces/arborist/package.json              |  2 +-
 workspaces/libnpmdiff/package.json            |  2 +-
 workspaces/libnpmexec/package.json            |  2 +-
 workspaces/libnpmpack/package.json            |  2 +-
 12 files changed, 69 insertions(+), 26 deletions(-)
 rename node_modules/{ => pacote/node_modules}/npm-packlist/LICENSE (100%)
 rename node_modules/{ => pacote/node_modules}/npm-packlist/lib/index.js (94%)
 rename node_modules/{ => pacote/node_modules}/npm-packlist/package.json (92%)

diff --git a/mock-registry/package.json b/mock-registry/package.json
index d07851b37ae38..eb7544d8cfeaf 100644
--- a/mock-registry/package.json
+++ b/mock-registry/package.json
@@ -56,7 +56,7 @@
     "json-stringify-safe": "^5.0.1",
     "nock": "^13.3.3",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^17.0.2",
+    "pacote": "^17.0.3",
     "tap": "^16.3.8"
   }
 }
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 5e29e58b28c44..ff9fee3432225 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -202,7 +202,6 @@
 !/npm-install-checks
 !/npm-normalize-package-bin
 !/npm-package-arg
-!/npm-packlist
 !/npm-pick-manifest
 !/npm-profile
 !/npm-registry-fetch
@@ -211,6 +210,9 @@
 !/once
 !/p-map
 !/pacote
+!/pacote/node_modules/
+/pacote/node_modules/*
+!/pacote/node_modules/npm-packlist
 !/parse-conflict-json
 !/path-is-absolute
 !/path-key
diff --git a/node_modules/npm-packlist/LICENSE b/node_modules/pacote/node_modules/npm-packlist/LICENSE
similarity index 100%
rename from node_modules/npm-packlist/LICENSE
rename to node_modules/pacote/node_modules/npm-packlist/LICENSE
diff --git a/node_modules/npm-packlist/lib/index.js b/node_modules/pacote/node_modules/npm-packlist/lib/index.js
similarity index 94%
rename from node_modules/npm-packlist/lib/index.js
rename to node_modules/pacote/node_modules/npm-packlist/lib/index.js
index 887018bd7d424..7577cba0b865d 100644
--- a/node_modules/npm-packlist/lib/index.js
+++ b/node_modules/pacote/node_modules/npm-packlist/lib/index.js
@@ -38,13 +38,22 @@ const defaults = [
 ]
 
 const strictDefaults = [
-  // these are forcibly included at all levels
+  // these are forcibly excluded
+  '/.git',
+]
+
+const allLevels = [
+  // these are included by default but can be excluded by package.json files array
   '!/readme{,.*[^~$]}',
   '!/copying{,.*[^~$]}',
   '!/license{,.*[^~$]}',
   '!/licence{,.*[^~$]}',
-  // these are forcibly excluded
-  '/.git',
+]
+
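+// case-insensitive patterns matching files-array entries that negate a
+// readme/copying/licen[sc]e file; see excludeNonRoot below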
+const rootOnly = [
+  /^!.*readme/i,
+  /^!.*copying/i,
+  /^!.*licen[sc]e/i,
 ]
 
 const normalizePath = (path) => path.split('\\').join('/')
@@ -132,6 +141,7 @@ class PackWalker extends IgnoreWalker {
       // known required files for this directory
       this.injectRules(strictRules, [
         ...strictDefaults,
+        ...allLevels,
         ...this.requiredFiles.map((file) => `!${file}`),
       ])
     }
@@ -284,6 +294,7 @@ class PackWalker extends IgnoreWalker {
     const ignores = []
     const strict = [
       ...strictDefaults,
+      ...allLevels,
       '!/package.json',
       '/.git',
       '/node_modules',
@@ -304,6 +315,9 @@ class PackWalker extends IgnoreWalker {
           file = file.slice(0, -2)
         }
         const inverse = `!${file}`
+
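+        // if this entry explicitly excludes a readme/license/copying file,
+        // drop the matching always-include pattern from allLevels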
+        this.excludeNonRoot(file)
+
         try {
           // if an entry in the files array is a specific file, then we need to include it as a
           // strict requirement for this package. if it's a directory or a pattern, it's a default
@@ -352,6 +366,20 @@ class PackWalker extends IgnoreWalker {
     this.injectRules(strictRules, strict, callback)
   }
 
+  // excludes non-root files by checking whether an entry in the files array
+  // of package.json negates (!) a readme/license/licence/copying file, and
+  // then removing the matching readme/license/licence/copying pattern from allLevels
+  excludeNonRoot (file) {
+    // Find the first rootOnly pattern that matches this files entry
+    const matchingPattern = rootOnly.find(regex => regex.test(file))
+
+    if (matchingPattern) {
+      // Find the allLevels entry matching the pattern and remove it
+      const indexToRemove = allLevels.findIndex(element => matchingPattern.test(element))
+      allLevels.splice(indexToRemove, 1)
+    }
+  }
+
   // custom method: after we've finished gathering the files for the root package, we call this
   // before emitting the 'done' event in order to gather all of the files for bundled deps
   async gatherBundles () {
diff --git a/node_modules/npm-packlist/package.json b/node_modules/pacote/node_modules/npm-packlist/package.json
similarity index 92%
rename from node_modules/npm-packlist/package.json
rename to node_modules/pacote/node_modules/npm-packlist/package.json
index 6023ad34df3b4..460ca7e30ad23 100644
--- a/node_modules/npm-packlist/package.json
+++ b/node_modules/pacote/node_modules/npm-packlist/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-packlist",
-  "version": "7.0.4",
+  "version": "8.0.0",
   "description": "Get a list of the files to add from a folder into an npm package",
   "directories": {
     "test": "test"
@@ -18,7 +18,7 @@
   "devDependencies": {
     "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0",
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.10.0",
+    "@npmcli/template-oss": "4.18.0",
     "mutate-fs": "^2.1.1",
     "tap": "^16.0.1"
   },
@@ -55,6 +55,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.10.0"
+    "version": "4.18.0",
+    "publish": true
   }
 }
diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json
index dffd3aecf9d13..44236542285c8 100644
--- a/node_modules/pacote/package.json
+++ b/node_modules/pacote/package.json
@@ -1,6 +1,6 @@
 {
   "name": "pacote",
-  "version": "17.0.2",
+  "version": "17.0.3",
   "description": "JavaScript package downloader",
   "author": "GitHub Inc.",
   "bin": {
@@ -52,7 +52,7 @@
     "fs-minipass": "^3.0.0",
     "minipass": "^7.0.2",
     "npm-package-arg": "^11.0.0",
-    "npm-packlist": "^7.0.0",
+    "npm-packlist": "^8.0.0",
     "npm-pick-manifest": "^9.0.0",
     "npm-registry-fetch": "^16.0.0",
     "proc-log": "^3.0.0",
diff --git a/package-lock.json b/package-lock.json
index 3793db08f0bc8..234480e8bbfb4 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -139,7 +139,7 @@
         "npm-user-validate": "^2.0.0",
         "npmlog": "^7.0.1",
         "p-map": "^4.0.0",
-        "pacote": "^17.0.2",
+        "pacote": "^17.0.3",
         "parse-conflict-json": "^3.0.1",
         "proc-log": "^3.0.0",
         "qrcode-terminal": "^0.12.0",
@@ -230,7 +230,7 @@
         "json-stringify-safe": "^5.0.1",
         "nock": "^13.3.3",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^17.0.2",
+        "pacote": "^17.0.3",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -9791,7 +9791,7 @@
       "version": "7.0.4",
       "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-7.0.4.tgz",
       "integrity": "sha512-d6RGEuRrNS5/N84iglPivjaJPxhDbZmlbTwTDX2IbcRHG5bZCdtysYMhwiPvcF4GisXHGn7xsxv+GQ7T/02M5Q==",
-      "inBundle": true,
+      "dev": true,
       "dependencies": {
         "ignore-walk": "^6.0.0"
       },
@@ -10379,9 +10379,9 @@
       }
     },
     "node_modules/pacote": {
-      "version": "17.0.2",
-      "resolved": "https://registry.npmjs.org/pacote/-/pacote-17.0.2.tgz",
-      "integrity": "sha512-Us2QUwVHu4wwUhGZVOHlFIG9LhQ0Aq8zsv1ZvJ37rQwfxSIe/PaPfskz905hHycEXRfmlGKJ5xoEtrF+y66T6w==",
+      "version": "17.0.3",
+      "resolved": "https://registry.npmjs.org/pacote/-/pacote-17.0.3.tgz",
+      "integrity": "sha512-nT66y5NK2u/d7qV9lP6ye+powAufDl6OHT+aOZ4Cmtq89GSqgB05Ar6aQ7DM+0+bIE5NCdYUcqFlkK4m/0LVHA==",
       "inBundle": true,
       "dependencies": {
         "@npmcli/git": "^5.0.0",
@@ -10392,7 +10392,7 @@
         "fs-minipass": "^3.0.0",
         "minipass": "^7.0.2",
         "npm-package-arg": "^11.0.0",
-        "npm-packlist": "^7.0.0",
+        "npm-packlist": "^8.0.0",
         "npm-pick-manifest": "^9.0.0",
         "npm-registry-fetch": "^16.0.0",
         "proc-log": "^3.0.0",
@@ -10410,6 +10410,18 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/pacote/node_modules/npm-packlist": {
+      "version": "8.0.0",
+      "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-8.0.0.tgz",
+      "integrity": "sha512-ErAGFB5kJUciPy1mmx/C2YFbvxoJ0QJ9uwkCZOeR6CqLLISPZBOiFModAbSXnjjlwW5lOhuhXva+fURsSGJqyw==",
+      "inBundle": true,
+      "dependencies": {
+        "ignore-walk": "^6.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/parent-module": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -15852,7 +15864,7 @@
         "npm-pick-manifest": "^9.0.0",
         "npm-registry-fetch": "^16.0.0",
         "npmlog": "^7.0.1",
-        "pacote": "^17.0.2",
+        "pacote": "^17.0.3",
         "parse-conflict-json": "^3.0.0",
         "proc-log": "^3.0.0",
         "promise-all-reject-late": "^1.0.0",
@@ -15933,7 +15945,7 @@
         "diff": "^5.1.0",
         "minimatch": "^9.0.0",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^17.0.2",
+        "pacote": "^17.0.3",
         "tar": "^6.1.13"
       },
       "devDependencies": {
@@ -15954,7 +15966,7 @@
         "ci-info": "^3.7.1",
         "npm-package-arg": "^11.0.0",
         "npmlog": "^7.0.1",
-        "pacote": "^17.0.2",
+        "pacote": "^17.0.3",
         "proc-log": "^3.0.0",
         "read": "^2.0.0",
         "read-package-json-fast": "^3.0.2",
@@ -16032,7 +16044,7 @@
         "@npmcli/arborist": "^6.3.0",
         "@npmcli/run-script": "^6.0.0",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^17.0.2"
+        "pacote": "^17.0.3"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
diff --git a/package.json b/package.json
index f57afaa03a103..7cfe5efb2b3ee 100644
--- a/package.json
+++ b/package.json
@@ -104,7 +104,7 @@
     "npm-user-validate": "^2.0.0",
     "npmlog": "^7.0.1",
     "p-map": "^4.0.0",
-    "pacote": "^17.0.2",
+    "pacote": "^17.0.3",
     "parse-conflict-json": "^3.0.1",
     "proc-log": "^3.0.0",
     "qrcode-terminal": "^0.12.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 496b8137e03ce..c86504f333e9f 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -26,7 +26,7 @@
     "npm-pick-manifest": "^9.0.0",
     "npm-registry-fetch": "^16.0.0",
     "npmlog": "^7.0.1",
-    "pacote": "^17.0.2",
+    "pacote": "^17.0.3",
     "parse-conflict-json": "^3.0.0",
     "proc-log": "^3.0.0",
     "promise-all-reject-late": "^1.0.0",
diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json
index b9a6408ee3d6e..d2fe63d07219f 100644
--- a/workspaces/libnpmdiff/package.json
+++ b/workspaces/libnpmdiff/package.json
@@ -53,7 +53,7 @@
     "diff": "^5.1.0",
     "minimatch": "^9.0.0",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^17.0.2",
+    "pacote": "^17.0.3",
     "tar": "^6.1.13"
   },
   "templateOSS": {
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index 79f04a170170d..8e97e9f096100 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -64,7 +64,7 @@
     "ci-info": "^3.7.1",
     "npm-package-arg": "^11.0.0",
     "npmlog": "^7.0.1",
-    "pacote": "^17.0.2",
+    "pacote": "^17.0.3",
     "proc-log": "^3.0.0",
     "read": "^2.0.0",
     "read-package-json-fast": "^3.0.2",
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index a5d1e14c4468e..4e1b055b75a41 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -39,7 +39,7 @@
     "@npmcli/arborist": "^6.3.0",
     "@npmcli/run-script": "^6.0.0",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^17.0.2"
+    "pacote": "^17.0.3"
   },
   "engines": {
     "node": "^16.14.0 || >=18.0.0"

From d712ed26e73f0a7a7b753ea604abe32078dbbc33 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Wed, 30 Aug 2023 10:20:05 -0700
Subject: [PATCH 57/68] deps: npm-packlist@8.0.0

---
 node_modules/.gitignore                       |  4 +---
 .../node_modules => }/npm-packlist/LICENSE    |  0
 .../npm-packlist/lib/index.js                 |  0
 .../npm-packlist/package.json                 |  0
 package-lock.json                             | 22 +++++--------------
 package.json                                  |  2 +-
 6 files changed, 7 insertions(+), 21 deletions(-)
 rename node_modules/{pacote/node_modules => }/npm-packlist/LICENSE (100%)
 rename node_modules/{pacote/node_modules => }/npm-packlist/lib/index.js (100%)
 rename node_modules/{pacote/node_modules => }/npm-packlist/package.json (100%)

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index ff9fee3432225..5e29e58b28c44 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -202,6 +202,7 @@
 !/npm-install-checks
 !/npm-normalize-package-bin
 !/npm-package-arg
+!/npm-packlist
 !/npm-pick-manifest
 !/npm-profile
 !/npm-registry-fetch
@@ -210,9 +211,6 @@
 !/once
 !/p-map
 !/pacote
-!/pacote/node_modules/
-/pacote/node_modules/*
-!/pacote/node_modules/npm-packlist
 !/parse-conflict-json
 !/path-is-absolute
 !/path-key
diff --git a/node_modules/pacote/node_modules/npm-packlist/LICENSE b/node_modules/npm-packlist/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/npm-packlist/LICENSE
rename to node_modules/npm-packlist/LICENSE
diff --git a/node_modules/pacote/node_modules/npm-packlist/lib/index.js b/node_modules/npm-packlist/lib/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/npm-packlist/lib/index.js
rename to node_modules/npm-packlist/lib/index.js
diff --git a/node_modules/pacote/node_modules/npm-packlist/package.json b/node_modules/npm-packlist/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/npm-packlist/package.json
rename to node_modules/npm-packlist/package.json
diff --git a/package-lock.json b/package-lock.json
index 234480e8bbfb4..be16c47b205cf 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -170,7 +170,7 @@
         "diff": "^5.1.0",
         "licensee": "^10.0.0",
         "nock": "^13.3.3",
-        "npm-packlist": "^7.0.4",
+        "npm-packlist": "^8.0.0",
         "remark": "^14.0.2",
         "remark-gfm": "^3.0.1",
         "remark-github": "^11.2.4",
@@ -9788,10 +9788,10 @@
       }
     },
     "node_modules/npm-packlist": {
-      "version": "7.0.4",
-      "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-7.0.4.tgz",
-      "integrity": "sha512-d6RGEuRrNS5/N84iglPivjaJPxhDbZmlbTwTDX2IbcRHG5bZCdtysYMhwiPvcF4GisXHGn7xsxv+GQ7T/02M5Q==",
-      "dev": true,
+      "version": "8.0.0",
+      "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-8.0.0.tgz",
+      "integrity": "sha512-ErAGFB5kJUciPy1mmx/C2YFbvxoJ0QJ9uwkCZOeR6CqLLISPZBOiFModAbSXnjjlwW5lOhuhXva+fURsSGJqyw==",
+      "inBundle": true,
       "dependencies": {
         "ignore-walk": "^6.0.0"
       },
@@ -10410,18 +10410,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/pacote/node_modules/npm-packlist": {
-      "version": "8.0.0",
-      "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-8.0.0.tgz",
-      "integrity": "sha512-ErAGFB5kJUciPy1mmx/C2YFbvxoJ0QJ9uwkCZOeR6CqLLISPZBOiFModAbSXnjjlwW5lOhuhXva+fURsSGJqyw==",
-      "inBundle": true,
-      "dependencies": {
-        "ignore-walk": "^6.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/parent-module": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
diff --git a/package.json b/package.json
index 7cfe5efb2b3ee..a78ac6e0a4f56 100644
--- a/package.json
+++ b/package.json
@@ -201,7 +201,7 @@
     "diff": "^5.1.0",
     "licensee": "^10.0.0",
     "nock": "^13.3.3",
-    "npm-packlist": "^7.0.4",
+    "npm-packlist": "^8.0.0",
     "remark": "^14.0.2",
     "remark-gfm": "^3.0.1",
     "remark-github": "^11.2.4",

From 2f2824529158e704d2bbaa621ea186276d8a6f5d Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Wed, 30 Aug 2023 10:20:45 -0700
Subject: [PATCH 58/68] deps: which@4.0.0

---
 node_modules/.gitignore                       |  12 ++
 .../@npmcli/git/node_modules/which/LICENSE    |  15 +++
 .../git/node_modules/which/bin/which.js       |  52 ++++++++
 .../git/node_modules/which/lib/index.js       | 115 ++++++++++++++++++
 .../git/node_modules/which/package.json       |  51 ++++++++
 .../promise-spawn/node_modules/which/LICENSE  |  15 +++
 .../node_modules/which/bin/which.js           |  52 ++++++++
 .../node_modules/which/lib/index.js           | 115 ++++++++++++++++++
 .../node_modules/which/package.json           |  51 ++++++++
 .../run-script/node_modules/which/LICENSE     |  15 +++
 .../node_modules/which/bin/which.js           |  52 ++++++++
 .../node_modules/which/lib/index.js           | 115 ++++++++++++++++++
 .../node_modules/which/package.json           |  51 ++++++++
 node_modules/which/lib/index.js               |  10 +-
 node_modules/which/node_modules/isexe/LICENSE |  15 +++
 .../node_modules/isexe/dist/cjs/index.js      |  46 +++++++
 .../node_modules/isexe/dist/cjs/options.js    |   3 +
 .../node_modules/isexe/dist/cjs/package.json  |   3 +
 .../node_modules/isexe/dist/cjs/posix.js      |  67 ++++++++++
 .../node_modules/isexe/dist/cjs/win32.js      |  62 ++++++++++
 .../node_modules/isexe/dist/mjs/index.js      |  16 +++
 .../node_modules/isexe/dist/mjs/options.js    |   2 +
 .../node_modules/isexe/dist/mjs/package.json  |   3 +
 .../node_modules/isexe/dist/mjs/posix.js      |  62 ++++++++++
 .../node_modules/isexe/dist/mjs/win32.js      |  57 +++++++++
 .../which/node_modules/isexe/package.json     |  96 +++++++++++++++
 node_modules/which/package.json               |  16 ++-
 package-lock.json                             |  98 +++++++++++++--
 package.json                                  |   2 +-
 smoke-tests/package.json                      |   2 +-
 30 files changed, 1250 insertions(+), 21 deletions(-)
 create mode 100644 node_modules/@npmcli/git/node_modules/which/LICENSE
 create mode 100755 node_modules/@npmcli/git/node_modules/which/bin/which.js
 create mode 100644 node_modules/@npmcli/git/node_modules/which/lib/index.js
 create mode 100644 node_modules/@npmcli/git/node_modules/which/package.json
 create mode 100644 node_modules/@npmcli/promise-spawn/node_modules/which/LICENSE
 create mode 100755 node_modules/@npmcli/promise-spawn/node_modules/which/bin/which.js
 create mode 100644 node_modules/@npmcli/promise-spawn/node_modules/which/lib/index.js
 create mode 100644 node_modules/@npmcli/promise-spawn/node_modules/which/package.json
 create mode 100644 node_modules/@npmcli/run-script/node_modules/which/LICENSE
 create mode 100755 node_modules/@npmcli/run-script/node_modules/which/bin/which.js
 create mode 100644 node_modules/@npmcli/run-script/node_modules/which/lib/index.js
 create mode 100644 node_modules/@npmcli/run-script/node_modules/which/package.json
 create mode 100644 node_modules/which/node_modules/isexe/LICENSE
 create mode 100644 node_modules/which/node_modules/isexe/dist/cjs/index.js
 create mode 100644 node_modules/which/node_modules/isexe/dist/cjs/options.js
 create mode 100644 node_modules/which/node_modules/isexe/dist/cjs/package.json
 create mode 100644 node_modules/which/node_modules/isexe/dist/cjs/posix.js
 create mode 100644 node_modules/which/node_modules/isexe/dist/cjs/win32.js
 create mode 100644 node_modules/which/node_modules/isexe/dist/mjs/index.js
 create mode 100644 node_modules/which/node_modules/isexe/dist/mjs/options.js
 create mode 100644 node_modules/which/node_modules/isexe/dist/mjs/package.json
 create mode 100644 node_modules/which/node_modules/isexe/dist/mjs/posix.js
 create mode 100644 node_modules/which/node_modules/isexe/dist/mjs/win32.js
 create mode 100644 node_modules/which/node_modules/isexe/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 5e29e58b28c44..dda14819326ad 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -22,6 +22,9 @@
 !/@npmcli/disparity-colors
 !/@npmcli/fs
 !/@npmcli/git
+!/@npmcli/git/node_modules/
+/@npmcli/git/node_modules/*
+!/@npmcli/git/node_modules/which
 !/@npmcli/installed-package-contents
 !/@npmcli/map-workspaces
 !/@npmcli/metavuln-calculator
@@ -29,8 +32,14 @@
 !/@npmcli/node-gyp
 !/@npmcli/package-json
 !/@npmcli/promise-spawn
+!/@npmcli/promise-spawn/node_modules/
+/@npmcli/promise-spawn/node_modules/*
+!/@npmcli/promise-spawn/node_modules/which
 !/@npmcli/query
 !/@npmcli/run-script
+!/@npmcli/run-script/node_modules/
+/@npmcli/run-script/node_modules/*
+!/@npmcli/run-script/node_modules/which
 !/@pkgjs/
 /@pkgjs/*
 !/@pkgjs/parseargs
@@ -281,6 +290,9 @@
 !/walk-up-path
 !/wcwidth
 !/which
+!/which/node_modules/
+/which/node_modules/*
+!/which/node_modules/isexe
 !/wide-align
 !/wrap-ansi-cjs
 !/wrap-ansi
diff --git a/node_modules/@npmcli/git/node_modules/which/LICENSE b/node_modules/@npmcli/git/node_modules/which/LICENSE
new file mode 100644
index 0000000000000..19129e315fe59
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/which/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/git/node_modules/which/bin/which.js b/node_modules/@npmcli/git/node_modules/which/bin/which.js
new file mode 100755
index 0000000000000..6df16f21acf93
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/which/bin/which.js
@@ -0,0 +1,52 @@
+#!/usr/bin/env node
+
+const which = require('../lib')
+const argv = process.argv.slice(2)
+
+const usage = (err) => {
+  if (err) {
+    console.error(`which: ${err}`)
+  }
+  console.error('usage: which [-as] program ...')
+  process.exit(1)
+}
+
+if (!argv.length) {
+  return usage()
+}
+
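+// split argv into positional commands and -a/-s flags; "--" and everything
+// after it is ignored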
+let dashdash = false
+const [commands, flags] = argv.reduce((acc, arg) => {
+  if (dashdash || arg === '--') {
+    dashdash = true
+    return acc
+  }
+
+  if (!/^-/.test(arg)) {
+    acc[0].push(arg)
+    return acc
+  }
+
+  for (const flag of arg.slice(1).split('')) {
+    if (flag === 's') {
+      acc[1].silent = true
+    } else if (flag === 'a') {
+      acc[1].all = true
+    } else {
+      usage(`illegal option -- ${flag}`)
+    }
+  }
+
+  return acc
+}, [[], {}])
+
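+// look up each command, printing results unless -s was given; any failed
+// lookup sets exit code 1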
+for (const command of commands) {
+  try {
+    const res = which.sync(command, { all: flags.all })
+    if (!flags.silent) {
+      console.log([].concat(res).join('\n'))
+    }
+  } catch (err) {
+    process.exitCode = 1
+  }
+}
diff --git a/node_modules/@npmcli/git/node_modules/which/lib/index.js b/node_modules/@npmcli/git/node_modules/which/lib/index.js
new file mode 100644
index 0000000000000..52e9ea62377e7
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/which/lib/index.js
@@ -0,0 +1,115 @@
+const isexe = require('isexe')
+const { join, delimiter, sep, posix } = require('path')
+
+const isWindows = process.platform === 'win32'
+
+// used to check for slashes in commands passed in. always checks for the posix
+// separator on all platforms, and checks for the current separator when not on
+// a posix platform. don't use the isWindows check for this since that is mocked
+// in tests but we still need the code to actually work when called. that is also
+// why it is ignored from coverage.
+/* istanbul ignore next */
+const rSlash = new RegExp(`[${posix.sep}${sep === posix.sep ? '' : sep}]`.replace(/(\\)/g, '\\$1'))
+const rRel = new RegExp(`^\\.${rSlash.source}`)
+
+const getNotFoundError = (cmd) =>
+  Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' })
+
+const getPathInfo = (cmd, {
+  path: optPath = process.env.PATH,
+  pathExt: optPathExt = process.env.PATHEXT,
+  delimiter: optDelimiter = delimiter,
+}) => {
+  // If it has a slash, then we don't bother searching the PATH env var;
+  // just check the file itself, and that's it.
+  const pathEnv = cmd.match(rSlash) ? [''] : [
+    // windows always checks the cwd first
+    ...(isWindows ? [process.cwd()] : []),
+    ...(optPath || /* istanbul ignore next: very unusual */ '').split(optDelimiter),
+  ]
+
+  if (isWindows) {
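+    // try each PATHEXT entry in its original and lowercase form; a command
+    // that already includes a "." is also tried with no extension added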
+    const pathExtExe = optPathExt ||
+      ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter)
+    const pathExt = pathExtExe.split(optDelimiter).reduce((acc, item) => {
+      acc.push(item)
+      acc.push(item.toLowerCase())
+      return acc
+    }, [])
+    if (cmd.includes('.') && pathExt[0] !== '') {
+      pathExt.unshift('')
+    }
+    return { pathEnv, pathExt, pathExtExe }
+  }
+
+  return { pathEnv, pathExt: [''] }
+}
+
+const getPathPart = (raw, cmd) => {
+  const pathPart = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw
+  const prefix = !pathPart && rRel.test(cmd) ? cmd.slice(0, 2) : ''
+  return prefix + join(pathPart, cmd)
+}
+
+const which = async (cmd, opt = {}) => {
+  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt)
+  const found = []
+
+  for (const envPart of pathEnv) {
+    const p = getPathPart(envPart, cmd)
+
+    for (const ext of pathExt) {
+      const withExt = p + ext
+      const is = await isexe(withExt, { pathExt: pathExtExe, ignoreErrors: true })
+      if (is) {
+        if (!opt.all) {
+          return withExt
+        }
+        found.push(withExt)
+      }
+    }
+  }
+
+  if (opt.all && found.length) {
+    return found
+  }
+
+  if (opt.nothrow) {
+    return null
+  }
+
+  throw getNotFoundError(cmd)
+}
+
+const whichSync = (cmd, opt = {}) => {
+  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt)
+  const found = []
+
+  for (const pathEnvPart of pathEnv) {
+    const p = getPathPart(pathEnvPart, cmd)
+
+    for (const ext of pathExt) {
+      const withExt = p + ext
+      const is = isexe.sync(withExt, { pathExt: pathExtExe, ignoreErrors: true })
+      if (is) {
+        if (!opt.all) {
+          return withExt
+        }
+        found.push(withExt)
+      }
+    }
+  }
+
+  if (opt.all && found.length) {
+    return found
+  }
+
+  if (opt.nothrow) {
+    return null
+  }
+
+  throw getNotFoundError(cmd)
+}
+
+module.exports = which
+which.sync = whichSync
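The lib above is the whole public surface of which v3: an async default export plus a sync variant, each honoring the `all` and `nothrow` options. A minimal, hedged usage sketch (the command names are arbitrary examples):

const which = require('which')

async function demo () {
  // first match on PATH; rejects with an ENOENT-coded error if absent
  const first = await which('node')

  // all: true collects every match instead of returning the first
  const every = await which('node', { all: true })

  // nothrow: true yields null instead of throwing when nothing matches
  const missing = which.sync('no-such-tool', { nothrow: true })

  console.log({ first, every, missing })
}

demo()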
diff --git a/node_modules/@npmcli/git/node_modules/which/package.json b/node_modules/@npmcli/git/node_modules/which/package.json
new file mode 100644
index 0000000000000..989e01c9a3683
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/which/package.json
@@ -0,0 +1,51 @@
+{
+  "author": "GitHub Inc.",
+  "name": "which",
+  "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
+  "version": "3.0.1",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/node-which.git"
+  },
+  "main": "lib/index.js",
+  "bin": {
+    "node-which": "./bin/which.js"
+  },
+  "license": "ISC",
+  "dependencies": {
+    "isexe": "^2.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.14.1",
+    "tap": "^16.3.0"
+  },
+  "scripts": {
+    "test": "tap",
+    "lint": "eslint \"**/*.js\"",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "lintfix": "npm run lint -- --fix",
+    "snap": "tap",
+    "posttest": "npm run lint"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "tap": {
+    "check-coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.14.1",
+    "publish": "true"
+  }
+}
diff --git a/node_modules/@npmcli/promise-spawn/node_modules/which/LICENSE b/node_modules/@npmcli/promise-spawn/node_modules/which/LICENSE
new file mode 100644
index 0000000000000..19129e315fe59
--- /dev/null
+++ b/node_modules/@npmcli/promise-spawn/node_modules/which/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/promise-spawn/node_modules/which/bin/which.js b/node_modules/@npmcli/promise-spawn/node_modules/which/bin/which.js
new file mode 100755
index 0000000000000..6df16f21acf93
--- /dev/null
+++ b/node_modules/@npmcli/promise-spawn/node_modules/which/bin/which.js
@@ -0,0 +1,52 @@
+#!/usr/bin/env node
+
+const which = require('../lib')
+const argv = process.argv.slice(2)
+
+const usage = (err) => {
+  if (err) {
+    console.error(`which: ${err}`)
+  }
+  console.error('usage: which [-as] program ...')
+  process.exit(1)
+}
+
+if (!argv.length) {
+  return usage()
+}
+
+let dashdash = false
+const [commands, flags] = argv.reduce((acc, arg) => {
+  if (dashdash || arg === '--') {
+    dashdash = true
+    return acc
+  }
+
+  if (!/^-/.test(arg)) {
+    acc[0].push(arg)
+    return acc
+  }
+
+  for (const flag of arg.slice(1).split('')) {
+    if (flag === 's') {
+      acc[1].silent = true
+    } else if (flag === 'a') {
+      acc[1].all = true
+    } else {
+      usage(`illegal option -- ${flag}`)
+    }
+  }
+
+  return acc
+}, [[], {}])
+
+for (const command of commands) {
+  try {
+    const res = which.sync(command, { all: flags.all })
+    if (!flags.silent) {
+      console.log([].concat(res).join('\n'))
+    }
+  } catch (err) {
+    process.exitCode = 1
+  }
+}
diff --git a/node_modules/@npmcli/promise-spawn/node_modules/which/lib/index.js b/node_modules/@npmcli/promise-spawn/node_modules/which/lib/index.js
new file mode 100644
index 0000000000000..52e9ea62377e7
--- /dev/null
+++ b/node_modules/@npmcli/promise-spawn/node_modules/which/lib/index.js
@@ -0,0 +1,115 @@
+const isexe = require('isexe')
+const { join, delimiter, sep, posix } = require('path')
+
+const isWindows = process.platform === 'win32'
+
+// used to check for slashes in commands passed in. always checks for the posix
+// separator on all platforms, and checks for the current separator when not on
+// a posix platform. don't use the isWindows check for this since that is mocked
+// in tests but we still need the code to actually work when called. that is also
+// why it is ignored from coverage.
+/* istanbul ignore next */
+const rSlash = new RegExp(`[${posix.sep}${sep === posix.sep ? '' : sep}]`.replace(/(\\)/g, '\\$1'))
+const rRel = new RegExp(`^\\.${rSlash.source}`)
+
+const getNotFoundError = (cmd) =>
+  Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' })
+
+const getPathInfo = (cmd, {
+  path: optPath = process.env.PATH,
+  pathExt: optPathExt = process.env.PATHEXT,
+  delimiter: optDelimiter = delimiter,
+}) => {
+  // If it has a slash, then we don't bother searching the pathenv.
+  // just check the file itself, and that's it.
+  const pathEnv = cmd.match(rSlash) ? [''] : [
+    // windows always checks the cwd first
+    ...(isWindows ? [process.cwd()] : []),
+    ...(optPath || /* istanbul ignore next: very unusual */ '').split(optDelimiter),
+  ]
+
+  if (isWindows) {
+    const pathExtExe = optPathExt ||
+      ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter)
+    const pathExt = pathExtExe.split(optDelimiter).reduce((acc, item) => {
+      acc.push(item)
+      acc.push(item.toLowerCase())
+      return acc
+    }, [])
+    if (cmd.includes('.') && pathExt[0] !== '') {
+      pathExt.unshift('')
+    }
+    return { pathEnv, pathExt, pathExtExe }
+  }
+
+  return { pathEnv, pathExt: [''] }
+}
+
+const getPathPart = (raw, cmd) => {
+  const pathPart = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw
+  const prefix = !pathPart && rRel.test(cmd) ? cmd.slice(0, 2) : ''
+  return prefix + join(pathPart, cmd)
+}
+
+const which = async (cmd, opt = {}) => {
+  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt)
+  const found = []
+
+  for (const envPart of pathEnv) {
+    const p = getPathPart(envPart, cmd)
+
+    for (const ext of pathExt) {
+      const withExt = p + ext
+      const is = await isexe(withExt, { pathExt: pathExtExe, ignoreErrors: true })
+      if (is) {
+        if (!opt.all) {
+          return withExt
+        }
+        found.push(withExt)
+      }
+    }
+  }
+
+  if (opt.all && found.length) {
+    return found
+  }
+
+  if (opt.nothrow) {
+    return null
+  }
+
+  throw getNotFoundError(cmd)
+}
+
+const whichSync = (cmd, opt = {}) => {
+  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt)
+  const found = []
+
+  for (const pathEnvPart of pathEnv) {
+    const p = getPathPart(pathEnvPart, cmd)
+
+    for (const ext of pathExt) {
+      const withExt = p + ext
+      const is = isexe.sync(withExt, { pathExt: pathExtExe, ignoreErrors: true })
+      if (is) {
+        if (!opt.all) {
+          return withExt
+        }
+        found.push(withExt)
+      }
+    }
+  }
+
+  if (opt.all && found.length) {
+    return found
+  }
+
+  if (opt.nothrow) {
+    return null
+  }
+
+  throw getNotFoundError(cmd)
+}
+
+module.exports = which
+which.sync = whichSync
diff --git a/node_modules/@npmcli/promise-spawn/node_modules/which/package.json b/node_modules/@npmcli/promise-spawn/node_modules/which/package.json
new file mode 100644
index 0000000000000..989e01c9a3683
--- /dev/null
+++ b/node_modules/@npmcli/promise-spawn/node_modules/which/package.json
@@ -0,0 +1,51 @@
+{
+  "author": "GitHub Inc.",
+  "name": "which",
+  "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
+  "version": "3.0.1",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/node-which.git"
+  },
+  "main": "lib/index.js",
+  "bin": {
+    "node-which": "./bin/which.js"
+  },
+  "license": "ISC",
+  "dependencies": {
+    "isexe": "^2.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.14.1",
+    "tap": "^16.3.0"
+  },
+  "scripts": {
+    "test": "tap",
+    "lint": "eslint \"**/*.js\"",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "lintfix": "npm run lint -- --fix",
+    "snap": "tap",
+    "posttest": "npm run lint"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "tap": {
+    "check-coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.14.1",
+    "publish": "true"
+  }
+}
diff --git a/node_modules/@npmcli/run-script/node_modules/which/LICENSE b/node_modules/@npmcli/run-script/node_modules/which/LICENSE
new file mode 100644
index 0000000000000..19129e315fe59
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/which/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/run-script/node_modules/which/bin/which.js b/node_modules/@npmcli/run-script/node_modules/which/bin/which.js
new file mode 100755
index 0000000000000..6df16f21acf93
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/which/bin/which.js
@@ -0,0 +1,52 @@
+#!/usr/bin/env node
+
+const which = require('../lib')
+const argv = process.argv.slice(2)
+
+const usage = (err) => {
+  if (err) {
+    console.error(`which: ${err}`)
+  }
+  console.error('usage: which [-as] program ...')
+  process.exit(1)
+}
+
+if (!argv.length) {
+  return usage()
+}
+
+let dashdash = false
+const [commands, flags] = argv.reduce((acc, arg) => {
+  if (dashdash || arg === '--') {
+    dashdash = true
+    return acc
+  }
+
+  if (!/^-/.test(arg)) {
+    acc[0].push(arg)
+    return acc
+  }
+
+  for (const flag of arg.slice(1).split('')) {
+    if (flag === 's') {
+      acc[1].silent = true
+    } else if (flag === 'a') {
+      acc[1].all = true
+    } else {
+      usage(`illegal option -- ${flag}`)
+    }
+  }
+
+  return acc
+}, [[], {}])
+
+for (const command of commands) {
+  try {
+    const res = which.sync(command, { all: flags.all })
+    if (!flags.silent) {
+      console.log([].concat(res).join('\n'))
+    }
+  } catch (err) {
+    process.exitCode = 1
+  }
+}
diff --git a/node_modules/@npmcli/run-script/node_modules/which/lib/index.js b/node_modules/@npmcli/run-script/node_modules/which/lib/index.js
new file mode 100644
index 0000000000000..52e9ea62377e7
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/which/lib/index.js
@@ -0,0 +1,115 @@
+const isexe = require('isexe')
+const { join, delimiter, sep, posix } = require('path')
+
+const isWindows = process.platform === 'win32'
+
+// used to check for slashes in commands passed in. always checks for the posix
+// separator on all platforms, and checks for the current separator when not on
+// a posix platform. don't use the isWindows check for this since that is mocked
+// in tests but we still need the code to actually work when called. that is also
+// why it is ignored from coverage.
+/* istanbul ignore next */
+const rSlash = new RegExp(`[${posix.sep}${sep === posix.sep ? '' : sep}]`.replace(/(\\)/g, '\\$1'))
+const rRel = new RegExp(`^\\.${rSlash.source}`)
+
+const getNotFoundError = (cmd) =>
+  Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' })
+
+const getPathInfo = (cmd, {
+  path: optPath = process.env.PATH,
+  pathExt: optPathExt = process.env.PATHEXT,
+  delimiter: optDelimiter = delimiter,
+}) => {
+  // If it has a slash, then we don't bother searching the pathenv.
+  // just check the file itself, and that's it.
+  const pathEnv = cmd.match(rSlash) ? [''] : [
+    // windows always checks the cwd first
+    ...(isWindows ? [process.cwd()] : []),
+    ...(optPath || /* istanbul ignore next: very unusual */ '').split(optDelimiter),
+  ]
+
+  if (isWindows) {
+    const pathExtExe = optPathExt ||
+      ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter)
+    const pathExt = pathExtExe.split(optDelimiter).reduce((acc, item) => {
+      acc.push(item)
+      acc.push(item.toLowerCase())
+      return acc
+    }, [])
+    if (cmd.includes('.') && pathExt[0] !== '') {
+      pathExt.unshift('')
+    }
+    return { pathEnv, pathExt, pathExtExe }
+  }
+
+  return { pathEnv, pathExt: [''] }
+}
+
+const getPathPart = (raw, cmd) => {
+  const pathPart = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw
+  const prefix = !pathPart && rRel.test(cmd) ? cmd.slice(0, 2) : ''
+  return prefix + join(pathPart, cmd)
+}
+
+const which = async (cmd, opt = {}) => {
+  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt)
+  const found = []
+
+  for (const envPart of pathEnv) {
+    const p = getPathPart(envPart, cmd)
+
+    for (const ext of pathExt) {
+      const withExt = p + ext
+      const is = await isexe(withExt, { pathExt: pathExtExe, ignoreErrors: true })
+      if (is) {
+        if (!opt.all) {
+          return withExt
+        }
+        found.push(withExt)
+      }
+    }
+  }
+
+  if (opt.all && found.length) {
+    return found
+  }
+
+  if (opt.nothrow) {
+    return null
+  }
+
+  throw getNotFoundError(cmd)
+}
+
+const whichSync = (cmd, opt = {}) => {
+  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt)
+  const found = []
+
+  for (const pathEnvPart of pathEnv) {
+    const p = getPathPart(pathEnvPart, cmd)
+
+    for (const ext of pathExt) {
+      const withExt = p + ext
+      const is = isexe.sync(withExt, { pathExt: pathExtExe, ignoreErrors: true })
+      if (is) {
+        if (!opt.all) {
+          return withExt
+        }
+        found.push(withExt)
+      }
+    }
+  }
+
+  if (opt.all && found.length) {
+    return found
+  }
+
+  if (opt.nothrow) {
+    return null
+  }
+
+  throw getNotFoundError(cmd)
+}
+
+module.exports = which
+which.sync = whichSync
diff --git a/node_modules/@npmcli/run-script/node_modules/which/package.json b/node_modules/@npmcli/run-script/node_modules/which/package.json
new file mode 100644
index 0000000000000..989e01c9a3683
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/which/package.json
@@ -0,0 +1,51 @@
+{
+  "author": "GitHub Inc.",
+  "name": "which",
+  "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
+  "version": "3.0.1",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/node-which.git"
+  },
+  "main": "lib/index.js",
+  "bin": {
+    "node-which": "./bin/which.js"
+  },
+  "license": "ISC",
+  "dependencies": {
+    "isexe": "^2.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.14.1",
+    "tap": "^16.3.0"
+  },
+  "scripts": {
+    "test": "tap",
+    "lint": "eslint \"**/*.js\"",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "lintfix": "npm run lint -- --fix",
+    "snap": "tap",
+    "posttest": "npm run lint"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "tap": {
+    "check-coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.14.1",
+    "publish": "true"
+  }
+}
diff --git a/node_modules/which/lib/index.js b/node_modules/which/lib/index.js
index 52e9ea62377e7..2fd358baf888f 100644
--- a/node_modules/which/lib/index.js
+++ b/node_modules/which/lib/index.js
@@ -1,4 +1,4 @@
-const isexe = require('isexe')
+const { isexe, sync: isexeSync } = require('isexe')
 const { join, delimiter, sep, posix } = require('path')
 
 const isWindows = process.platform === 'win32'
@@ -31,11 +31,7 @@ const getPathInfo = (cmd, {
   if (isWindows) {
     const pathExtExe = optPathExt ||
       ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter)
-    const pathExt = pathExtExe.split(optDelimiter).reduce((acc, item) => {
-      acc.push(item)
-      acc.push(item.toLowerCase())
-      return acc
-    }, [])
+    const pathExt = pathExtExe.split(optDelimiter).flatMap((item) => [item, item.toLowerCase()])
     if (cmd.includes('.') && pathExt[0] !== '') {
       pathExt.unshift('')
     }
@@ -90,7 +86,7 @@ const whichSync = (cmd, opt = {}) => {
 
     for (const ext of pathExt) {
       const withExt = p + ext
-      const is = isexe.sync(withExt, { pathExt: pathExtExe, ignoreErrors: true })
+      const is = isexeSync(withExt, { pathExt: pathExtExe, ignoreErrors: true })
       if (is) {
         if (!opt.all) {
           return withExt
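The behavior-neutral change in this hunk swaps the reduce-with-two-pushes for a flatMap; a quick sketch confirming the two shapes produce the same PATHEXT expansion:

const exts = '.EXE;.CMD'.split(';')

// old shape: reduce, pushing each item and its lowercase twin
const viaReduce = exts.reduce((acc, item) => {
  acc.push(item)
  acc.push(item.toLowerCase())
  return acc
}, [])

// new shape: flatMap emitting both casings per item
const viaFlatMap = exts.flatMap((item) => [item, item.toLowerCase()])

console.log(viaReduce)  // [ '.EXE', '.exe', '.CMD', '.cmd' ]
console.log(viaFlatMap) // identical order and contents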
diff --git a/node_modules/which/node_modules/isexe/LICENSE b/node_modules/which/node_modules/isexe/LICENSE
new file mode 100644
index 0000000000000..c925dbe826b67
--- /dev/null
+++ b/node_modules/which/node_modules/isexe/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2016-2022 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/which/node_modules/isexe/dist/cjs/index.js b/node_modules/which/node_modules/isexe/dist/cjs/index.js
new file mode 100644
index 0000000000000..cefcb66b5c543
--- /dev/null
+++ b/node_modules/which/node_modules/isexe/dist/cjs/index.js
@@ -0,0 +1,46 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.sync = exports.isexe = exports.posix = exports.win32 = void 0;
+const posix = __importStar(require("./posix.js"));
+exports.posix = posix;
+const win32 = __importStar(require("./win32.js"));
+exports.win32 = win32;
+__exportStar(require("./options.js"), exports);
+const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform;
+const impl = platform === 'win32' ? win32 : posix;
+/**
+ * Determine whether a path is executable on the current platform.
+ */
+exports.isexe = impl.isexe;
+/**
+ * Synchronously determine whether a path is executable on the
+ * current platform.
+ */
+exports.sync = impl.sync;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/which/node_modules/isexe/dist/cjs/options.js b/node_modules/which/node_modules/isexe/dist/cjs/options.js
new file mode 100644
index 0000000000000..0dfad0762cc32
--- /dev/null
+++ b/node_modules/which/node_modules/isexe/dist/cjs/options.js
@@ -0,0 +1,3 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=options.js.map
\ No newline at end of file
diff --git a/node_modules/which/node_modules/isexe/dist/cjs/package.json b/node_modules/which/node_modules/isexe/dist/cjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/which/node_modules/isexe/dist/cjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/which/node_modules/isexe/dist/cjs/posix.js b/node_modules/which/node_modules/isexe/dist/cjs/posix.js
new file mode 100644
index 0000000000000..3bc5e79d7007e
--- /dev/null
+++ b/node_modules/which/node_modules/isexe/dist/cjs/posix.js
@@ -0,0 +1,67 @@
+"use strict";
+/**
+ * This is the Posix implementation of isexe, which uses the file
+ * mode and uid/gid values.
+ *
+ * @module
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.sync = exports.isexe = void 0;
+const fs_1 = require("fs");
+const promises_1 = require("fs/promises");
+/**
+ * Determine whether a path is executable according to the mode and
+ * current (or specified) user and group IDs.
+ */
+const isexe = async (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat(await (0, promises_1.stat)(path), options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+exports.isexe = isexe;
+/**
+ * Synchronously determine whether a path is executable according to
+ * the mode and current (or specified) user and group IDs.
+ */
+const sync = (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat((0, fs_1.statSync)(path), options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+exports.sync = sync;
+const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options);
+const checkMode = (stat, options) => {
+    const myUid = options.uid ?? process.getuid?.();
+    const myGroups = options.groups ?? process.getgroups?.() ?? [];
+    const myGid = options.gid ?? process.getgid?.() ?? myGroups[0];
+    if (myUid === undefined || myGid === undefined) {
+        throw new Error('cannot get uid or gid');
+    }
+    const groups = new Set([myGid, ...myGroups]);
+    const mod = stat.mode;
+    const uid = stat.uid;
+    const gid = stat.gid;
+    const u = parseInt('100', 8);
+    const g = parseInt('010', 8);
+    const o = parseInt('001', 8);
+    const ug = u | g;
+    return !!(mod & o ||
+        (mod & g && groups.has(gid)) ||
+        (mod & u && uid === myUid) ||
+        (mod & ug && myUid === 0));
+};
+//# sourceMappingURL=posix.js.map
\ No newline at end of file
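checkMode above tests the execute bits against the caller's uid/gid; the vendored code spells the masks as parseInt('100', 8) and friends, which are just the octal permission bits. A hand-traceable sketch with a hypothetical mode:

// mode 0o754 = rwxr-xr-- (owner rwx, group r-x, other r--)
const mod = 0o754
const u = 0o100 // owner-execute bit
const g = 0o010 // group-execute bit
const o = 0o001 // other-execute bit

console.log(!!(mod & u)) // true  - owner-execute set
console.log(!!(mod & g)) // true  - group-execute set
console.log(!!(mod & o)) // false - other-execute clear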
diff --git a/node_modules/which/node_modules/isexe/dist/cjs/win32.js b/node_modules/which/node_modules/isexe/dist/cjs/win32.js
new file mode 100644
index 0000000000000..fa7a4d2f7d240
--- /dev/null
+++ b/node_modules/which/node_modules/isexe/dist/cjs/win32.js
@@ -0,0 +1,62 @@
+"use strict";
+/**
+ * This is the Windows implementation of isexe, which uses the file
+ * extension and PATHEXT setting.
+ *
+ * @module
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.sync = exports.isexe = void 0;
+const fs_1 = require("fs");
+const promises_1 = require("fs/promises");
+/**
+ * Determine whether a path is executable based on the file extension
+ * and PATHEXT environment variable (or specified pathExt option)
+ */
+const isexe = async (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat(await (0, promises_1.stat)(path), path, options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+exports.isexe = isexe;
+/**
+ * Synchronously determine whether a path is executable based on the file
+ * extension and PATHEXT environment variable (or specified pathExt option)
+ */
+const sync = (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat((0, fs_1.statSync)(path), path, options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+exports.sync = sync;
+const checkPathExt = (path, options) => {
+    const { pathExt = process.env.PATHEXT || '' } = options;
+    const peSplit = pathExt.split(';');
+    if (peSplit.indexOf('') !== -1) {
+        return true;
+    }
+    for (let i = 0; i < peSplit.length; i++) {
+        const p = peSplit[i].toLowerCase();
+        const ext = path.substring(path.length - p.length).toLowerCase();
+        if (p && ext === p) {
+            return true;
+        }
+    }
+    return false;
+};
+const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options);
+//# sourceMappingURL=win32.js.map
\ No newline at end of file
diff --git a/node_modules/which/node_modules/isexe/dist/mjs/index.js b/node_modules/which/node_modules/isexe/dist/mjs/index.js
new file mode 100644
index 0000000000000..1e309acd7355e
--- /dev/null
+++ b/node_modules/which/node_modules/isexe/dist/mjs/index.js
@@ -0,0 +1,16 @@
+import * as posix from './posix.js';
+import * as win32 from './win32.js';
+export * from './options.js';
+export { win32, posix };
+const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform;
+const impl = platform === 'win32' ? win32 : posix;
+/**
+ * Determine whether a path is executable on the current platform.
+ */
+export const isexe = impl.isexe;
+/**
+ * Synchronously determine whether a path is executable on the
+ * current platform.
+ */
+export const sync = impl.sync;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/which/node_modules/isexe/dist/mjs/options.js b/node_modules/which/node_modules/isexe/dist/mjs/options.js
new file mode 100644
index 0000000000000..e9ded40bd5b2c
--- /dev/null
+++ b/node_modules/which/node_modules/isexe/dist/mjs/options.js
@@ -0,0 +1,2 @@
+export {};
+//# sourceMappingURL=options.js.map
\ No newline at end of file
diff --git a/node_modules/which/node_modules/isexe/dist/mjs/package.json b/node_modules/which/node_modules/isexe/dist/mjs/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/which/node_modules/isexe/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/which/node_modules/isexe/dist/mjs/posix.js b/node_modules/which/node_modules/isexe/dist/mjs/posix.js
new file mode 100644
index 0000000000000..c453776c0452f
--- /dev/null
+++ b/node_modules/which/node_modules/isexe/dist/mjs/posix.js
@@ -0,0 +1,62 @@
+/**
+ * This is the Posix implementation of isexe, which uses the file
+ * mode and uid/gid values.
+ *
+ * @module
+ */
+import { statSync } from 'fs';
+import { stat } from 'fs/promises';
+/**
+ * Determine whether a path is executable according to the mode and
+ * current (or specified) user and group IDs.
+ */
+export const isexe = async (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat(await stat(path), options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+/**
+ * Synchronously determine whether a path is executable according to
+ * the mode and current (or specified) user and group IDs.
+ */
+export const sync = (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat(statSync(path), options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options);
+const checkMode = (stat, options) => {
+    const myUid = options.uid ?? process.getuid?.();
+    const myGroups = options.groups ?? process.getgroups?.() ?? [];
+    const myGid = options.gid ?? process.getgid?.() ?? myGroups[0];
+    if (myUid === undefined || myGid === undefined) {
+        throw new Error('cannot get uid or gid');
+    }
+    const groups = new Set([myGid, ...myGroups]);
+    const mod = stat.mode;
+    const uid = stat.uid;
+    const gid = stat.gid;
+    const u = parseInt('100', 8);
+    const g = parseInt('010', 8);
+    const o = parseInt('001', 8);
+    const ug = u | g;
+    return !!(mod & o ||
+        (mod & g && groups.has(gid)) ||
+        (mod & u && uid === myUid) ||
+        (mod & ug && myUid === 0));
+};
+//# sourceMappingURL=posix.js.map
\ No newline at end of file
diff --git a/node_modules/which/node_modules/isexe/dist/mjs/win32.js b/node_modules/which/node_modules/isexe/dist/mjs/win32.js
new file mode 100644
index 0000000000000..a354ee2a5115c
--- /dev/null
+++ b/node_modules/which/node_modules/isexe/dist/mjs/win32.js
@@ -0,0 +1,57 @@
+/**
+ * This is the Windows implementation of isexe, which uses the file
+ * extension and PATHEXT setting.
+ *
+ * @module
+ */
+import { statSync } from 'fs';
+import { stat } from 'fs/promises';
+/**
+ * Determine whether a path is executable based on the file extension
+ * and PATHEXT environment variable (or specified pathExt option)
+ */
+export const isexe = async (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat(await stat(path), path, options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+/**
+ * Synchronously determine whether a path is executable based on the file
+ * extension and PATHEXT environment variable (or specified pathExt option)
+ */
+export const sync = (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat(statSync(path), path, options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+const checkPathExt = (path, options) => {
+    const { pathExt = process.env.PATHEXT || '' } = options;
+    const peSplit = pathExt.split(';');
+    if (peSplit.indexOf('') !== -1) {
+        return true;
+    }
+    for (let i = 0; i < peSplit.length; i++) {
+        const p = peSplit[i].toLowerCase();
+        const ext = path.substring(path.length - p.length).toLowerCase();
+        if (p && ext === p) {
+            return true;
+        }
+    }
+    return false;
+};
+const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options);
+//# sourceMappingURL=win32.js.map
\ No newline at end of file
diff --git a/node_modules/which/node_modules/isexe/package.json b/node_modules/which/node_modules/isexe/package.json
new file mode 100644
index 0000000000000..a0e2cd04bfdbf
--- /dev/null
+++ b/node_modules/which/node_modules/isexe/package.json
@@ -0,0 +1,96 @@
+{
+  "name": "isexe",
+  "version": "3.1.1",
+  "description": "Minimal module to check if a file is executable.",
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/cjs/index.js",
+  "files": [
+    "dist"
+  ],
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    },
+    "./posix": {
+      "import": {
+        "types": "./dist/mjs/posix.d.ts",
+        "default": "./dist/mjs/posix.js"
+      },
+      "require": {
+        "types": "./dist/cjs/posix.d.ts",
+        "default": "./dist/cjs/posix.js"
+      }
+    },
+    "./win32": {
+      "import": {
+        "types": "./dist/mjs/win32.d.ts",
+        "default": "./dist/mjs/win32.js"
+      },
+      "require": {
+        "types": "./dist/cjs/win32.d.ts",
+        "default": "./dist/cjs/win32.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "devDependencies": {
+    "@types/node": "^20.4.5",
+    "@types/tap": "^15.0.8",
+    "c8": "^8.0.1",
+    "mkdirp": "^0.5.1",
+    "prettier": "^2.8.8",
+    "rimraf": "^2.5.0",
+    "sync-content": "^1.0.2",
+    "tap": "^16.3.8",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.24.8",
+    "typescript": "^5.1.6"
+  },
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tsc -p tsconfig/cjs.json && tsc -p tsconfig/esm.json && bash ./scripts/fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache",
+    "typedoc": "typedoc --tsconfig tsconfig/esm.json ./src/*.ts"
+  },
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--enable-source-maps",
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "repository": "https://github.com/isaacs/isexe",
+  "engines": {
+    "node": ">=16"
+  }
+}
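This package.json wires isexe 3.x as a dual package: the conditional exports map routes require() to dist/cjs and import to dist/mjs, matching the two builds added above. A hedged sketch of consuming either entry (the probed path is hypothetical; ignoreErrors converts stat failures into false):

// CommonJS — resolves through the "require" condition to dist/cjs/index.js
const { isexe, sync } = require('isexe')
// ESM equivalent (resolves to dist/mjs/index.js):
//   import { isexe, sync } from 'isexe'

async function demo () {
  console.log(await isexe('/usr/bin/env', { ignoreErrors: true }))
  console.log(sync('/usr/bin/env', { ignoreErrors: true }))
}

demo()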
diff --git a/node_modules/which/package.json b/node_modules/which/package.json
index 989e01c9a3683..515bfb22ca0e1 100644
--- a/node_modules/which/package.json
+++ b/node_modules/which/package.json
@@ -2,7 +2,7 @@
   "author": "GitHub Inc.",
   "name": "which",
   "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
-  "version": "3.0.1",
+  "version": "4.0.0",
   "repository": {
     "type": "git",
     "url": "https://github.com/npm/node-which.git"
@@ -13,11 +13,11 @@
   },
   "license": "ISC",
   "dependencies": {
-    "isexe": "^2.0.0"
+    "isexe": "^3.1.1"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.14.1",
+    "@npmcli/template-oss": "4.18.0",
     "tap": "^16.3.0"
   },
   "scripts": {
@@ -41,11 +41,17 @@
     ]
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.13.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.14.1",
+    "ciVersions": [
+      "16.13.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ],
+    "version": "4.18.0",
     "publish": "true"
   }
 }
diff --git a/package-lock.json b/package-lock.json
index be16c47b205cf..b184218e2b1b8 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -152,7 +152,7 @@
         "tiny-relative-date": "^1.3.0",
         "treeverse": "^3.0.0",
         "validate-npm-package-name": "^5.0.0",
-        "which": "^3.0.1",
+        "which": "^4.0.0",
         "write-file-atomic": "^5.0.1"
       },
       "bin": {
@@ -2370,6 +2370,21 @@
         "eslint-plugin-promise": "^6.0.0"
       }
     },
+    "node_modules/@npmcli/eslint-config/node_modules/which": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
+      "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==",
+      "dev": true,
+      "dependencies": {
+        "isexe": "^2.0.0"
+      },
+      "bin": {
+        "node-which": "bin/which.js"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/fs": {
       "version": "3.1.0",
       "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.0.tgz",
@@ -2401,6 +2416,21 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/git/node_modules/which": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
+      "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==",
+      "inBundle": true,
+      "dependencies": {
+        "isexe": "^2.0.0"
+      },
+      "bin": {
+        "node-which": "bin/which.js"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/installed-package-contents": {
       "version": "2.0.2",
       "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-2.0.2.tgz",
@@ -2502,6 +2532,21 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/promise-spawn/node_modules/which": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
+      "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==",
+      "inBundle": true,
+      "dependencies": {
+        "isexe": "^2.0.0"
+      },
+      "bin": {
+        "node-which": "bin/which.js"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/query": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/@npmcli/query/-/query-3.0.0.tgz",
@@ -2529,6 +2574,21 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/run-script/node_modules/which": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
+      "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==",
+      "inBundle": true,
+      "dependencies": {
+        "isexe": "^2.0.0"
+      },
+      "bin": {
+        "node-which": "bin/which.js"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/smoke-tests": {
       "resolved": "smoke-tests",
       "link": true
@@ -2678,6 +2738,21 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/which": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
+      "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==",
+      "dev": true,
+      "dependencies": {
+        "isexe": "^2.0.0"
+      },
+      "bin": {
+        "node-which": "bin/which.js"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@octokit/auth-token": {
       "version": "3.0.3",
       "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.3.tgz",
@@ -15385,18 +15460,18 @@
       }
     },
     "node_modules/which": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
-      "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz",
+      "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==",
       "inBundle": true,
       "dependencies": {
-        "isexe": "^2.0.0"
+        "isexe": "^3.1.1"
       },
       "bin": {
         "node-which": "bin/which.js"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.13.0 || >=18.0.0"
       }
     },
     "node_modules/which-boxed-primitive": {
@@ -15443,6 +15518,15 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/which/node_modules/isexe": {
+      "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz",
+      "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=16"
+      }
+    },
     "node_modules/wide-align": {
       "version": "1.1.5",
       "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz",
@@ -15818,7 +15902,7 @@
         "@npmcli/template-oss": "4.18.0",
         "http-proxy": "^1.18.1",
         "tap": "^16.3.8",
-        "which": "^3.0.0"
+        "which": "^4.0.0"
       },
       "engines": {
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
diff --git a/package.json b/package.json
index a78ac6e0a4f56..3080858070548 100644
--- a/package.json
+++ b/package.json
@@ -117,7 +117,7 @@
     "tiny-relative-date": "^1.3.0",
     "treeverse": "^3.0.0",
     "validate-npm-package-name": "^5.0.0",
-    "which": "^3.0.1",
+    "which": "^4.0.0",
     "write-file-atomic": "^5.0.1"
   },
   "bundleDependencies": [
diff --git a/smoke-tests/package.json b/smoke-tests/package.json
index 79f633a6d7976..14ee2f5b367cb 100644
--- a/smoke-tests/package.json
+++ b/smoke-tests/package.json
@@ -24,7 +24,7 @@
     "@npmcli/template-oss": "4.18.0",
     "http-proxy": "^1.18.1",
     "tap": "^16.3.8",
-    "which": "^3.0.0"
+    "which": "^4.0.0"
   },
   "author": "GitHub Inc.",
   "license": "ISC",

From 3858118f8a240ef83fab0f18e4a46ce84b81ed6d Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Wed, 30 Aug 2023 10:22:57 -0700
Subject: [PATCH 59/68] deps: @sigstore/tuf@2.1.0

---
 node_modules/@sigstore/tuf/package.json |  4 ++--
 node_modules/tuf-js/package.json        |  2 +-
 package-lock.json                       | 16 ++++++++--------
 package.json                            |  2 +-
 4 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/node_modules/@sigstore/tuf/package.json b/node_modules/@sigstore/tuf/package.json
index 4c534fa766a5e..a655d52a0407a 100644
--- a/node_modules/@sigstore/tuf/package.json
+++ b/node_modules/@sigstore/tuf/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@sigstore/tuf",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "description": "Client for the Sigstore TUF repository",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -33,7 +33,7 @@
   },
   "dependencies": {
     "@sigstore/protobuf-specs": "^0.2.1",
-    "tuf-js": "^2.0.0"
+    "tuf-js": "^2.1.0"
   },
   "engines": {
     "node": "^16.14.0 || >=18.0.0"
diff --git a/node_modules/tuf-js/package.json b/node_modules/tuf-js/package.json
index 6286e034453d6..c757d6a00d700 100644
--- a/node_modules/tuf-js/package.json
+++ b/node_modules/tuf-js/package.json
@@ -1,6 +1,6 @@
 {
   "name": "tuf-js",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "description": "JavaScript implementation of The Update Framework (TUF)",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
diff --git a/package-lock.json b/package-lock.json
index b184218e2b1b8..cc58e15126f0f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -94,7 +94,7 @@
         "@npmcli/package-json": "^5.0.0",
         "@npmcli/promise-spawn": "^6.0.2",
         "@npmcli/run-script": "^6.0.2",
-        "@sigstore/tuf": "^2.0.0",
+        "@sigstore/tuf": "^2.1.0",
         "abbrev": "^2.0.0",
         "archy": "~1.0.0",
         "cacache": "^18.0.0",
@@ -2958,13 +2958,13 @@
       }
     },
     "node_modules/@sigstore/tuf": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-2.0.0.tgz",
-      "integrity": "sha512-Ow/ZMFH9kdHbMNOH//rDuINblqufpqD+e3xS9JY5RRce+euh9eUsjSc6jodioMMi2roN9rSAk8LCuyW2hngAKw==",
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-2.1.0.tgz",
+      "integrity": "sha512-BUoVCx+7Wj+8moEGvUU2MyBI+f93lmg1CLmoG6KrhQMeDyAG8HAZNk+YRCNuvwvSDCfPhwsj37Bg63/Q+bnGsw==",
       "inBundle": true,
       "dependencies": {
         "@sigstore/protobuf-specs": "^0.2.1",
-        "tuf-js": "^2.0.0"
+        "tuf-js": "^2.1.0"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
@@ -14935,9 +14935,9 @@
       }
     },
     "node_modules/tuf-js": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-2.0.0.tgz",
-      "integrity": "sha512-Oq6w0MMFihvxCM0o733TIeLeuUrDuaVaOEUVXrQtq/J6YXoUmQU84JcAftJcDkxDkuTZ9jumZN7Dh7VlyNaeWA==",
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-2.1.0.tgz",
+      "integrity": "sha512-eD7YPPjVlMzdggrOeE8zwoegUaG/rt6Bt3jwoQPunRiNVzgcCE009UDFJKJjG+Gk9wFu6W/Vi+P5d/5QpdD9jA==",
       "inBundle": true,
       "dependencies": {
         "@tufjs/models": "2.0.0",
diff --git a/package.json b/package.json
index 3080858070548..dec1f7707b071 100644
--- a/package.json
+++ b/package.json
@@ -59,7 +59,7 @@
     "@npmcli/package-json": "^5.0.0",
     "@npmcli/promise-spawn": "^6.0.2",
     "@npmcli/run-script": "^6.0.2",
-    "@sigstore/tuf": "^2.0.0",
+    "@sigstore/tuf": "^2.1.0",
     "abbrev": "^2.0.0",
     "archy": "~1.0.0",
     "cacache": "^18.0.0",

From b50d5dffa04d65a9406645f55ebd36e8e7159b76 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Wed, 30 Aug 2023 10:23:31 -0700
Subject: [PATCH 60/68] deps: sigstore@2.1.0

---
 node_modules/@sigstore/bundle/dist/build.js   |  4 +--
 node_modules/@sigstore/bundle/package.json    |  2 +-
 node_modules/@sigstore/sign/dist/error.js     | 29 ++++++++++++++++-
 .../@sigstore/sign/dist/external/error.js     | 31 ++++++++++++++-----
 .../@sigstore/sign/dist/external/fulcio.js    |  2 +-
 .../@sigstore/sign/dist/external/rekor.js     |  8 ++---
 .../@sigstore/sign/dist/external/tsa.js       |  2 +-
 node_modules/@sigstore/sign/dist/index.js     |  4 ++-
 .../@sigstore/sign/dist/signer/fulcio/ca.js   |  6 +---
 .../sign/dist/signer/fulcio/index.js          | 20 ++++++++++--
 .../@sigstore/sign/dist/signer/index.js       |  3 +-
 .../@sigstore/sign/dist/witness/index.js      |  3 +-
 .../sign/dist/witness/tlog/client.js          | 12 ++-----
 .../@sigstore/sign/dist/witness/tlog/index.js |  8 +++--
 .../@sigstore/sign/dist/witness/tsa/client.js |  6 +---
 node_modules/@sigstore/sign/package.json      |  6 ++--
 node_modules/sigstore/dist/config.js          |  8 ++---
 node_modules/sigstore/dist/index.js           |  7 ++---
 node_modules/sigstore/package.json            | 10 +++---
 package-lock.json                             | 28 ++++++++---------
 workspaces/libnpmpublish/package.json         |  2 +-
 21 files changed, 124 insertions(+), 77 deletions(-)

diff --git a/node_modules/@sigstore/bundle/dist/build.js b/node_modules/@sigstore/bundle/dist/build.js
index 0ccea62eaba87..6990f5451a2d3 100644
--- a/node_modules/@sigstore/bundle/dist/build.js
+++ b/node_modules/@sigstore/bundle/dist/build.js
@@ -21,7 +21,7 @@ const bundle_1 = require("./bundle");
 // Message signature bundle - $case: 'messageSignature'
 function toMessageSignatureBundle(options) {
     return {
-        mediaType: bundle_1.BUNDLE_V01_MEDIA_TYPE,
+        mediaType: bundle_1.BUNDLE_V02_MEDIA_TYPE,
         content: {
             $case: 'messageSignature',
             messageSignature: {
@@ -39,7 +39,7 @@ exports.toMessageSignatureBundle = toMessageSignatureBundle;
 // DSSE envelope bundle - $case: 'dsseEnvelope'
 function toDSSEBundle(options) {
     return {
-        mediaType: bundle_1.BUNDLE_V01_MEDIA_TYPE,
+        mediaType: bundle_1.BUNDLE_V02_MEDIA_TYPE,
         content: {
             $case: 'dsseEnvelope',
             dsseEnvelope: toEnvelope(options),
diff --git a/node_modules/@sigstore/bundle/package.json b/node_modules/@sigstore/bundle/package.json
index 2b15d08060753..7e26efa11a21d 100644
--- a/node_modules/@sigstore/bundle/package.json
+++ b/node_modules/@sigstore/bundle/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@sigstore/bundle",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "description": "Sigstore bundle type",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
diff --git a/node_modules/@sigstore/sign/dist/error.js b/node_modules/@sigstore/sign/dist/error.js
index b52ea7eef5d9b..d57e4567fb89e 100644
--- a/node_modules/@sigstore/sign/dist/error.js
+++ b/node_modules/@sigstore/sign/dist/error.js
@@ -1,6 +1,22 @@
 "use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.InternalError = void 0;
+exports.internalError = exports.InternalError = void 0;
+const error_1 = require("./external/error");
 class InternalError extends Error {
     constructor({ code, message, cause, }) {
         super(message);
@@ -10,3 +26,14 @@ class InternalError extends Error {
     }
 }
 exports.InternalError = InternalError;
+function internalError(err, code, message) {
+    if (err instanceof error_1.HTTPError) {
+        message += ` - ${err.message}`;
+    }
+    throw new InternalError({
+        code: code,
+        message: message,
+        cause: err,
+    });
+}
+exports.internalError = internalError;
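The new internalError helper centralizes the catch-and-rethrow pattern the later signer/witness hunks adopt: it folds an HTTPError's "(status) message" detail into the outer message and throws an InternalError with `cause` set. A hedged sketch (assumes the HTTPError and internalError from this module pair are in scope; the error code is one used later in this patch):

try {
  throw new HTTPError({ status: 500, message: 'boom' })
} catch (err) {
  // throws InternalError with message
  // 'error creating signing certificate - (500) boom' and cause === err
  internalError(err, 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', 'error creating signing certificate')
}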
diff --git a/node_modules/@sigstore/sign/dist/external/error.js b/node_modules/@sigstore/sign/dist/external/error.js
index d1e1c3df8a878..0dad92ea69414 100644
--- a/node_modules/@sigstore/sign/dist/external/error.js
+++ b/node_modules/@sigstore/sign/dist/external/error.js
@@ -2,20 +2,37 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.checkStatus = exports.HTTPError = void 0;
 class HTTPError extends Error {
-    constructor(response) {
-        super(`HTTP Error: ${response.status} ${response.statusText}`);
-        this.response = response;
-        this.statusCode = response.status;
-        this.location = response.headers?.get('Location') || undefined;
+    constructor({ status, message, location, }) {
+        super(`(${status}) ${message}`);
+        this.statusCode = status;
+        this.location = location;
     }
 }
 exports.HTTPError = HTTPError;
-const checkStatus = (response) => {
+const checkStatus = async (response) => {
     if (response.ok) {
         return response;
     }
     else {
-        throw new HTTPError(response);
+        let message = response.statusText;
+        const location = response.headers?.get('Location') || undefined;
+        const contentType = response.headers?.get('Content-Type');
+        // If response type is JSON, try to parse the body for a message
+        if (contentType?.includes('application/json')) {
+            try {
+                await response.json().then((body) => {
+                    message = body.message;
+                });
+            }
+            catch (e) {
+                // ignore
+            }
+        }
+        throw new HTTPError({
+            status: response.status,
+            message: message,
+            location: location,
+        });
     }
 };
 exports.checkStatus = checkStatus;
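checkStatus is now async because it may consume the response body to surface a server-provided message, which is why every call site in the fulcio/rekor/tsa diffs below gains an await. A minimal sketch against a stubbed fetch-style response (assumes checkStatus from the module above is in scope; all response fields are hypothetical):

const fakeResponse = {
  ok: false,
  status: 404,
  statusText: 'Not Found',
  headers: { get: (name) => (name === 'Content-Type' ? 'application/json' : undefined) },
  // body is parsed only because Content-Type is JSON
  json: async () => ({ message: 'entry not found' }),
}

checkStatus(fakeResponse).catch((err) => {
  console.log(err.message)    // '(404) entry not found'
  console.log(err.statusCode) // 404
})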
diff --git a/node_modules/@sigstore/sign/dist/external/fulcio.js b/node_modules/@sigstore/sign/dist/external/fulcio.js
index b27637c2dc570..f00b62e147cd7 100644
--- a/node_modules/@sigstore/sign/dist/external/fulcio.js
+++ b/node_modules/@sigstore/sign/dist/external/fulcio.js
@@ -43,7 +43,7 @@ class Fulcio {
             method: 'POST',
             body: JSON.stringify(request),
         });
-        (0, error_1.checkStatus)(response);
+        await (0, error_1.checkStatus)(response);
         const data = await response.json();
         return data;
     }
diff --git a/node_modules/@sigstore/sign/dist/external/rekor.js b/node_modules/@sigstore/sign/dist/external/rekor.js
index 9b4e66b656251..6f6cb96cc9c5c 100644
--- a/node_modules/@sigstore/sign/dist/external/rekor.js
+++ b/node_modules/@sigstore/sign/dist/external/rekor.js
@@ -49,7 +49,7 @@ class Rekor {
             headers: { 'Content-Type': 'application/json' },
             body: JSON.stringify(propsedEntry),
         });
-        (0, error_1.checkStatus)(response);
+        await (0, error_1.checkStatus)(response);
         const data = await response.json();
         return entryFromResponse(data);
     }
@@ -61,7 +61,7 @@ class Rekor {
     async getEntry(uuid) {
         const url = `${this.baseUrl}/api/v1/log/entries/${uuid}`;
         const response = await this.fetch(url);
-        (0, error_1.checkStatus)(response);
+        await (0, error_1.checkStatus)(response);
         const data = await response.json();
         return entryFromResponse(data);
     }
@@ -77,7 +77,7 @@ class Rekor {
             body: JSON.stringify(opts),
             headers: { 'Content-Type': 'application/json' },
         });
-        (0, error_1.checkStatus)(response);
+        await (0, error_1.checkStatus)(response);
         const data = await response.json();
         return data;
     }
@@ -93,7 +93,7 @@ class Rekor {
             body: JSON.stringify(opts),
             headers: { 'Content-Type': 'application/json' },
         });
-        (0, error_1.checkStatus)(response);
+        await (0, error_1.checkStatus)(response);
         const rawData = await response.json();
         const data = rawData.map((d) => entryFromResponse(d));
         return data;
diff --git a/node_modules/@sigstore/sign/dist/external/tsa.js b/node_modules/@sigstore/sign/dist/external/tsa.js
index 5277d7d3f9707..252c14f2d32d8 100644
--- a/node_modules/@sigstore/sign/dist/external/tsa.js
+++ b/node_modules/@sigstore/sign/dist/external/tsa.js
@@ -40,7 +40,7 @@ class TimestampAuthority {
             method: 'POST',
             body: JSON.stringify(request),
         });
-        (0, error_1.checkStatus)(response);
+        await (0, error_1.checkStatus)(response);
         return response.buffer();
     }
 }
diff --git a/node_modules/@sigstore/sign/dist/index.js b/node_modules/@sigstore/sign/dist/index.js
index f6d97c673ec62..383b76083361b 100644
--- a/node_modules/@sigstore/sign/dist/index.js
+++ b/node_modules/@sigstore/sign/dist/index.js
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.TSAWitness = exports.RekorWitness = exports.FulcioSigner = exports.CIContextProvider = exports.InternalError = exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0;
+exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = exports.CIContextProvider = exports.InternalError = exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0;
 var bundler_1 = require("./bundler");
 Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return bundler_1.DSSEBundleBuilder; } });
 Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return bundler_1.MessageSignatureBundleBuilder; } });
@@ -9,7 +9,9 @@ Object.defineProperty(exports, "InternalError", { enumerable: true, get: functio
 var identity_1 = require("./identity");
 Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return identity_1.CIContextProvider; } });
 var signer_1 = require("./signer");
+Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return signer_1.DEFAULT_FULCIO_URL; } });
 Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return signer_1.FulcioSigner; } });
 var witness_1 = require("./witness");
+Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return witness_1.DEFAULT_REKOR_URL; } });
 Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return witness_1.RekorWitness; } });
 Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return witness_1.TSAWitness; } });
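
Note: the default service URLs now come from @sigstore/sign rather than
sigstore's own config.js (see the config.js hunk later in this patch), so
consumers can read them directly:

    const { DEFAULT_FULCIO_URL, DEFAULT_REKOR_URL } = require('@sigstore/sign')

    console.log(DEFAULT_FULCIO_URL) // https://fulcio.sigstore.dev
    console.log(DEFAULT_REKOR_URL)  // https://rekor.sigstore.dev
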
diff --git a/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
index 9c0af0e914493..81b421eabadb2 100644
--- a/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
+++ b/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
@@ -39,11 +39,7 @@ class CAClient {
             return cert.chain.certificates;
         }
         catch (err) {
-            throw new error_1.InternalError({
-                code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR',
-                message: 'error creating signing certificate',
-                cause: err,
-            });
+            (0, error_1.internalError)(err, 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', 'error creating signing certificate');
         }
     }
 }
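
Note: this hunk, and the tlog/client.js and tsa/client.js hunks below, replace
the inline throws with an internalError(err, code, message) helper from the
package's error.js, which is not shown in this diff. Judging only from the
call sites, it behaves like the code it replaces, roughly:

    // hypothetical shape, inferred from the call sites in this patch
    const internalError = (err, code, message) => {
      throw new InternalError({ code, message, cause: err })
    }
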
diff --git a/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
index b2eff7e1b981f..89a432548d2b4 100644
--- a/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
+++ b/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.FulcioSigner = void 0;
+exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0;
 /*
 Copyright 2023 The Sigstore Authors.
 
@@ -20,13 +20,17 @@ const error_1 = require("../../error");
 const util_1 = require("../../util");
 const ca_1 = require("./ca");
 const ephemeral_1 = require("./ephemeral");
+exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev';
 // Signer implementation which can be used to decorate another signer
 // with a Fulcio-issued signing certificate for the signer's public key.
 // Must be instantiated with an identity provider which can provide a JWT
 // which represents the identity to be bound to the signing certificate.
 class FulcioSigner {
     constructor(options) {
-        this.ca = new ca_1.CAClient(options);
+        this.ca = new ca_1.CAClient({
+            ...options,
+            fulcioBaseURL: options.fulcioBaseURL || /* istanbul ignore next */ exports.DEFAULT_FULCIO_URL,
+        });
         this.identityProvider = options.identityProvider;
         this.keyHolder = options.keyHolder || new ephemeral_1.EphemeralSigner();
     }
@@ -34,7 +38,17 @@ class FulcioSigner {
         // Retrieve identity token from the supplied identity provider
         const identityToken = await this.getIdentityToken();
         // Extract challenge claim from OIDC token
-        const subject = util_1.oidc.extractJWTSubject(identityToken);
+        let subject;
+        try {
+            subject = util_1.oidc.extractJWTSubject(identityToken);
+        }
+        catch (err) {
+            throw new error_1.InternalError({
+                code: 'IDENTITY_TOKEN_PARSE_ERROR',
+                message: `invalid identity token: ${identityToken}`,
+                cause: err,
+            });
+        }
         // Construct challenge value by signing the subject claim
         const challenge = await this.keyHolder.sign(Buffer.from(subject));
         if (challenge.key.$case !== 'publicKey') {
diff --git a/node_modules/@sigstore/sign/dist/signer/index.js b/node_modules/@sigstore/sign/dist/signer/index.js
index 4f64adf41ed8d..06ec9dbe72fe1 100644
--- a/node_modules/@sigstore/sign/dist/signer/index.js
+++ b/node_modules/@sigstore/sign/dist/signer/index.js
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.FulcioSigner = void 0;
+exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0;
 /*
 Copyright 2023 The Sigstore Authors.
 
@@ -17,4 +17,5 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 var fulcio_1 = require("./fulcio");
+Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return fulcio_1.DEFAULT_FULCIO_URL; } });
 Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return fulcio_1.FulcioSigner; } });
diff --git a/node_modules/@sigstore/sign/dist/witness/index.js b/node_modules/@sigstore/sign/dist/witness/index.js
index 7218ea41bce6d..e200d0638350b 100644
--- a/node_modules/@sigstore/sign/dist/witness/index.js
+++ b/node_modules/@sigstore/sign/dist/witness/index.js
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.TSAWitness = exports.RekorWitness = void 0;
+exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0;
 /*
 Copyright 2023 The Sigstore Authors.
 
@@ -17,6 +17,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 var tlog_1 = require("./tlog");
+Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return tlog_1.DEFAULT_REKOR_URL; } });
 Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return tlog_1.RekorWitness; } });
 var tsa_1 = require("./tsa");
 Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return tsa_1.TSAWitness; } });
diff --git a/node_modules/@sigstore/sign/dist/witness/tlog/client.js b/node_modules/@sigstore/sign/dist/witness/tlog/client.js
index 3c1b5212e4265..22c895f2ca7ed 100644
--- a/node_modules/@sigstore/sign/dist/witness/tlog/client.js
+++ b/node_modules/@sigstore/sign/dist/witness/tlog/client.js
@@ -43,19 +43,11 @@ class TLogClient {
                     entry = await this.rekor.getEntry(uuid);
                 }
                 catch (err) {
-                    throw new error_1.InternalError({
-                        code: 'TLOG_FETCH_ENTRY_ERROR',
-                        message: 'error fetching tlog entry',
-                        cause: err,
-                    });
+                    (0, error_1.internalError)(err, 'TLOG_FETCH_ENTRY_ERROR', 'error fetching tlog entry');
                 }
             }
             else {
-                throw new error_1.InternalError({
-                    code: 'TLOG_CREATE_ENTRY_ERROR',
-                    message: 'error creating tlog entry',
-                    cause: err,
-                });
+                (0, error_1.internalError)(err, 'TLOG_CREATE_ENTRY_ERROR', 'error creating tlog entry');
             }
         }
         return entry;
diff --git a/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/node_modules/@sigstore/sign/dist/witness/tlog/index.js
index 7d5487c2cb3c6..1f098df85390c 100644
--- a/node_modules/@sigstore/sign/dist/witness/tlog/index.js
+++ b/node_modules/@sigstore/sign/dist/witness/tlog/index.js
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.RekorWitness = void 0;
+exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0;
 /*
 Copyright 2023 The Sigstore Authors.
 
@@ -19,9 +19,13 @@ limitations under the License.
 const util_1 = require("../../util");
 const client_1 = require("./client");
 const entry_1 = require("./entry");
+exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev';
 class RekorWitness {
     constructor(options) {
-        this.tlog = new client_1.TLogClient(options);
+        this.tlog = new client_1.TLogClient({
+            ...options,
+            rekorBaseURL: options.rekorBaseURL || /* istanbul ignore next */ exports.DEFAULT_REKOR_URL,
+        });
     }
     async testify(content, publicKey) {
         const proposedEntry = (0, entry_1.toProposedEntry)(content, publicKey);
diff --git a/node_modules/@sigstore/sign/dist/witness/tsa/client.js b/node_modules/@sigstore/sign/dist/witness/tsa/client.js
index d2a7610401c4e..a334deb00b775 100644
--- a/node_modules/@sigstore/sign/dist/witness/tsa/client.js
+++ b/node_modules/@sigstore/sign/dist/witness/tsa/client.js
@@ -36,11 +36,7 @@ class TSAClient {
             return await this.tsa.createTimestamp(request);
         }
         catch (err) {
-            throw new error_1.InternalError({
-                code: 'TSA_CREATE_TIMESTAMP_ERROR',
-                message: 'error creating timestamp',
-                cause: err,
-            });
+            (0, error_1.internalError)(err, 'TSA_CREATE_TIMESTAMP_ERROR', 'error creating timestamp');
         }
     }
 }
diff --git a/node_modules/@sigstore/sign/package.json b/node_modules/@sigstore/sign/package.json
index 732c94f9fcd49..cd8dc14412e4d 100644
--- a/node_modules/@sigstore/sign/package.json
+++ b/node_modules/@sigstore/sign/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@sigstore/sign",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "description": "Sigstore signing library",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -27,12 +27,12 @@
   },
   "devDependencies": {
     "@sigstore/jest": "^0.0.0",
-    "@sigstore/mock": "^0.3.0",
+    "@sigstore/mock": "^0.4.0",
     "@sigstore/rekor-types": "^2.0.0",
     "@types/make-fetch-happen": "^10.0.0"
   },
   "dependencies": {
-    "@sigstore/bundle": "^2.0.0",
+    "@sigstore/bundle": "^2.1.0",
     "@sigstore/protobuf-specs": "^0.2.1",
     "make-fetch-happen": "^13.0.0"
   },
diff --git a/node_modules/sigstore/dist/config.js b/node_modules/sigstore/dist/config.js
index 65b20fbaa9829..43c236f0eebd0 100644
--- a/node_modules/sigstore/dist/config.js
+++ b/node_modules/sigstore/dist/config.js
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.artifactVerificationOptions = exports.createBundleBuilder = exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = void 0;
+exports.artifactVerificationOptions = exports.createBundleBuilder = exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = void 0;
 /*
 Copyright 2023 The Sigstore Authors.
 
@@ -41,8 +41,6 @@ limitations under the License.
 */
 const sign_1 = require("@sigstore/sign");
 const sigstore = __importStar(require("./types/sigstore"));
-exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev';
-exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev';
 exports.DEFAULT_RETRY = { retries: 2 };
 exports.DEFAULT_TIMEOUT = 5000;
 function createBundleBuilder(bundleType, options) {
@@ -61,7 +59,7 @@ exports.createBundleBuilder = createBundleBuilder;
 // Instantiate the FulcioSigner based on the supplied options.
 function initSigner(options) {
     return new sign_1.FulcioSigner({
-        fulcioBaseURL: options.fulcioURL || exports.DEFAULT_FULCIO_URL,
+        fulcioBaseURL: options.fulcioURL,
         identityProvider: options.identityProvider || initIdentityProvider(options),
         retry: options.retry ?? exports.DEFAULT_RETRY,
         timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
@@ -84,7 +82,7 @@ function initWitnesses(options) {
     const witnesses = [];
     if (isRekorEnabled(options)) {
         witnesses.push(new sign_1.RekorWitness({
-            rekorBaseURL: options.rekorURL || exports.DEFAULT_REKOR_URL,
+            rekorBaseURL: options.rekorURL,
             fetchOnConflict: false,
             retry: options.retry ?? exports.DEFAULT_RETRY,
             timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
diff --git a/node_modules/sigstore/dist/index.js b/node_modules/sigstore/dist/index.js
index d281e5b8d2ab7..341c1fa504d1e 100644
--- a/node_modules/sigstore/dist/index.js
+++ b/node_modules/sigstore/dist/index.js
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.verify = exports.sign = exports.createVerifier = exports.attest = exports.VerificationError = exports.PolicyError = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.TUFError = exports.InternalError = exports.ValidationError = void 0;
+exports.verify = exports.sign = exports.createVerifier = exports.attest = exports.VerificationError = exports.PolicyError = exports.TUFError = exports.InternalError = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.ValidationError = void 0;
 /*
 Copyright 2022 The Sigstore Authors.
 
@@ -19,12 +19,11 @@ limitations under the License.
 var bundle_1 = require("@sigstore/bundle");
 Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return bundle_1.ValidationError; } });
 var sign_1 = require("@sigstore/sign");
+Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_FULCIO_URL; } });
+Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_REKOR_URL; } });
 Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return sign_1.InternalError; } });
 var tuf_1 = require("@sigstore/tuf");
 Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return tuf_1.TUFError; } });
-var config_1 = require("./config");
-Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return config_1.DEFAULT_FULCIO_URL; } });
-Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return config_1.DEFAULT_REKOR_URL; } });
 var error_1 = require("./error");
 Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } });
 Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } });
diff --git a/node_modules/sigstore/package.json b/node_modules/sigstore/package.json
index 26e58edd47af3..daf50ba601884 100644
--- a/node_modules/sigstore/package.json
+++ b/node_modules/sigstore/package.json
@@ -1,6 +1,6 @@
 {
   "name": "sigstore",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "description": "code-signing for npm packages",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -29,15 +29,15 @@
   "devDependencies": {
     "@sigstore/rekor-types": "^2.0.0",
     "@sigstore/jest": "^0.0.0",
-    "@sigstore/mock": "^0.3.0",
+    "@sigstore/mock": "^0.4.0",
     "@tufjs/repo-mock": "^2.0.0",
     "@types/make-fetch-happen": "^10.0.0"
   },
   "dependencies": {
-    "@sigstore/bundle": "^2.0.0",
+    "@sigstore/bundle": "^2.1.0",
     "@sigstore/protobuf-specs": "^0.2.1",
-    "@sigstore/sign": "^2.0.0",
-    "@sigstore/tuf": "^2.0.0"
+    "@sigstore/sign": "^2.1.0",
+    "@sigstore/tuf": "^2.1.0"
   },
   "engines": {
     "node": "^16.14.0 || >=18.0.0"
diff --git a/package-lock.json b/package-lock.json
index cc58e15126f0f..26628070149f4 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -2923,9 +2923,9 @@
       }
     },
     "node_modules/@sigstore/bundle": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.0.0.tgz",
-      "integrity": "sha512-EO7D7/kMtUsYn596WP+b5N/txWTgOt7N8vsZ2gyneMsxfrPW4FJHRZtMlZeGKCgBNCcjZhZ8ItyawkZqJC8XiA==",
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.1.0.tgz",
+      "integrity": "sha512-89uOo6yh/oxaU8AeOUnVrTdVMcGk9Q1hJa7Hkvalc6G3Z3CupWk4Xe9djSgJm9fMkH69s0P0cVHUoKSOemLdng==",
       "inBundle": true,
       "dependencies": {
         "@sigstore/protobuf-specs": "^0.2.1"
@@ -2944,12 +2944,12 @@
       }
     },
     "node_modules/@sigstore/sign": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.0.0.tgz",
-      "integrity": "sha512-f+r1jEDwM5969DTORRln9sDmWjTy1cOQzhU/iisGNzFdbF2TglmwNScbH6aiQ6QH4lc3jOXNMgKP6sec1kSVKA==",
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.1.0.tgz",
+      "integrity": "sha512-4VRpfJxs+8eLqzLVrZngVNExVA/zAhVbi4UT4zmtLi4xRd7vz5qie834OgkrGsLlLB1B2nz/3wUxT1XAUBe8gw==",
       "inBundle": true,
       "dependencies": {
-        "@sigstore/bundle": "^2.0.0",
+        "@sigstore/bundle": "^2.1.0",
         "@sigstore/protobuf-specs": "^0.2.1",
         "make-fetch-happen": "^13.0.0"
       },
@@ -11808,15 +11808,15 @@
       }
     },
     "node_modules/sigstore": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-2.0.0.tgz",
-      "integrity": "sha512-RtTi90xIdzFmQAAKb9+Ki1nx4IR2Z5c+mFn3dN0xuPHgk3gTt3f7ZqKsZ9UFQP40ZAlm7un8LMyjhwgrTIXNPA==",
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-2.1.0.tgz",
+      "integrity": "sha512-kPIj+ZLkyI3QaM0qX8V/nSsweYND3W448pwkDgS6CQ74MfhEkIR8ToK5Iyx46KJYRjseVcD3Rp9zAmUAj6ZjPw==",
       "inBundle": true,
       "dependencies": {
-        "@sigstore/bundle": "^2.0.0",
+        "@sigstore/bundle": "^2.1.0",
         "@sigstore/protobuf-specs": "^0.2.1",
-        "@sigstore/sign": "^2.0.0",
-        "@sigstore/tuf": "^2.0.0"
+        "@sigstore/sign": "^2.1.0",
+        "@sigstore/tuf": "^2.1.0"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
@@ -16139,7 +16139,7 @@
         "npm-registry-fetch": "^16.0.0",
         "proc-log": "^3.0.0",
         "semver": "^7.3.7",
-        "sigstore": "^2.0.0",
+        "sigstore": "^2.1.0",
         "ssri": "^10.0.5"
       },
       "devDependencies": {
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index 094414e716102..87a878173fdbe 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -44,7 +44,7 @@
     "npm-registry-fetch": "^16.0.0",
     "proc-log": "^3.0.0",
     "semver": "^7.3.7",
-    "sigstore": "^2.0.0",
+    "sigstore": "^2.1.0",
     "ssri": "^10.0.5"
   },
   "engines": {

From 8eb8fe87ea40a17747ef1656e4cf083f1eb3b057 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Wed, 30 Aug 2023 10:24:16 -0700
Subject: [PATCH 61/68] deps: @npmcli/agent@2.1.0

---
 DEPENDENCIES.md                               |   6 +-
 node_modules/.gitignore                       |   6 +
 node_modules/@npmcli/agent/lib/agents.js      | 201 ++++++++++++++++++
 node_modules/@npmcli/agent/lib/dns.js         |  68 +++---
 node_modules/@npmcli/agent/lib/errors.js      |  34 ++-
 node_modules/@npmcli/agent/lib/http.js        |  33 ---
 node_modules/@npmcli/agent/lib/https.js       |  33 ---
 node_modules/@npmcli/agent/lib/index.js       | 149 +++----------
 node_modules/@npmcli/agent/lib/options.js     |  74 +++++++
 node_modules/@npmcli/agent/lib/proxy.js       |  96 +++++++++
 node_modules/@npmcli/agent/lib/proxy/http.js  | 146 -------------
 node_modules/@npmcli/agent/lib/proxy/index.js |  25 ---
 node_modules/@npmcli/agent/lib/proxy/null.js  |  97 ---------
 node_modules/@npmcli/agent/lib/proxy/socks.js | 153 -------------
 node_modules/@npmcli/agent/lib/util.js        |  85 ++++++--
 .../node_modules/agent-base/dist/helpers.js   |  66 ++++++
 .../node_modules/agent-base/dist/index.js     | 112 ++++++++++
 .../node_modules/agent-base/package.json      |  49 +++++
 .../node_modules/http-proxy-agent/LICENSE     |  25 +++
 .../http-proxy-agent/dist/index.js            | 147 +++++++++++++
 .../http-proxy-agent/package.json             |  47 ++++
 .../https-proxy-agent/dist/index.js           | 170 +++++++++++++++
 .../dist/parse-proxy-response.js              |  98 +++++++++
 .../https-proxy-agent/package.json            |  50 +++++
 .../socks-proxy-agent/dist/index.js           | 181 ++++++++++++++++
 .../socks-proxy-agent/package.json            | 142 +++++++++++++
 node_modules/@npmcli/agent/package.json       |  12 +-
 package-lock.json                             |  62 +++++-
 28 files changed, 1681 insertions(+), 686 deletions(-)
 create mode 100644 node_modules/@npmcli/agent/lib/agents.js
 delete mode 100644 node_modules/@npmcli/agent/lib/http.js
 delete mode 100644 node_modules/@npmcli/agent/lib/https.js
 create mode 100644 node_modules/@npmcli/agent/lib/options.js
 create mode 100644 node_modules/@npmcli/agent/lib/proxy.js
 delete mode 100644 node_modules/@npmcli/agent/lib/proxy/http.js
 delete mode 100644 node_modules/@npmcli/agent/lib/proxy/index.js
 delete mode 100644 node_modules/@npmcli/agent/lib/proxy/null.js
 delete mode 100644 node_modules/@npmcli/agent/lib/proxy/socks.js
 create mode 100644 node_modules/@npmcli/agent/node_modules/agent-base/dist/helpers.js
 create mode 100644 node_modules/@npmcli/agent/node_modules/agent-base/dist/index.js
 create mode 100644 node_modules/@npmcli/agent/node_modules/agent-base/package.json
 create mode 100644 node_modules/@npmcli/agent/node_modules/http-proxy-agent/LICENSE
 create mode 100644 node_modules/@npmcli/agent/node_modules/http-proxy-agent/dist/index.js
 create mode 100644 node_modules/@npmcli/agent/node_modules/http-proxy-agent/package.json
 create mode 100644 node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/index.js
 create mode 100644 node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/parse-proxy-response.js
 create mode 100644 node_modules/@npmcli/agent/node_modules/https-proxy-agent/package.json
 create mode 100644 node_modules/@npmcli/agent/node_modules/socks-proxy-agent/dist/index.js
 create mode 100644 node_modules/@npmcli/agent/node_modules/socks-proxy-agent/package.json

diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index b080058cb387e..773b6aeffa54d 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -115,6 +115,7 @@ graph LR;
   npm-->npm-profile;
   npm-->npm-registry-fetch;
   npm-->npm-user-validate;
+  npm-->npmcli-agent["@npmcli/agent"];
   npm-->npmcli-arborist["@npmcli/arborist"];
   npm-->npmcli-config["@npmcli/config"];
   npm-->npmcli-docs["@npmcli/docs"];
@@ -540,6 +541,7 @@ graph LR;
   npm-->npm-profile;
   npm-->npm-registry-fetch;
   npm-->npm-user-validate;
+  npm-->npmcli-agent["@npmcli/agent"];
   npm-->npmcli-arborist["@npmcli/arborist"];
   npm-->npmcli-config["@npmcli/config"];
   npm-->npmcli-docs["@npmcli/docs"];
@@ -598,8 +600,10 @@ graph LR;
   npm-registry-fetch-->minizlib;
   npm-registry-fetch-->npm-package-arg;
   npm-registry-fetch-->proc-log;
+  npmcli-agent-->http-proxy-agent;
+  npmcli-agent-->https-proxy-agent;
   npmcli-agent-->lru-cache;
-  npmcli-agent-->socks;
+  npmcli-agent-->socks-proxy-agent;
   npmcli-arborist-->benchmark;
   npmcli-arborist-->bin-links;
   npmcli-arborist-->cacache;
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index dda14819326ad..9076b038b0ceb 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -19,6 +19,12 @@
 !/@npmcli/
 /@npmcli/*
 !/@npmcli/agent
+!/@npmcli/agent/node_modules/
+/@npmcli/agent/node_modules/*
+!/@npmcli/agent/node_modules/agent-base
+!/@npmcli/agent/node_modules/http-proxy-agent
+!/@npmcli/agent/node_modules/https-proxy-agent
+!/@npmcli/agent/node_modules/socks-proxy-agent
 !/@npmcli/disparity-colors
 !/@npmcli/fs
 !/@npmcli/git
diff --git a/node_modules/@npmcli/agent/lib/agents.js b/node_modules/@npmcli/agent/lib/agents.js
new file mode 100644
index 0000000000000..db997403f7579
--- /dev/null
+++ b/node_modules/@npmcli/agent/lib/agents.js
@@ -0,0 +1,201 @@
+'use strict'
+
+const http = require('http')
+const https = require('https')
+const net = require('net')
+const tls = require('tls')
+const { once } = require('events')
+const { createTimeout, abortRace, urlify, appendPort, cacheAgent } = require('./util')
+const { normalizeOptions, cacheOptions } = require('./options')
+const { getProxy, getProxyType, isSecureProxy, proxyCache } = require('./proxy.js')
+const Errors = require('./errors.js')
+
+const createAgent = (base, name) => {
+  const SECURE = base === https
+  const SOCKET_TYPE = SECURE ? tls : net
+
+  const agent = class extends base.Agent {
+    #options
+    #timeouts
+    #proxy
+    #socket
+
+    constructor (_options) {
+      const { timeouts, proxy, noProxy, ...options } = normalizeOptions(_options)
+
+      super(options)
+
+      this.#options = options
+      this.#timeouts = timeouts
+      this.#proxy = proxy ? { proxies: getProxyType(proxy), proxy: urlify(proxy), noProxy } : null
+    }
+
+    get proxy () {
+      return this.#proxy ? { url: this.#proxy.proxy } : {}
+    }
+
+    #getProxy (options) {
+      const proxy = this.#proxy
+        ? getProxy(appendPort(`${options.protocol}//${options.host}`, options.port), this.#proxy)
+        : null
+
+      if (!proxy) {
+        return
+      }
+
+      const secure = isSecureProxy(proxy)
+
+      return cacheAgent({
+        key: cacheOptions({
+          ...options,
+          ...this.#options,
+          secure,
+          timeouts: this.#timeouts,
+          proxy,
+        }),
+        cache: proxyCache,
+        secure,
+        proxies: this.#proxy.proxies,
+      }, proxy, this.#options)
+    }
+
+    #setKeepAlive (socket) {
+      socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs)
+      socket.setNoDelay(this.keepAlive)
+    }
+
+    #setIdleTimeout (socket, options) {
+      if (this.#timeouts.idle) {
+        socket.setTimeout(this.#timeouts.idle, () => {
+          socket.destroy(new Errors.IdleTimeoutError(options))
+        })
+      }
+    }
+
+    async #proxyConnect (proxy, request, options) {
+      // socks-proxy-agent accepts a dns lookup function
+      options.lookup ??= this.#options.lookup
+
+      // all the proxy agents use this secureEndpoint option to determine
+      // if the proxy should connect over tls or not. we can set it based
+      // on if the HttpAgent or HttpsAgent is used.
+      options.secureEndpoint = SECURE
+
+      const socket = await abortRace([
+        (ac) => createTimeout(this.#timeouts.connection, ac).catch(() => {
+          throw new Errors.ConnectionTimeoutError(options)
+        }),
+        (ac) => proxy.connect(request, options).then((s) => {
+          this.#setKeepAlive(s)
+
+          const connectEvent = SECURE ? 'secureConnect' : 'connect'
+          const connectingEvent = SECURE ? 'secureConnecting' : 'connecting'
+
+          if (!s[connectingEvent]) {
+            return s
+          }
+
+          return abortRace([
+            () => once(s, 'error', ac).then((err) => {
+              throw err
+            }),
+            () => once(s, connectEvent, ac).then(() => s),
+          ], ac)
+        }),
+      ])
+
+      this.#setIdleTimeout(socket, options)
+
+      return socket
+    }
+
+    async connect (request, options) {
+      const proxy = this.#getProxy(options)
+      if (proxy) {
+        return this.#proxyConnect(proxy, request, options)
+      }
+
+      const socket = SOCKET_TYPE.connect(options)
+
+      this.#setKeepAlive(socket)
+
+      await abortRace([
+        (s) => createTimeout(this.#timeouts.connection, s).catch(() => {
+          throw new Errors.ConnectionTimeoutError(options)
+        }),
+        (s) => once(socket, 'error', s).then((err) => {
+          throw err
+        }),
+        (s) => once(socket, 'connect', s),
+      ])
+
+      this.#setIdleTimeout(socket, options)
+
+      return socket
+    }
+
+    addRequest (request, options) {
+      const proxy = this.#getProxy(options)
+      // it would be better to call proxy.addRequest here but this causes the
+      // http-proxy-agent to call its super.addRequest which causes the request
+      // to be added to the agent twice. since we only support 3 agents
+      // currently (see the required agents in proxy.js) we have manually
+      // checked that the only public methods we need to call are called in the
+      // next block. this could change in the future and presumably we would get
+      // failing tests until we have properly called the necessary methods on
+      // each of our proxy agents
+      if (proxy?.setRequestProps) {
+        proxy.setRequestProps(request, options)
+      }
+
+      request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close')
+
+      const responseTimeout = createTimeout(this.#timeouts.response)
+      if (responseTimeout) {
+        request.once('finish', () => {
+          responseTimeout.start(() => {
+            request.destroy(new Errors.ResponseTimeoutError(request, this.proxy?.url))
+          })
+        })
+        request.once('response', () => {
+          responseTimeout.clear()
+        })
+      }
+
+      const transferTimeout = createTimeout(this.#timeouts.transfer)
+      if (transferTimeout) {
+        request.once('response', (res) => {
+          transferTimeout.start(() => {
+            res.destroy(new Errors.TransferTimeoutError(request, this.proxy?.url))
+          })
+          res.once('close', () => {
+            transferTimeout.clear()
+          })
+        })
+      }
+
+      return super.addRequest(request, options)
+    }
+
+    createSocket (req, options, cb) {
+      return Promise.resolve()
+        .then(() => this.connect(req, options))
+        .then((socket) => {
+          this.#socket = socket
+          return super.createSocket(req, options, cb)
+        }, cb)
+    }
+
+    createConnection () {
+      return this.#socket
+    }
+  }
+
+  Object.defineProperty(agent, 'name', { value: name })
+  return agent
+}
+
+module.exports = {
+  HttpAgent: createAgent(http, 'HttpAgent'),
+  HttpsAgent: createAgent(https, 'HttpsAgent'),
+}
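
Note: agents.js derives HttpAgent and HttpsAgent from one factory keyed on the
http/https module, so a single code path covers plain, TLS, and proxied
sockets. A hedged usage sketch (the registry URL and timeout values are
illustrative only):

    const https = require('https')
    const { HttpsAgent } = require('@npmcli/agent')

    const agent = new HttpsAgent({
      timeouts: { connection: 5000, idle: 30000 },
      proxy: process.env.https_proxy, // undefined means no proxy
    })

    https.get('https://registry.npmjs.org/', { agent }, (res) => {
      console.log(res.statusCode)
    })
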
diff --git a/node_modules/@npmcli/agent/lib/dns.js b/node_modules/@npmcli/agent/lib/dns.js
index 7f1a7c9a80e19..3c6946c566d73 100644
--- a/node_modules/@npmcli/agent/lib/dns.js
+++ b/node_modules/@npmcli/agent/lib/dns.js
@@ -3,49 +3,51 @@
 const { LRUCache } = require('lru-cache')
 const dns = require('dns')
 
-const defaultOptions = exports.defaultOptions = {
-  family: undefined,
-  hints: dns.ADDRCONFIG,
-  all: false,
-  verbatim: undefined,
-}
-
-const lookupCache = exports.lookupCache = new LRUCache({ max: 50 })
-
 // this is a factory so that each request can have its own opts (i.e. ttl)
 // while still sharing the cache across all requests
-exports.getLookup = (dnsOptions) => {
-  return (hostname, options, callback) => {
-    if (typeof options === 'function') {
-      callback = options
-      options = null
-    } else if (typeof options === 'number') {
-      options = { family: options }
+const cache = new LRUCache({ max: 50 })
+
+const getOptions = ({
+  family = 0,
+  hints = dns.ADDRCONFIG,
+  all = false,
+  verbatim = undefined,
+  ttl = 5 * 60 * 1000,
+  lookup = dns.lookup,
+}) => ({
+  // hints and lookup are returned since both are top level properties to (net|tls).connect
+  hints,
+  lookup: (hostname, ...args) => {
+    const callback = args.pop() // callback is always last arg
+    const lookupOptions = args[0] ?? {}
+
+    const options = {
+      family,
+      hints,
+      all,
+      verbatim,
+      ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions),
     }
 
-    options = { ...defaultOptions, ...options }
+    const key = JSON.stringify({ hostname, ...options })
 
-    const key = JSON.stringify({
-      hostname,
-      family: options.family,
-      hints: options.hints,
-      all: options.all,
-      verbatim: options.verbatim,
-    })
-
-    if (lookupCache.has(key)) {
-      const [address, family] = lookupCache.get(key)
-      process.nextTick(callback, null, address, family)
-      return
+    if (cache.has(key)) {
+      const cached = cache.get(key)
+      return process.nextTick(callback, null, ...cached)
     }
 
-    dnsOptions.lookup(hostname, options, (err, address, family) => {
+    lookup(hostname, options, (err, ...result) => {
       if (err) {
         return callback(err)
       }
 
-      lookupCache.set(key, [address, family], { ttl: dnsOptions.ttl })
-      return callback(null, address, family)
+      cache.set(key, result, { ttl })
+      return callback(null, ...result)
     })
-  }
+  },
+})
+
+module.exports = {
+  cache,
+  getOptions,
 }
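
Note: dns.js no longer exports a bare lookup factory; getOptions returns hints
and an LRU-cached lookup that sit at the top level of the (net|tls).connect
options. A rough sketch, reaching into package internals purely for
illustration:

    const net = require('net')
    const { getOptions } = require('@npmcli/agent/lib/dns.js')

    const { hints, lookup } = getOptions({ ttl: 60 * 1000 })
    // repeat lookups for the same host within the ttl hit the cache
    net.connect({ host: 'registry.npmjs.org', port: 443, hints, lookup })
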
diff --git a/node_modules/@npmcli/agent/lib/errors.js b/node_modules/@npmcli/agent/lib/errors.js
index 9c664aeb39757..f41b4a065d713 100644
--- a/node_modules/@npmcli/agent/lib/errors.js
+++ b/node_modules/@npmcli/agent/lib/errors.js
@@ -1,5 +1,7 @@
 'use strict'
 
+const { appendPort } = require('./util')
+
 class InvalidProxyProtocolError extends Error {
   constructor (url) {
     super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``)
@@ -8,17 +10,9 @@ class InvalidProxyProtocolError extends Error {
   }
 }
 
-class InvalidProxyResponseError extends Error {
-  constructor (url, status) {
-    super(`Invalid status code \`${status}\` connecting to proxy \`${url.host}\``)
-    this.code = 'EINVALIDRESPONSE'
-    this.proxy = url
-    this.status = status
-  }
-}
-
 class ConnectionTimeoutError extends Error {
-  constructor (host) {
+  constructor ({ host, port }) {
+    host = appendPort(host, port)
     super(`Timeout connecting to host \`${host}\``)
     this.code = 'ECONNECTIONTIMEOUT'
     this.host = host
@@ -26,7 +20,8 @@ class ConnectionTimeoutError extends Error {
 }
 
 class IdleTimeoutError extends Error {
-  constructor (host) {
+  constructor ({ host, port }) {
+    host = appendPort(host, port)
     super(`Idle timeout reached for host \`${host}\``)
     this.code = 'EIDLETIMEOUT'
     this.host = host
@@ -34,36 +29,35 @@ class IdleTimeoutError extends Error {
 }
 
 class ResponseTimeoutError extends Error {
-  constructor (proxy, request) {
+  constructor (request, proxy) {
     let msg = 'Response timeout '
-    if (proxy.url) {
-      msg += `from proxy \`${proxy.url.host}\` `
+    if (proxy) {
+      msg += `from proxy \`${proxy.host}\` `
     }
     msg += `connecting to host \`${request.host}\``
     super(msg)
     this.code = 'ERESPONSETIMEOUT'
-    this.proxy = proxy.url
+    this.proxy = proxy
     this.request = request
   }
 }
 
 class TransferTimeoutError extends Error {
-  constructor (proxy, request) {
+  constructor (request, proxy) {
     let msg = 'Transfer timeout '
-    if (proxy.url) {
-      msg += `from proxy \`${proxy.url.host}\` `
+    if (proxy) {
+      msg += `from proxy \`${proxy.host}\` `
     }
     msg += `for \`${request.host}\``
     super(msg)
     this.code = 'ETRANSFERTIMEOUT'
-    this.proxy = proxy.url
+    this.proxy = proxy
     this.request = request
   }
 }
 
 module.exports = {
   InvalidProxyProtocolError,
-  InvalidProxyResponseError,
   ConnectionTimeoutError,
   IdleTimeoutError,
   ResponseTimeoutError,
diff --git a/node_modules/@npmcli/agent/lib/http.js b/node_modules/@npmcli/agent/lib/http.js
deleted file mode 100644
index 23512393caf3f..0000000000000
--- a/node_modules/@npmcli/agent/lib/http.js
+++ /dev/null
@@ -1,33 +0,0 @@
-'use strict'
-
-const http = require('http')
-
-const { getLookup } = require('./dns.js')
-const { normalizeOptions } = require('./util.js')
-const createProxy = require('./proxy/index.js')
-
-class HttpAgent extends http.Agent {
-  constructor (_options = {}) {
-    const options = normalizeOptions(_options)
-    super(options)
-    this.proxy = createProxy({
-      agent: this,
-      lookup: getLookup(options.dns),
-      proxy: options.proxy,
-      secure: false,
-    })
-  }
-
-  createConnection (_options, callback) {
-    const options = normalizeOptions(_options)
-    return this.proxy.createConnection(options, callback)
-  }
-
-  addRequest (request, _options) {
-    const options = normalizeOptions(_options)
-    super.addRequest(request, _options)
-    return this.proxy.addRequest(request, options)
-  }
-}
-
-module.exports = HttpAgent
diff --git a/node_modules/@npmcli/agent/lib/https.js b/node_modules/@npmcli/agent/lib/https.js
deleted file mode 100644
index b544614d7f47f..0000000000000
--- a/node_modules/@npmcli/agent/lib/https.js
+++ /dev/null
@@ -1,33 +0,0 @@
-'use strict'
-
-const https = require('https')
-
-const { getLookup } = require('./dns.js')
-const { normalizeOptions } = require('./util.js')
-const createProxy = require('./proxy/index.js')
-
-class HttpsAgent extends https.Agent {
-  constructor (_options) {
-    const options = normalizeOptions(_options)
-    super(options)
-    this.proxy = createProxy({
-      agent: this,
-      lookup: getLookup(options.dns),
-      proxy: options.proxy,
-      secure: true,
-    })
-  }
-
-  createConnection (_options, callback) {
-    const options = normalizeOptions(_options)
-    return this.proxy.createConnection(options, callback)
-  }
-
-  addRequest (request, _options) {
-    const options = normalizeOptions(_options)
-    super.addRequest(request, options)
-    return this.proxy.addRequest(request, options)
-  }
-}
-
-module.exports = HttpsAgent
diff --git a/node_modules/@npmcli/agent/lib/index.js b/node_modules/@npmcli/agent/lib/index.js
index a6f556964d86d..2cd69390ea77e 100644
--- a/node_modules/@npmcli/agent/lib/index.js
+++ b/node_modules/@npmcli/agent/lib/index.js
@@ -1,135 +1,46 @@
 'use strict'
 
-const { normalizeOptions } = require('./util.js')
-const HttpAgent = require('./http.js')
-const HttpsAgent = require('./https.js')
+const { LRUCache } = require('lru-cache')
+const { urlify, cacheAgent } = require('./util')
+const { normalizeOptions, cacheOptions } = require('./options')
+const { getProxy, proxyCache } = require('./proxy.js')
+const dns = require('./dns.js')
+const { HttpAgent, HttpsAgent } = require('./agents.js')
 
-const AgentCache = new Map()
-
-const proxyEnv = {}
-for (const [key, value] of Object.entries(process.env)) {
-  const lowerKey = key.toLowerCase()
-  if (['https_proxy', 'http_proxy', 'proxy', 'no_proxy'].includes(lowerKey)) {
-    proxyEnv[lowerKey] = value
-  }
-}
-
-const getAgent = (url, options) => {
-  url = new URL(url)
-  options = normalizeOptions(options)
+const agentCache = new LRUCache({ max: 20 })
 
+const getAgent = (url, { agent: _agent, proxy: _proxy, noProxy, ..._options } = {}) => {
   // false has meaning so this can't be a simple truthiness check
-  if (options.agent != null) {
-    return options.agent
-  }
-
-  const isHttps = url.protocol === 'https:'
-
-  let proxy = options.proxy
-  if (!proxy) {
-    proxy = isHttps
-      ? proxyEnv.https_proxy
-      : (proxyEnv.https_proxy || proxyEnv.http_proxy || proxyEnv.proxy)
-  }
-
-  if (proxy) {
-    proxy = new URL(proxy)
-    let noProxy = options.noProxy || proxyEnv.no_proxy
-    if (typeof noProxy === 'string') {
-      noProxy = noProxy.split(',').map((p) => p.trim())
-    }
-
-    if (noProxy) {
-      const hostSegments = url.hostname.split('.').reverse()
-      const matches = noProxy.some((no) => {
-        const noSegments = no.split('.').filter(Boolean).reverse()
-        if (!noSegments.length) {
-          return false
-        }
-
-        for (let i = 0; i < noSegments.length; ++i) {
-          if (hostSegments[i] !== noSegments[i]) {
-            return false
-          }
-        }
-
-        return true
-      })
-
-      if (matches) {
-        proxy = ''
-      }
-    }
+  if (_agent != null) {
+    return _agent
   }
 
-  const timeouts = [
-    options.timeouts.connection || 0,
-    options.timeouts.idle || 0,
-    options.timeouts.response || 0,
-    options.timeouts.transfer || 0,
-  ].join('.')
-
-  const maxSockets = options.maxSockets || 15
-
-  let proxyDescriptor = 'proxy:'
-  if (!proxy) {
-    proxyDescriptor += 'null'
-  } else {
-    proxyDescriptor += `${proxy.protocol}//`
-    let auth = ''
-
-    if (proxy.username) {
-      auth += proxy.username
-    }
-
-    if (proxy.password) {
-      auth += `:${proxy.password}`
-    }
-
-    if (auth) {
-      proxyDescriptor += `${auth}@`
-    }
-
-    proxyDescriptor += proxy.host
-  }
-
-  const key = [
-    `https:${isHttps}`,
-    proxyDescriptor,
-    `local-address:${options.localAddress || 'null'}`,
-    `strict-ssl:${isHttps ? options.rejectUnauthorized : 'false'}`,
-    `ca:${isHttps && options.ca || 'null'}`,
-    `cert:${isHttps && options.cert || 'null'}`,
-    `key:${isHttps && options.key || 'null'}`,
-    `timeouts:${timeouts}`,
-    `maxSockets:${maxSockets}`,
-  ].join(':')
-
-  if (AgentCache.has(key)) {
-    return AgentCache.get(key)
-  }
-
-  const agentOptions = {
-    ca: options.ca,
-    cert: options.cert,
-    key: options.key,
-    rejectUnauthorized: options.rejectUnauthorized,
-    maxSockets,
-    timeouts: options.timeouts,
-    localAddress: options.localAddress,
-    proxy,
-  }
+  url = urlify(url)
 
-  const agent = isHttps
-    ? new HttpsAgent(agentOptions)
-    : new HttpAgent(agentOptions)
+  const secure = url.protocol === 'https:'
+  const proxy = getProxy(url, { proxy: _proxy, noProxy })
+  const options = { ...normalizeOptions(_options), proxy }
 
-  AgentCache.set(key, agent)
-  return agent
+  return cacheAgent({
+    key: cacheOptions({ ...options, secure }),
+    cache: agentCache,
+    secure,
+    proxies: [HttpAgent, HttpsAgent],
+  }, options)
 }
 
 module.exports = {
   getAgent,
   HttpAgent,
   HttpsAgent,
+  cache: {
+    proxy: proxyCache,
+    agent: agentCache,
+    dns: dns.cache,
+    clear: () => {
+      proxyCache.clear()
+      agentCache.clear()
+      dns.cache.clear()
+    },
+  },
 }
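
Note: lib/index.js now exposes the proxy, agent, and dns caches along with a
combined clear(), which is handy for tests. A small sketch (the count assumes
a fresh process):

    const { getAgent, cache } = require('@npmcli/agent')

    getAgent('https://registry.npmjs.org')
    console.log(cache.agent.size) // 1 in a fresh process
    cache.clear() // empties the proxy, agent, and dns caches together
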
diff --git a/node_modules/@npmcli/agent/lib/options.js b/node_modules/@npmcli/agent/lib/options.js
new file mode 100644
index 0000000000000..cd87c09d6a25a
--- /dev/null
+++ b/node_modules/@npmcli/agent/lib/options.js
@@ -0,0 +1,74 @@
+'use strict'
+
+const dns = require('./dns')
+const { createKey } = require('./util')
+
+const normalizeOptions = (opts) => {
+  const family = parseInt(opts.family ?? '0', 10)
+  const keepAlive = opts.keepAlive ?? true
+
+  const normalized = {
+    // nodejs http agent options. these are all the defaults
+    // but kept here to increase the likelihood of cache hits
+    // https://nodejs.org/api/http.html#new-agentoptions
+    keepAliveMsecs: keepAlive ? 1000 : undefined,
+    maxSockets: opts.maxSockets ?? 15,
+    maxTotalSockets: Infinity,
+    maxFreeSockets: keepAlive ? 256 : undefined,
+    scheduling: 'fifo',
+    // then spread the rest of the options
+    ...opts,
+    // we already set these to their defaults that we want
+    family,
+    keepAlive,
+    // our custom timeout options
+    timeouts: {
+      // the standard timeout option is mapped to our idle timeout
+      // and then deleted below
+      idle: opts.timeout ?? 0,
+      connection: 0,
+      response: 0,
+      transfer: 0,
+      ...opts.timeouts,
+    },
+    // get the dns options that go at the top level of socket connection
+    ...dns.getOptions({ family, ...opts.dns }),
+  }
+
+  // remove timeout since we already used it to set our own idle timeout
+  delete normalized.timeout
+
+  return normalized
+}
+
+const cacheOptions = (options) => {
+  const { secure } = options
+  return createKey({
+    secure: !!secure,
+    // socket connect options
+    family: options.family,
+    hints: options.hints,
+    localAddress: options.localAddress,
+    // tls specific connect options
+    strictSsl: secure ? !!options.rejectUnauthorized : false,
+    ca: secure ? options.ca : null,
+    cert: secure ? options.cert : null,
+    key: secure ? options.key : null,
+    // http agent options
+    keepAlive: options.keepAlive,
+    keepAliveMsecs: options.keepAliveMsecs,
+    maxSockets: options.maxSockets,
+    maxTotalSockets: options.maxTotalSockets,
+    maxFreeSockets: options.maxFreeSockets,
+    scheduling: options.scheduling,
+    // timeout options
+    timeouts: options.timeouts,
+    // proxy
+    proxy: options.proxy,
+  })
+}
+
+module.exports = {
+  normalizeOptions,
+  cacheOptions,
+}
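
Note: one behavior worth calling out in options.js is that the standard
timeout option is folded into timeouts.idle and then deleted, so either
spelling produces the same normalized options and cache key. Illustration
(values arbitrary):

    const { normalizeOptions } = require('@npmcli/agent/lib/options.js')

    const opts = normalizeOptions({ timeout: 1000 })
    console.log(opts.timeouts.idle) // 1000
    console.log('timeout' in opts)  // false, consumed by the idle timeout
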
diff --git a/node_modules/@npmcli/agent/lib/proxy.js b/node_modules/@npmcli/agent/lib/proxy.js
new file mode 100644
index 0000000000000..81afdad74c1e5
--- /dev/null
+++ b/node_modules/@npmcli/agent/lib/proxy.js
@@ -0,0 +1,96 @@
+'use strict'
+
+const { HttpProxyAgent } = require('http-proxy-agent')
+const { HttpsProxyAgent } = require('https-proxy-agent')
+const { SocksProxyAgent } = require('socks-proxy-agent')
+const { LRUCache } = require('lru-cache')
+const { InvalidProxyProtocolError } = require('./errors.js')
+const { urlify } = require('./util.js')
+
+const PROXY_CACHE = new LRUCache({ max: 20 })
+
+const PROXY_ENV = (() => {
+  const keys = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy'])
+  const values = {}
+  for (let [key, value] of Object.entries(process.env)) {
+    key = key.toLowerCase()
+    if (keys.has(key)) {
+      values[key] = value
+    }
+  }
+  return values
+})()
+
+const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols)
+const SECURE_PROTOCOLS = new Set([...SocksProxyAgent.protocols, 'https'])
+
+const isSecureProxy = (url) => {
+  url = urlify(url)
+  const protocol = url.protocol.slice(0, -1)
+  return SECURE_PROTOCOLS.has(protocol)
+}
+
+const getProxyType = (url) => {
+  url = urlify(url)
+
+  const protocol = url.protocol.slice(0, -1)
+  if (SOCKS_PROTOCOLS.has(protocol)) {
+    return [SocksProxyAgent]
+  }
+  if (protocol === 'https' || protocol === 'http') {
+    return [HttpProxyAgent, HttpsProxyAgent]
+  }
+
+  throw new InvalidProxyProtocolError(url)
+}
+
+const isNoProxy = (url, noProxy) => {
+  if (typeof noProxy === 'string') {
+    noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean)
+  }
+
+  if (!noProxy || !noProxy.length) {
+    return false
+  }
+
+  const hostSegments = url.hostname.split('.').reverse()
+
+  return noProxy.some((no) => {
+    const noSegments = no.split('.').filter(Boolean).reverse()
+    if (!noSegments.length) {
+      return false
+    }
+
+    for (let i = 0; i < noSegments.length; i++) {
+      if (hostSegments[i] !== noSegments[i]) {
+        return false
+      }
+    }
+
+    return true
+  })
+}
+
+const getProxy = (url, {
+  proxy = PROXY_ENV.https_proxy,
+  noProxy = PROXY_ENV.no_proxy,
+}) => {
+  url = urlify(url)
+
+  if (!proxy && url.protocol !== 'https:') {
+    proxy = PROXY_ENV.http_proxy || PROXY_ENV.proxy
+  }
+
+  if (!proxy || isNoProxy(url, noProxy)) {
+    return null
+  }
+
+  return urlify(proxy)
+}
+
+module.exports = {
+  getProxyType,
+  getProxy,
+  isSecureProxy,
+  proxyCache: PROXY_CACHE,
+}
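
Note: proxy.js centralizes the environment handling and the suffix-based
no_proxy matching that lib/index.js previously inlined. A sketch of the
matching rule (hostnames illustrative):

    const { getProxy } = require('@npmcli/agent/lib/proxy.js')

    const opts = { proxy: 'http://proxy.example:8080', noProxy: '.npmjs.org' }

    getProxy('https://registry.npmjs.org', opts) // null, suffix-matched by noProxy
    getProxy('https://example.com', opts).href   // 'http://proxy.example:8080/'
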
diff --git a/node_modules/@npmcli/agent/lib/proxy/http.js b/node_modules/@npmcli/agent/lib/proxy/http.js
deleted file mode 100644
index 8d092e963c084..0000000000000
--- a/node_modules/@npmcli/agent/lib/proxy/http.js
+++ /dev/null
@@ -1,146 +0,0 @@
-'use strict'
-
-const http = require('http')
-const https = require('https')
-const net = require('net')
-const tls = require('tls')
-
-const {
-  ConnectionTimeoutError,
-  IdleTimeoutError,
-  InvalidProxyResponseError,
-  ResponseTimeoutError,
-  TransferTimeoutError,
-} = require('../errors.js')
-
-// this proxy class uses the http CONNECT method
-class HttpProxy {
-  constructor ({ agent, lookup, url, secure }) {
-    this.agent = agent
-    this.lookup = lookup
-    this.url = url
-    this.secure = secure
-  }
-
-  createConnection (options, callback) {
-    const requestOptions = {
-      // pass createConnection so this request doesn't go through an agent
-      createConnection: (opts, cb) => {
-        // delete the path first, otherwise (net|tls).connect will try to open a unix socket
-        delete opts.path
-        // we also delete the timeout since we control it ourselves
-        delete opts.timeout
-        opts.family = this.agent.options.family
-        opts.lookup = this.lookup
-
-        if (this.url.protocol === 'https:') {
-          return tls.connect(opts, cb)
-        }
-
-        return net.connect(opts, cb)
-      },
-      method: 'CONNECT',
-      host: this.url.hostname,
-      port: this.url.port,
-      servername: this.url.hostname,
-      path: `${options.host}:${options.port}`,
-      setHost: false,
-      timeout: options.timeout,
-      headers: {
-        connection: this.agent.keepAlive ? 'keep-alive' : 'close',
-        host: `${options.host}:${options.port}`,
-      },
-      rejectUnauthorized: options.rejectUnauthorized,
-    }
-
-    if (this.url.username || this.url.password) {
-      const username = decodeURIComponent(this.url.username)
-      const password = decodeURIComponent(this.url.password)
-      requestOptions.headers['proxy-authentication'] =
-        Buffer.from(`${username}:${password}`).toString('base64')
-    }
-
-    let connectionTimeout
-
-    const onConnect = (res, socket) => {
-      clearTimeout(connectionTimeout)
-      req.removeListener('error', onError)
-
-      if (res.statusCode !== 200) {
-        return callback(new InvalidProxyResponseError(this.url, res.statusCode))
-      }
-
-      if (this.secure) {
-        socket = tls.connect({ ...options, socket })
-      }
-
-      socket.setKeepAlive(this.agent.keepAlive, this.agent.keepAliveMsecs)
-      socket.setNoDelay(this.agent.keepAlive)
-
-      if (options.timeouts.idle) {
-        socket.setTimeout(options.timeouts.idle)
-        socket.once('timeout', () => {
-          socket.destroy(new IdleTimeoutError(this.url.host))
-        })
-      }
-
-      return callback(null, socket)
-    }
-
-    const onError = (err) => {
-      req.removeListener('connect', onConnect)
-      return callback(err)
-    }
-
-    const req = this.secure
-      ? https.request(requestOptions)
-      : http.request(requestOptions)
-
-    req.once('connect', onConnect)
-    req.once('error', onError)
-    req.end()
-
-    if (options.timeouts.connection) {
-      connectionTimeout = setTimeout(() => {
-        return callback(new ConnectionTimeoutError(this.url.host))
-      }, options.timeouts.connection)
-    }
-  }
-
-  addRequest (request, options) {
-    if (this.agent.options.timeouts.response) {
-      let responseTimeout
-
-      const onFinish = () => {
-        responseTimeout = setTimeout(() => {
-          request.destroy(new ResponseTimeoutError(this, request))
-        }, this.agent.options.timeouts.response)
-      }
-
-      const onResponse = () => {
-        clearTimeout(responseTimeout)
-      }
-
-      request.once('finish', onFinish)
-      request.once('response', onResponse)
-    }
-
-    if (this.agent.options.timeouts.transfer) {
-      let transferTimeout
-
-      const onResponse = (res) => {
-        transferTimeout = setTimeout(() => {
-          res.destroy(new TransferTimeoutError(this, request))
-        }, this.agent.options.timeouts.transfer)
-
-        res.once('close', () => {
-          clearTimeout(transferTimeout)
-        })
-      }
-
-      request.once('response', onResponse)
-    }
-  }
-}
-
-module.exports = HttpProxy
diff --git a/node_modules/@npmcli/agent/lib/proxy/index.js b/node_modules/@npmcli/agent/lib/proxy/index.js
deleted file mode 100644
index 87f628c5bbf94..0000000000000
--- a/node_modules/@npmcli/agent/lib/proxy/index.js
+++ /dev/null
@@ -1,25 +0,0 @@
-'use strict'
-
-const { InvalidProxyProtocolError } = require('../errors.js')
-const HttpProxy = require('./http.js')
-const NullProxy = require('./null.js')
-const SocksProxy = require('./socks.js')
-
-const createProxy = ({ agent, lookup, proxy, secure }) => {
-  if (!proxy) {
-    return new NullProxy({ agent, lookup, secure })
-  }
-
-  const parsed = new URL(proxy)
-  if (parsed.protocol === 'http:' || parsed.protocol === 'https:') {
-    return new HttpProxy({ agent, lookup, url: parsed, secure })
-  }
-
-  if (parsed.protocol.startsWith('socks')) {
-    return new SocksProxy({ agent, lookup, url: parsed, secure })
-  }
-
-  throw new InvalidProxyProtocolError(parsed)
-}
-
-module.exports = createProxy
diff --git a/node_modules/@npmcli/agent/lib/proxy/null.js b/node_modules/@npmcli/agent/lib/proxy/null.js
deleted file mode 100644
index d2b2f6f777e92..0000000000000
--- a/node_modules/@npmcli/agent/lib/proxy/null.js
+++ /dev/null
@@ -1,97 +0,0 @@
-'use strict'
-
-const net = require('net')
-const tls = require('tls')
-
-const {
-  ConnectionTimeoutError,
-  IdleTimeoutError,
-  ResponseTimeoutError,
-  TransferTimeoutError,
-} = require('../errors.js')
-
-class NullProxy {
-  constructor ({ agent, lookup, secure }) {
-    this.agent = agent
-    this.lookup = lookup
-    this.secure = secure
-  }
-
-  createConnection (options, callback) {
-    const socket = this.secure
-      ? tls.connect({ ...options, family: this.agent.options.family, lookup: this.lookup })
-      : net.connect({ ...options, family: this.agent.options.family, lookup: this.lookup })
-
-    socket.setKeepAlive(this.agent.keepAlive, this.agent.keepAliveMsecs)
-    socket.setNoDelay(this.agent.keepAlive)
-
-    let connectionTimeout
-
-    if (options.timeouts.connection) {
-      connectionTimeout = setTimeout(() => {
-        callback(new ConnectionTimeoutError(options.host))
-      }, options.timeouts.connection)
-    }
-
-    if (options.timeouts.idle) {
-      socket.setTimeout(options.timeouts.idle)
-      socket.once('timeout', () => {
-        socket.destroy(new IdleTimeoutError(options.host))
-      })
-    }
-
-    const onConnect = () => {
-      clearTimeout(connectionTimeout)
-      socket.removeListener('error', onError)
-      callback(null, socket)
-    }
-
-    const onError = (err) => {
-      socket.removeListener('connect', onConnect)
-      callback(err)
-    }
-
-    socket.once('error', onError)
-    socket.once(this.secure ? 'secureConnect' : 'connect', onConnect)
-  }
-
-  addRequest (request, options) {
-    if (this.agent.options.timeouts.response) {
-      let responseTimeout
-
-      const onFinish = () => {
-        responseTimeout = setTimeout(() => {
-          request.destroy(new ResponseTimeoutError(this, request))
-        }, this.agent.options.timeouts.response)
-      }
-
-      const onResponse = () => {
-        clearTimeout(responseTimeout)
-      }
-
-      request.once('finish', onFinish)
-      request.once('response', onResponse)
-    }
-
-    if (this.agent.options.timeouts.transfer) {
-      let transferTimeout
-
-      const onResponse = (res) => {
-        transferTimeout = setTimeout(() => {
-          // swallow the error event on the request, this allows the one on the response
-          // to make it to the end user
-          request.once('error', () => {})
-          res.destroy(new TransferTimeoutError(this, request))
-        }, this.agent.options.timeouts.transfer)
-
-        res.once('close', () => {
-          clearTimeout(transferTimeout)
-        })
-      }
-
-      request.once('response', onResponse)
-    }
-  }
-}
-
-module.exports = NullProxy
diff --git a/node_modules/@npmcli/agent/lib/proxy/socks.js b/node_modules/@npmcli/agent/lib/proxy/socks.js
deleted file mode 100644
index 8cad7148e9227..0000000000000
--- a/node_modules/@npmcli/agent/lib/proxy/socks.js
+++ /dev/null
@@ -1,153 +0,0 @@
-'use strict'
-
-const { SocksClient } = require('socks')
-const tls = require('tls')
-
-const {
-  ConnectionTimeoutError,
-  IdleTimeoutError,
-  InvalidProxyProtocolError,
-  ResponseTimeoutError,
-  TransferTimeoutError,
-} = require('../errors.js')
-
-class SocksProxy {
-  constructor ({ agent, lookup, secure, url }) {
-    this.agent = agent
-    this.lookup = lookup
-    this.secure = secure
-    this.url = url
-    if (!this.url.port) {
-      this.url.port = 1080
-    }
-
-    if (this.url.protocol === 'socks4:') {
-      this.shouldLookup = true
-      this.type = 4
-    } else if (this.url.protocol === 'socks4a:') {
-      this.shouldLookup = false
-      this.type = 4
-    } else if (this.url.protocol === 'socks5:') {
-      this.shouldLookup = true
-      this.type = 5
-    } else if (this.url.protocol === 'socks5h:' || this.url.protocol === 'socks:') {
-      this.shouldLookup = false
-      this.type = 5
-    } else {
-      throw new InvalidProxyProtocolError(this.url)
-    }
-  }
-
-  createConnection (options, callback) {
-    const socksOptions = {
-      proxy: {
-        host: this.url.hostname,
-        port: parseInt(this.url.port, 10),
-        type: this.type,
-        userId: this.url.username,
-        password: this.url.password,
-      },
-      destination: {
-        host: options.host,
-        port: parseInt(options.port, 10),
-      },
-      command: 'connect',
-      socket_options: {
-        family: this.agent.options.family,
-        lookup: this.lookup,
-      },
-    }
-
-    const connect = () => {
-      let connectionTimeout
-      const socksClient = new SocksClient(socksOptions)
-
-      const onError = (err) => {
-        socksClient.removeListener('established', onEstablished)
-        return callback(err)
-      }
-
-      const onEstablished = (connection) => {
-        clearTimeout(connectionTimeout)
-        socksClient.removeListener('error', onError)
-
-        if (this.secure) {
-          connection.socket = tls.connect({ ...options, socket: connection.socket })
-        }
-
-        connection.socket.setKeepAlive(this.agent.keepAlive, this.agent.keepAliveMsecs)
-        connection.socket.setNoDelay(this.agent.keepAlive)
-
-        if (options.timeouts.idle) {
-          connection.socket.setTimeout(options.timeouts.idle)
-          connection.socket.once('timeout', () => {
-            connection.socket.destroy(new IdleTimeoutError(this.url.host))
-          })
-        }
-
-        return callback(null, connection.socket)
-      }
-
-      socksClient.once('error', onError)
-      socksClient.once('established', onEstablished)
-
-      if (options.timeouts.connection) {
-        connectionTimeout = setTimeout(() => {
-          return callback(new ConnectionTimeoutError(this.url.host))
-        }, options.timeouts.connection)
-      }
-
-      socksClient.connect()
-    }
-
-    if (!this.shouldLookup) {
-      return connect()
-    }
-
-    this.lookup(options.host, (err, result) => {
-      if (err) {
-        return callback(err)
-      }
-
-      socksOptions.destination.host = result
-      connect()
-    })
-  }
-
-  addRequest (request, options) {
-    if (this.agent.options.timeouts.response) {
-      let responseTimeout
-
-      const onFinish = () => {
-        responseTimeout = setTimeout(() => {
-          request.destroy(new ResponseTimeoutError(this, request))
-        }, this.agent.options.timeouts.response)
-      }
-
-      const onResponse = () => {
-        clearTimeout(responseTimeout)
-      }
-
-      request.once('finish', onFinish)
-      request.once('response', onResponse)
-    }
-
-    if (this.agent.options.timeouts.transfer) {
-      let transferTimeout
-
-      const onResponse = (res) => {
-        transferTimeout = setTimeout(() => {
-          res.destroy(new TransferTimeoutError(this, request))
-        }, this.agent.options.timeouts.transfer)
-
-        res.once('close', () => {
-          clearTimeout(transferTimeout)
-        })
-      }
-
-      request.once('response', onResponse)
-    }
-  }
-}
-
-module.exports = SocksProxy
diff --git a/node_modules/@npmcli/agent/lib/util.js b/node_modules/@npmcli/agent/lib/util.js
index 512207084d23e..6d42a2e202c1f 100644
--- a/node_modules/@npmcli/agent/lib/util.js
+++ b/node_modules/@npmcli/agent/lib/util.js
@@ -1,33 +1,84 @@
 'use strict'
 
-const dns = require('dns')
+const timers = require('timers/promises')
 
-const normalizeOptions = (_options) => {
-  const options = { ..._options }
+const createKey = (obj) => {
+  let key = ''
+  const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0])
+  for (let [k, v] of sorted) {
+    if (v == null) {
+      v = 'null'
+    } else if (v instanceof URL) {
+      v = v.toString()
+    } else if (typeof v === 'object') {
+      v = createKey(v)
+    }
+    key += `${k}:${v}:`
+  }
+  return key
+}
 
-  if (typeof options.keepAlive === 'undefined') {
-    options.keepAlive = true
+const createTimeout = (delay, signal) => {
+  if (!delay) {
+    return signal ? new Promise(() => {}) : null
   }
 
-  if (!options.timeouts) {
-    options.timeouts = {}
+  if (!signal) {
+    let timeout
+    return {
+      start: (cb) => (timeout = setTimeout(cb, delay)),
+      clear: () => clearTimeout(timeout),
+    }
   }
 
-  if (options.timeout) {
-    options.timeouts.idle = options.timeout
-    delete options.timeout
+  return timers.setTimeout(delay, null, signal)
+    .then(() => {
+      throw new Error()
+    }).catch((err) => {
+      if (err.name === 'AbortError') {
+        return
+      }
+      throw err
+    })
+}
+
+const abortRace = async (promises, ac = new AbortController()) => {
+  let res
+  try {
+    res = await Promise.race(promises.map((p) => p(ac)))
+    ac.abort()
+  } catch (err) {
+    ac.abort()
+    throw err
   }
+  return res
+}
+
+const urlify = (url) => typeof url === 'string' ? new URL(url) : url
 
-  options.family = !isNaN(+options.family) ? +options.family : 0
-  options.dns = {
-    ttl: 5 * 60 * 1000,
-    lookup: dns.lookup,
-    ...options.dns,
+const appendPort = (host, port) => {
+  // istanbul ignore next
+  if (port) {
+    host += `:${port}`
   }
+  return host
+}
 
-  return options
+const cacheAgent = ({ key, cache, secure, proxies }, ...args) => {
+  if (cache.has(key)) {
+    return cache.get(key)
+  }
+  const Ctor = (secure ? proxies[1] : proxies[0]) ?? proxies[0]
+  const agent = new Ctor(...args)
+  cache.set(key, agent)
+  return agent
 }
 
 module.exports = {
-  normalizeOptions,
+  createKey,
+  createTimeout,
+  abortRace,
+  urlify,
+  cacheAgent,
+  appendPort,
 }
diff --git a/node_modules/@npmcli/agent/node_modules/agent-base/dist/helpers.js b/node_modules/@npmcli/agent/node_modules/agent-base/dist/helpers.js
new file mode 100644
index 0000000000000..ef3f92022d455
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/agent-base/dist/helpers.js
@@ -0,0 +1,66 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.req = exports.json = exports.toBuffer = void 0;
+const http = __importStar(require("http"));
+const https = __importStar(require("https"));
+async function toBuffer(stream) {
+    let length = 0;
+    const chunks = [];
+    for await (const chunk of stream) {
+        length += chunk.length;
+        chunks.push(chunk);
+    }
+    return Buffer.concat(chunks, length);
+}
+exports.toBuffer = toBuffer;
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+async function json(stream) {
+    const buf = await toBuffer(stream);
+    const str = buf.toString('utf8');
+    try {
+        return JSON.parse(str);
+    }
+    catch (_err) {
+        const err = _err;
+        err.message += ` (input: ${str})`;
+        throw err;
+    }
+}
+exports.json = json;
+function req(url, opts = {}) {
+    const href = typeof url === 'string' ? url : url.href;
+    const req = (href.startsWith('https:') ? https : http).request(url, opts);
+    const promise = new Promise((resolve, reject) => {
+        req
+            .once('response', resolve)
+            .once('error', reject)
+            .end();
+    });
+    req.then = promise.then.bind(promise);
+    return req;
+}
+exports.req = req;
+//# sourceMappingURL=helpers.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/agent/node_modules/agent-base/dist/index.js b/node_modules/@npmcli/agent/node_modules/agent-base/dist/index.js
new file mode 100644
index 0000000000000..7bafc8c68604f
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/agent-base/dist/index.js
@@ -0,0 +1,112 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Agent = void 0;
+const http = __importStar(require("http"));
+__exportStar(require("./helpers"), exports);
+const INTERNAL = Symbol('AgentBaseInternalState');
+class Agent extends http.Agent {
+    constructor(opts) {
+        super(opts);
+        this[INTERNAL] = {};
+    }
+    /**
+     * Determine whether this is an `http` or `https` request.
+     */
+    isSecureEndpoint(options) {
+        if (options) {
+            // First check the `secureEndpoint` property explicitly, since this
+            // means that a parent `Agent` is "passing through" to this instance.
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            if (typeof options.secureEndpoint === 'boolean') {
+                return options.secureEndpoint;
+            }
+            // If no explicit `secure` endpoint, check if `protocol` property is
+            // set. This will usually be the case since using a full string URL
+            // or `URL` instance should be the most common usage.
+            if (typeof options.protocol === 'string') {
+                return options.protocol === 'https:';
+            }
+        }
+        // Finally, if no `protocol` property was set, then fall back to
+        // checking the stack trace of the current call stack, and try to
+        // detect the "https" module.
+        const { stack } = new Error();
+        if (typeof stack !== 'string')
+            return false;
+        return stack
+            .split('\n')
+            .some((l) => l.indexOf('(https.js:') !== -1 ||
+            l.indexOf('node:https:') !== -1);
+    }
+    createSocket(req, options, cb) {
+        const connectOpts = {
+            ...options,
+            secureEndpoint: this.isSecureEndpoint(options),
+        };
+        Promise.resolve()
+            .then(() => this.connect(req, connectOpts))
+            .then((socket) => {
+            if (socket instanceof http.Agent) {
+                // @ts-expect-error `addRequest()` isn't defined in `@types/node`
+                return socket.addRequest(req, connectOpts);
+            }
+            this[INTERNAL].currentSocket = socket;
+            // @ts-expect-error `createSocket()` isn't defined in `@types/node`
+            super.createSocket(req, options, cb);
+        }, cb);
+    }
+    createConnection() {
+        const socket = this[INTERNAL].currentSocket;
+        this[INTERNAL].currentSocket = undefined;
+        if (!socket) {
+            throw new Error('No socket was returned in the `connect()` function');
+        }
+        return socket;
+    }
+    get defaultPort() {
+        return (this[INTERNAL].defaultPort ??
+            (this.protocol === 'https:' ? 443 : 80));
+    }
+    set defaultPort(v) {
+        if (this[INTERNAL]) {
+            this[INTERNAL].defaultPort = v;
+        }
+    }
+    get protocol() {
+        return (this[INTERNAL].protocol ??
+            (this.isSecureEndpoint() ? 'https:' : 'http:'));
+    }
+    set protocol(v) {
+        if (this[INTERNAL]) {
+            this[INTERNAL].protocol = v;
+        }
+    }
+}
+exports.Agent = Agent;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/agent/node_modules/agent-base/package.json b/node_modules/@npmcli/agent/node_modules/agent-base/package.json
new file mode 100644
index 0000000000000..7178f4983f4fb
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/agent-base/package.json
@@ -0,0 +1,49 @@
+{
+  "name": "agent-base",
+  "version": "7.1.0",
+  "description": "Turn a function into an `http.Agent` instance",
+  "main": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "files": [
+    "dist"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/TooTallNate/proxy-agents.git",
+    "directory": "packages/agent-base"
+  },
+  "keywords": [
+    "http",
+    "agent",
+    "base",
+    "barebones",
+    "https"
+  ],
+  "author": "Nathan Rajlich  (http://n8.io/)",
+  "license": "MIT",
+  "dependencies": {
+    "debug": "^4.3.4"
+  },
+  "devDependencies": {
+    "@types/debug": "^4.1.7",
+    "@types/jest": "^29.5.1",
+    "@types/node": "^14.18.45",
+    "@types/semver": "^7.3.13",
+    "@types/ws": "^6.0.4",
+    "async-listen": "^3.0.0",
+    "jest": "^29.5.0",
+    "ts-jest": "^29.1.0",
+    "typescript": "^5.0.4",
+    "ws": "^3.3.3",
+    "tsconfig": "0.0.0"
+  },
+  "engines": {
+    "node": ">= 14"
+  },
+  "scripts": {
+    "build": "tsc",
+    "test": "jest --env node --verbose --bail",
+    "lint": "eslint . --ext .ts",
+    "pack": "node ../../scripts/pack.mjs"
+  }
+}
\ No newline at end of file
diff --git a/node_modules/@npmcli/agent/node_modules/http-proxy-agent/LICENSE b/node_modules/@npmcli/agent/node_modules/http-proxy-agent/LICENSE
new file mode 100644
index 0000000000000..aad14057fad57
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/http-proxy-agent/LICENSE
@@ -0,0 +1,25 @@
+License
+-------
+
+(The MIT License)
+
+Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net>
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/@npmcli/agent/node_modules/http-proxy-agent/dist/index.js b/node_modules/@npmcli/agent/node_modules/http-proxy-agent/dist/index.js
new file mode 100644
index 0000000000000..4a7daf6156f94
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/http-proxy-agent/dist/index.js
@@ -0,0 +1,147 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HttpProxyAgent = void 0;
+const net = __importStar(require("net"));
+const tls = __importStar(require("tls"));
+const debug_1 = __importDefault(require("debug"));
+const events_1 = require("events");
+const agent_base_1 = require("agent-base");
+const debug = (0, debug_1.default)('http-proxy-agent');
+/**
+ * The `HttpProxyAgent` implements an HTTP Agent subclass that connects
+ * to the specified "HTTP proxy server" in order to proxy HTTP requests.
+ */
+class HttpProxyAgent extends agent_base_1.Agent {
+    constructor(proxy, opts) {
+        super(opts);
+        this.proxy = typeof proxy === 'string' ? new URL(proxy) : proxy;
+        this.proxyHeaders = opts?.headers ?? {};
+        debug('Creating new HttpProxyAgent instance: %o', this.proxy.href);
+        // Trim off the brackets from IPv6 addresses
+        const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, '');
+        const port = this.proxy.port
+            ? parseInt(this.proxy.port, 10)
+            : this.proxy.protocol === 'https:'
+                ? 443
+                : 80;
+        this.connectOpts = {
+            ...(opts ? omit(opts, 'headers') : null),
+            host,
+            port,
+        };
+    }
+    addRequest(req, opts) {
+        req._header = null;
+        this.setRequestProps(req, opts);
+        // @ts-expect-error `addRequest()` isn't defined in `@types/node`
+        super.addRequest(req, opts);
+    }
+    setRequestProps(req, opts) {
+        const { proxy } = this;
+        const protocol = opts.secureEndpoint ? 'https:' : 'http:';
+        const hostname = req.getHeader('host') || 'localhost';
+        const base = `${protocol}//${hostname}`;
+        const url = new URL(req.path, base);
+        if (opts.port !== 80) {
+            url.port = String(opts.port);
+        }
+        // Change the `http.ClientRequest` instance's "path" field
+        // to the absolute path of the URL that will be requested.
+        req.path = String(url);
+        // Inject the `Proxy-Authorization` header if necessary.
+        const headers = typeof this.proxyHeaders === 'function'
+            ? this.proxyHeaders()
+            : { ...this.proxyHeaders };
+        if (proxy.username || proxy.password) {
+            const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`;
+            headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`;
+        }
+        if (!headers['Proxy-Connection']) {
+            headers['Proxy-Connection'] = this.keepAlive
+                ? 'Keep-Alive'
+                : 'close';
+        }
+        for (const name of Object.keys(headers)) {
+            const value = headers[name];
+            if (value) {
+                req.setHeader(name, value);
+            }
+        }
+    }
+    async connect(req, opts) {
+        req._header = null;
+        if (!req.path.includes('://')) {
+            this.setRequestProps(req, opts);
+        }
+        // At this point, the http ClientRequest's internal `_header` field
+        // might have already been set. If this is the case then we'll need
+        // to re-generate the string since we just changed the `req.path`.
+        let first;
+        let endOfHeaders;
+        debug('Regenerating stored HTTP header string for request');
+        req._implicitHeader();
+        if (req.outputData && req.outputData.length > 0) {
+            debug('Patching connection write() output buffer with updated header');
+            first = req.outputData[0].data;
+            endOfHeaders = first.indexOf('\r\n\r\n') + 4;
+            req.outputData[0].data =
+                req._header + first.substring(endOfHeaders);
+            debug('Output buffer: %o', req.outputData[0].data);
+        }
+        // Create a socket connection to the proxy server.
+        let socket;
+        if (this.proxy.protocol === 'https:') {
+            debug('Creating `tls.Socket`: %o', this.connectOpts);
+            socket = tls.connect(this.connectOpts);
+        }
+        else {
+            debug('Creating `net.Socket`: %o', this.connectOpts);
+            socket = net.connect(this.connectOpts);
+        }
+        // Wait for the socket's `connect` event, so that this `callback()`
+        // function throws instead of the `http` request machinery. This is
+        // important for i.e. `PacProxyAgent` which determines a failed proxy
+        // connection via the `callback()` function throwing.
+        await (0, events_1.once)(socket, 'connect');
+        return socket;
+    }
+}
+HttpProxyAgent.protocols = ['http', 'https'];
+exports.HttpProxyAgent = HttpProxyAgent;
+function omit(obj, ...keys) {
+    const ret = {};
+    let key;
+    for (key in obj) {
+        if (!keys.includes(key)) {
+            ret[key] = obj[key];
+        }
+    }
+    return ret;
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/agent/node_modules/http-proxy-agent/package.json b/node_modules/@npmcli/agent/node_modules/http-proxy-agent/package.json
new file mode 100644
index 0000000000000..08c650cbb22aa
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/http-proxy-agent/package.json
@@ -0,0 +1,47 @@
+{
+  "name": "http-proxy-agent",
+  "version": "7.0.0",
+  "description": "An HTTP(s) proxy `http.Agent` implementation for HTTP",
+  "main": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "files": [
+    "dist"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/TooTallNate/proxy-agents.git",
+    "directory": "packages/http-proxy-agent"
+  },
+  "keywords": [
+    "http",
+    "proxy",
+    "endpoint",
+    "agent"
+  ],
+  "author": "Nathan Rajlich  (http://n8.io/)",
+  "license": "MIT",
+  "dependencies": {
+    "agent-base": "^7.1.0",
+    "debug": "^4.3.4"
+  },
+  "devDependencies": {
+    "@types/debug": "^4.1.7",
+    "@types/jest": "^29.5.1",
+    "@types/node": "^14.18.45",
+    "async-listen": "^3.0.0",
+    "jest": "^29.5.0",
+    "ts-jest": "^29.1.0",
+    "typescript": "^5.0.4",
+    "proxy": "2.1.1",
+    "tsconfig": "0.0.0"
+  },
+  "engines": {
+    "node": ">= 14"
+  },
+  "scripts": {
+    "build": "tsc",
+    "test": "jest --env node --verbose --bail",
+    "lint": "eslint . --ext .ts",
+    "pack": "node ../../scripts/pack.mjs"
+  }
+}
\ No newline at end of file
diff --git a/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/index.js b/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/index.js
new file mode 100644
index 0000000000000..e3bbfe632c454
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/index.js
@@ -0,0 +1,170 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HttpsProxyAgent = void 0;
+const net = __importStar(require("net"));
+const tls = __importStar(require("tls"));
+const assert_1 = __importDefault(require("assert"));
+const debug_1 = __importDefault(require("debug"));
+const agent_base_1 = require("agent-base");
+const parse_proxy_response_1 = require("./parse-proxy-response");
+const debug = (0, debug_1.default)('https-proxy-agent');
+/**
+ * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to
+ * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests.
+ *
+ * Outgoing HTTP requests are first tunneled through the proxy server using the
+ * `CONNECT` HTTP request method to establish a connection to the proxy server,
+ * and then the proxy server connects to the destination target and issues the
+ * HTTP request from the proxy server.
+ *
+ * `https:` requests have their socket connection upgraded to TLS once
+ * the connection to the proxy server has been established.
+ */
+class HttpsProxyAgent extends agent_base_1.Agent {
+    constructor(proxy, opts) {
+        super(opts);
+        this.options = { path: undefined };
+        this.proxy = typeof proxy === 'string' ? new URL(proxy) : proxy;
+        this.proxyHeaders = opts?.headers ?? {};
+        debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href);
+        // Trim off the brackets from IPv6 addresses
+        const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, '');
+        const port = this.proxy.port
+            ? parseInt(this.proxy.port, 10)
+            : this.proxy.protocol === 'https:'
+                ? 443
+                : 80;
+        this.connectOpts = {
+            // Attempt to negotiate http/1.1 for proxy servers that support http/2
+            ALPNProtocols: ['http/1.1'],
+            ...(opts ? omit(opts, 'headers') : null),
+            host,
+            port,
+        };
+    }
+    /**
+     * Called when the node-core HTTP client library is creating a
+     * new HTTP request.
+     */
+    async connect(req, opts) {
+        const { proxy } = this;
+        if (!opts.host) {
+            throw new TypeError('No "host" provided');
+        }
+        // Create a socket connection to the proxy server.
+        let socket;
+        if (proxy.protocol === 'https:') {
+            debug('Creating `tls.Socket`: %o', this.connectOpts);
+            socket = tls.connect(this.connectOpts);
+        }
+        else {
+            debug('Creating `net.Socket`: %o', this.connectOpts);
+            socket = net.connect(this.connectOpts);
+        }
+        const headers = typeof this.proxyHeaders === 'function'
+            ? this.proxyHeaders()
+            : { ...this.proxyHeaders };
+        const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host;
+        let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`;
+        // Inject the `Proxy-Authorization` header if necessary.
+        if (proxy.username || proxy.password) {
+            const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`;
+            headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`;
+        }
+        headers.Host = `${host}:${opts.port}`;
+        if (!headers['Proxy-Connection']) {
+            headers['Proxy-Connection'] = this.keepAlive
+                ? 'Keep-Alive'
+                : 'close';
+        }
+        for (const name of Object.keys(headers)) {
+            payload += `${name}: ${headers[name]}\r\n`;
+        }
+        const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket);
+        socket.write(`${payload}\r\n`);
+        const { connect, buffered } = await proxyResponsePromise;
+        req.emit('proxyConnect', connect);
+        this.emit('proxyConnect', connect, req);
+        if (connect.statusCode === 200) {
+            req.once('socket', resume);
+            if (opts.secureEndpoint) {
+                // The proxy is connecting to a TLS server, so upgrade
+                // this socket connection to a TLS connection.
+                debug('Upgrading socket connection to TLS');
+                const servername = opts.servername || opts.host;
+                return tls.connect({
+                    ...omit(opts, 'host', 'path', 'port'),
+                    socket,
+                    servername: net.isIP(servername) ? undefined : servername,
+                });
+            }
+            return socket;
+        }
+        // Some other status code that's not 200... need to re-play the HTTP
+        // header "data" events onto the socket once the HTTP machinery is
+        // attached so that the node core `http` can parse and handle the
+        // error status code.
+        // Close the original socket, and a new "fake" socket is returned
+        // instead, so that the proxy doesn't get the HTTP request
+        // written to it (which may contain `Authorization` headers or other
+        // sensitive data).
+        //
+        // See: https://hackerone.com/reports/541502
+        socket.destroy();
+        const fakeSocket = new net.Socket({ writable: false });
+        fakeSocket.readable = true;
+        // Need to wait for the "socket" event to re-play the "data" events.
+        req.once('socket', (s) => {
+            debug('Replaying proxy buffer for failed request');
+            (0, assert_1.default)(s.listenerCount('data') > 0);
+            // Replay the "buffered" Buffer onto the fake `socket`, since at
+            // this point the HTTP module machinery has been hooked up for
+            // the user.
+            s.push(buffered);
+            s.push(null);
+        });
+        return fakeSocket;
+    }
+}
+HttpsProxyAgent.protocols = ['http', 'https'];
+exports.HttpsProxyAgent = HttpsProxyAgent;
+function resume(socket) {
+    socket.resume();
+}
+function omit(obj, ...keys) {
+    const ret = {};
+    let key;
+    for (key in obj) {
+        if (!keys.includes(key)) {
+            ret[key] = obj[key];
+        }
+    }
+    return ret;
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/parse-proxy-response.js b/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/parse-proxy-response.js
new file mode 100644
index 0000000000000..a28f1d811805f
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/parse-proxy-response.js
@@ -0,0 +1,98 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseProxyResponse = void 0;
+const debug_1 = __importDefault(require("debug"));
+const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response');
+function parseProxyResponse(socket) {
+    return new Promise((resolve, reject) => {
+        // we need to buffer any HTTP traffic that happens with the proxy before we get
+        // the CONNECT response, so that if the response is anything other than an "200"
+        // response code, then we can re-play the "data" events on the socket once the
+        // HTTP parser is hooked up...
+        let buffersLength = 0;
+        const buffers = [];
+        function read() {
+            const b = socket.read();
+            if (b)
+                ondata(b);
+            else
+                socket.once('readable', read);
+        }
+        function cleanup() {
+            socket.removeListener('end', onend);
+            socket.removeListener('error', onerror);
+            socket.removeListener('readable', read);
+        }
+        function onend() {
+            cleanup();
+            debug('onend');
+            reject(new Error('Proxy connection ended before receiving CONNECT response'));
+        }
+        function onerror(err) {
+            cleanup();
+            debug('onerror %o', err);
+            reject(err);
+        }
+        function ondata(b) {
+            buffers.push(b);
+            buffersLength += b.length;
+            const buffered = Buffer.concat(buffers, buffersLength);
+            const endOfHeaders = buffered.indexOf('\r\n\r\n');
+            if (endOfHeaders === -1) {
+                // keep buffering
+                debug('have not received end of HTTP headers yet...');
+                read();
+                return;
+            }
+            const headerParts = buffered.slice(0, endOfHeaders).toString('ascii').split('\r\n');
+            const firstLine = headerParts.shift();
+            if (!firstLine) {
+                socket.destroy();
+                return reject(new Error('No header received from proxy CONNECT response'));
+            }
+            const firstLineParts = firstLine.split(' ');
+            const statusCode = +firstLineParts[1];
+            const statusText = firstLineParts.slice(2).join(' ');
+            const headers = {};
+            for (const header of headerParts) {
+                if (!header)
+                    continue;
+                const firstColon = header.indexOf(':');
+                if (firstColon === -1) {
+                    socket.destroy();
+                    return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`));
+                }
+                const key = header.slice(0, firstColon).toLowerCase();
+                const value = header.slice(firstColon + 1).trimStart();
+                const current = headers[key];
+                if (typeof current === 'string') {
+                    headers[key] = [current, value];
+                }
+                else if (Array.isArray(current)) {
+                    current.push(value);
+                }
+                else {
+                    headers[key] = value;
+                }
+            }
+            debug('got proxy server response: %o %o', firstLine, headers);
+            cleanup();
+            resolve({
+                connect: {
+                    statusCode,
+                    statusText,
+                    headers,
+                },
+                buffered,
+            });
+        }
+        socket.on('error', onerror);
+        socket.on('end', onend);
+        read();
+    });
+}
+exports.parseProxyResponse = parseProxyResponse;
+//# sourceMappingURL=parse-proxy-response.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/agent/node_modules/https-proxy-agent/package.json b/node_modules/@npmcli/agent/node_modules/https-proxy-agent/package.json
new file mode 100644
index 0000000000000..fc5f988d3b02b
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/https-proxy-agent/package.json
@@ -0,0 +1,50 @@
+{
+  "name": "https-proxy-agent",
+  "version": "7.0.1",
+  "description": "An HTTP(s) proxy `http.Agent` implementation for HTTPS",
+  "main": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "files": [
+    "dist"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/TooTallNate/proxy-agents.git",
+    "directory": "packages/https-proxy-agent"
+  },
+  "keywords": [
+    "https",
+    "proxy",
+    "endpoint",
+    "agent"
+  ],
+  "author": "Nathan Rajlich  (http://n8.io/)",
+  "license": "MIT",
+  "dependencies": {
+    "agent-base": "^7.0.2",
+    "debug": "4"
+  },
+  "devDependencies": {
+    "@types/async-retry": "^1.4.5",
+    "@types/debug": "4",
+    "@types/jest": "^29.5.1",
+    "@types/node": "^14.18.45",
+    "async-listen": "^3.0.0",
+    "async-retry": "^1.3.3",
+    "jest": "^29.5.0",
+    "ts-jest": "^29.1.0",
+    "typescript": "^5.0.4",
+    "proxy": "2.1.1",
+    "tsconfig": "0.0.0"
+  },
+  "engines": {
+    "node": ">= 14"
+  },
+  "scripts": {
+    "build": "tsc",
+    "test": "jest --env node --verbose --bail test/test.ts",
+    "test-e2e": "jest --env node --verbose --bail test/e2e.test.ts",
+    "lint": "eslint --ext .ts",
+    "pack": "node ../../scripts/pack.mjs"
+  }
+}
\ No newline at end of file
diff --git a/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/dist/index.js b/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/dist/index.js
new file mode 100644
index 0000000000000..8189e014c13a0
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/dist/index.js
@@ -0,0 +1,181 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SocksProxyAgent = void 0;
+const socks_1 = require("socks");
+const agent_base_1 = require("agent-base");
+const debug_1 = __importDefault(require("debug"));
+const dns = __importStar(require("dns"));
+const net = __importStar(require("net"));
+const tls = __importStar(require("tls"));
+const debug = (0, debug_1.default)('socks-proxy-agent');
+function parseSocksURL(url) {
+    let lookup = false;
+    let type = 5;
+    const host = url.hostname;
+    // From RFC 1928, Section 3: https://tools.ietf.org/html/rfc1928#section-3
+    // "The SOCKS service is conventionally located on TCP port 1080"
+    const port = parseInt(url.port, 10) || 1080;
+    // figure out if we want socks v4 or v5, based on the "protocol" used.
+    // Defaults to 5.
+    switch (url.protocol.replace(':', '')) {
+        case 'socks4':
+            lookup = true;
+            type = 4;
+            break;
+        // pass through
+        case 'socks4a':
+            type = 4;
+            break;
+        case 'socks5':
+            lookup = true;
+            type = 5;
+            break;
+        // pass through
+        case 'socks': // no version specified, default to 5h
+            type = 5;
+            break;
+        case 'socks5h':
+            type = 5;
+            break;
+        default:
+            throw new TypeError(`A "socks" protocol must be specified! Got: ${String(url.protocol)}`);
+    }
+    const proxy = {
+        host,
+        port,
+        type,
+    };
+    if (url.username) {
+        Object.defineProperty(proxy, 'userId', {
+            value: decodeURIComponent(url.username),
+            enumerable: false,
+        });
+    }
+    if (url.password != null) {
+        Object.defineProperty(proxy, 'password', {
+            value: decodeURIComponent(url.password),
+            enumerable: false,
+        });
+    }
+    return { lookup, proxy };
+}
+class SocksProxyAgent extends agent_base_1.Agent {
+    constructor(uri, opts) {
+        super(opts);
+        const url = typeof uri === 'string' ? new URL(uri) : uri;
+        const { proxy, lookup } = parseSocksURL(url);
+        this.shouldLookup = lookup;
+        this.proxy = proxy;
+        this.timeout = opts?.timeout ?? null;
+    }
+    /**
+     * Initiates a SOCKS connection to the specified SOCKS proxy server,
+     * which in turn connects to the specified remote host and port.
+     */
+    async connect(req, opts) {
+        const { shouldLookup, proxy, timeout } = this;
+        if (!opts.host) {
+            throw new Error('No `host` defined!');
+        }
+        let { host } = opts;
+        const { port, lookup: lookupFn = dns.lookup } = opts;
+        if (shouldLookup) {
+            // Client-side DNS resolution for "4" and "5" socks proxy versions.
+            host = await new Promise((resolve, reject) => {
+                // Use the request's custom lookup, if one was configured:
+                lookupFn(host, {}, (err, res) => {
+                    if (err) {
+                        reject(err);
+                    }
+                    else {
+                        resolve(res);
+                    }
+                });
+            });
+        }
+        const socksOpts = {
+            proxy,
+            destination: {
+                host,
+                port: typeof port === 'number' ? port : parseInt(port, 10),
+            },
+            command: 'connect',
+            timeout: timeout ?? undefined,
+        };
+        const cleanup = (tlsSocket) => {
+            req.destroy();
+            socket.destroy();
+            if (tlsSocket)
+                tlsSocket.destroy();
+        };
+        debug('Creating socks proxy connection: %o', socksOpts);
+        const { socket } = await socks_1.SocksClient.createConnection(socksOpts);
+        debug('Successfully created socks proxy connection');
+        if (timeout !== null) {
+            socket.setTimeout(timeout);
+            socket.on('timeout', () => cleanup());
+        }
+        if (opts.secureEndpoint) {
+            // The proxy is connecting to a TLS server, so upgrade
+            // this socket connection to a TLS connection.
+            debug('Upgrading socket connection to TLS');
+            const servername = opts.servername || opts.host;
+            const tlsSocket = tls.connect({
+                ...omit(opts, 'host', 'path', 'port'),
+                socket,
+                servername: net.isIP(servername) ? undefined : servername,
+            });
+            tlsSocket.once('error', (error) => {
+                debug('Socket TLS error', error.message);
+                cleanup(tlsSocket);
+            });
+            return tlsSocket;
+        }
+        return socket;
+    }
+}
+SocksProxyAgent.protocols = [
+    'socks',
+    'socks4',
+    'socks4a',
+    'socks5',
+    'socks5h',
+];
+exports.SocksProxyAgent = SocksProxyAgent;
+function omit(obj, ...keys) {
+    const ret = {};
+    let key;
+    for (key in obj) {
+        if (!keys.includes(key)) {
+            ret[key] = obj[key];
+        }
+    }
+    return ret;
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/package.json b/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/package.json
new file mode 100644
index 0000000000000..a6c7c0741641a
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/package.json
@@ -0,0 +1,142 @@
+{
+  "name": "socks-proxy-agent",
+  "version": "8.0.1",
+  "description": "A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS",
+  "main": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "files": [
+    "dist"
+  ],
+  "author": {
+    "email": "nathan@tootallnate.net",
+    "name": "Nathan Rajlich",
+    "url": "http://n8.io/"
+  },
+  "contributors": [
+    {
+      "name": "Kiko Beats",
+      "email": "josefrancisco.verdu@gmail.com"
+    },
+    {
+      "name": "Josh Glazebrook",
+      "email": "josh@joshglazebrook.com"
+    },
+    {
+      "name": "talmobi",
+      "email": "talmobi@users.noreply.github.com"
+    },
+    {
+      "name": "Indospace.io",
+      "email": "justin@indospace.io"
+    },
+    {
+      "name": "Kilian von Pflugk",
+      "email": "github@jumoog.io"
+    },
+    {
+      "name": "Kyle",
+      "email": "admin@hk1229.cn"
+    },
+    {
+      "name": "Matheus Fernandes",
+      "email": "matheus.frndes@gmail.com"
+    },
+    {
+      "name": "Ricky Miller",
+      "email": "richardkazuomiller@gmail.com"
+    },
+    {
+      "name": "Shantanu Sharma",
+      "email": "shantanu34@outlook.com"
+    },
+    {
+      "name": "Tim Perry",
+      "email": "pimterry@gmail.com"
+    },
+    {
+      "name": "Vadim Baryshev",
+      "email": "vadimbaryshev@gmail.com"
+    },
+    {
+      "name": "jigu",
+      "email": "luo1257857309@gmail.com"
+    },
+    {
+      "name": "Alba Mendez",
+      "email": "me@jmendeth.com"
+    },
+    {
+      "name": "Дмитрий Гуденков",
+      "email": "Dimangud@rambler.ru"
+    },
+    {
+      "name": "Andrei Bitca",
+      "email": "63638922+andrei-bitca-dc@users.noreply.github.com"
+    },
+    {
+      "name": "Andrew Casey",
+      "email": "amcasey@users.noreply.github.com"
+    },
+    {
+      "name": "Brandon Ros",
+      "email": "brandonros1@gmail.com"
+    },
+    {
+      "name": "Dang Duy Thanh",
+      "email": "thanhdd.it@gmail.com"
+    },
+    {
+      "name": "Dimitar Nestorov",
+      "email": "8790386+dimitarnestorov@users.noreply.github.com"
+    }
+  ],
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/TooTallNate/proxy-agents.git",
+    "directory": "packages/socks-proxy-agent"
+  },
+  "keywords": [
+    "agent",
+    "http",
+    "https",
+    "proxy",
+    "socks",
+    "socks4",
+    "socks4a",
+    "socks5",
+    "socks5h"
+  ],
+  "dependencies": {
+    "agent-base": "^7.0.1",
+    "debug": "^4.3.4",
+    "socks": "^2.7.1"
+  },
+  "devDependencies": {
+    "@types/async-retry": "^1.4.5",
+    "@types/debug": "^4.1.7",
+    "@types/dns2": "^2.0.3",
+    "@types/jest": "^29.5.1",
+    "@types/node": "^14.18.45",
+    "async-listen": "^2.1.0",
+    "async-retry": "^1.3.3",
+    "cacheable-lookup": "^6.1.0",
+    "dns2": "^2.1.0",
+    "jest": "^29.5.0",
+    "socksv5": "github:TooTallNate/socksv5#fix/dstSock-close-event",
+    "ts-jest": "^29.1.0",
+    "typescript": "^5.0.4",
+    "tsconfig": "0.0.0",
+    "proxy": "2.0.1"
+  },
+  "engines": {
+    "node": ">= 14"
+  },
+  "license": "MIT",
+  "scripts": {
+    "build": "tsc",
+    "test": "jest --env node --verbose --bail test/test.ts",
+    "test-e2e": "jest --env node --verbose --bail test/e2e.test.ts",
+    "lint": "eslint . --ext .ts",
+    "pack": "node ../../scripts/pack.mjs"
+  }
+}
\ No newline at end of file
diff --git a/node_modules/@npmcli/agent/package.json b/node_modules/@npmcli/agent/package.json
index 0d0ec1bdfb418..c0bf65719db9a 100644
--- a/node_modules/@npmcli/agent/package.json
+++ b/node_modules/@npmcli/agent/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/agent",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "description": "the http/https agent used by the npm cli",
   "main": "lib/index.js",
   "scripts": {
@@ -37,6 +37,12 @@
       "18.x"
     ]
   },
+  "dependencies": {
+    "http-proxy-agent": "^7.0.0",
+    "https-proxy-agent": "^7.0.1",
+    "lru-cache": "^10.0.1",
+    "socks-proxy-agent": "^8.0.1"
+  },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/template-oss": "4.18.0",
@@ -54,9 +60,5 @@
       "--exclude",
       "tap-snapshots/**"
     ]
-  },
-  "dependencies": {
-    "lru-cache": "^10.0.1",
-    "socks": "^2.7.1"
   }
 }
diff --git a/package-lock.json b/package-lock.json
index 26628070149f4..4e41c82f33bda 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -2314,18 +2314,72 @@
       }
     },
     "node_modules/@npmcli/agent": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.0.0.tgz",
-      "integrity": "sha512-RpRbD6PnaQIUl+p8MoH7sl2CHyMofCO0abOV+0VulqKW84+0nRWnj0bYFQELTN5HpNvzWAV8pRN6Fjx9ZLOS0g==",
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.1.0.tgz",
+      "integrity": "sha512-/HFJP3a/DzgIg+6TWVee3bQmnBcWeKKYE9DKQqS8SWpAV8oYDTn/zkDM8iQ7bWI6kDDgNfHOlEFZZpN/UXMwig==",
       "inBundle": true,
       "dependencies": {
+        "http-proxy-agent": "^7.0.0",
+        "https-proxy-agent": "^7.0.1",
         "lru-cache": "^10.0.1",
-        "socks": "^2.7.1"
+        "socks-proxy-agent": "^8.0.1"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/agent/node_modules/agent-base": {
+      "version": "7.1.0",
+      "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz",
+      "integrity": "sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg==",
+      "inBundle": true,
+      "dependencies": {
+        "debug": "^4.3.4"
+      },
+      "engines": {
+        "node": ">= 14"
+      }
+    },
+    "node_modules/@npmcli/agent/node_modules/http-proxy-agent": {
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.0.tgz",
+      "integrity": "sha512-+ZT+iBxVUQ1asugqnD6oWoRiS25AkjNfG085dKJGtGxkdwLQrMKU5wJr2bOOFAXzKcTuqq+7fZlTMgG3SRfIYQ==",
+      "inBundle": true,
+      "dependencies": {
+        "agent-base": "^7.1.0",
+        "debug": "^4.3.4"
+      },
+      "engines": {
+        "node": ">= 14"
+      }
+    },
+    "node_modules/@npmcli/agent/node_modules/https-proxy-agent": {
+      "version": "7.0.1",
+      "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.1.tgz",
+      "integrity": "sha512-Eun8zV0kcYS1g19r78osiQLEFIRspRUDd9tIfBCTBPBeMieF/EsJNL8VI3xOIdYRDEkjQnqOYPsZ2DsWsVsFwQ==",
+      "inBundle": true,
+      "dependencies": {
+        "agent-base": "^7.0.2",
+        "debug": "4"
+      },
+      "engines": {
+        "node": ">= 14"
+      }
+    },
+    "node_modules/@npmcli/agent/node_modules/socks-proxy-agent": {
+      "version": "8.0.1",
+      "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.1.tgz",
+      "integrity": "sha512-59EjPbbgg8U3x62hhKOFVAmySQUcfRQ4C7Q/D5sEHnZTQRrQlNKINks44DMR1gwXp0p4LaVIeccX2KHTTcHVqQ==",
+      "inBundle": true,
+      "dependencies": {
+        "agent-base": "^7.0.1",
+        "debug": "^4.3.4",
+        "socks": "^2.7.1"
+      },
+      "engines": {
+        "node": ">= 14"
+      }
+    },
     "node_modules/@npmcli/arborist": {
       "resolved": "workspaces/arborist",
       "link": true

From 364399968f28f729add44f00e4177dd5c777f25d Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Wed, 30 Aug 2023 10:27:13 -0700
Subject: [PATCH 62/68] deps: @npmcli/promise-spawn@7.0.0

---
 DEPENDENCIES.md                               |   2 -
 node_modules/.gitignore                       |  15 +-
 .../@npmcli/promise-spawn/LICENSE             |  15 ++
 .../@npmcli/promise-spawn/lib/escape.js       |  68 ++++++
 .../@npmcli/promise-spawn/lib/index.js        | 195 ++++++++++++++++++
 .../@npmcli/promise-spawn/package.json        |  50 +++++
 .../@npmcli/promise-spawn/package.json        |  18 +-
 .../@npmcli/promise-spawn/LICENSE             |  15 ++
 .../@npmcli/promise-spawn/lib/escape.js       |  68 ++++++
 .../@npmcli/promise-spawn/lib/index.js        | 195 ++++++++++++++++++
 .../@npmcli/promise-spawn/package.json        |  50 +++++
 .../@npmcli/promise-spawn/LICENSE             |  15 ++
 .../@npmcli/promise-spawn/lib/escape.js       |  68 ++++++
 .../@npmcli/promise-spawn/lib/index.js        | 195 ++++++++++++++++++
 .../@npmcli/promise-spawn/package.json        |  50 +++++
 .../node_modules/which/LICENSE                |   0
 .../node_modules/which/bin/which.js           |   0
 .../node_modules/which/lib/index.js           |   0
 .../node_modules/which/package.json           |   0
 package-lock.json                             |  92 +++++++--
 package.json                                  |   2 +-
 smoke-tests/package.json                      |   2 +-
 22 files changed, 1080 insertions(+), 35 deletions(-)
 create mode 100644 node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/LICENSE
 create mode 100644 node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/escape.js
 create mode 100644 node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/index.js
 create mode 100644 node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/package.json
 create mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/LICENSE
 create mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/escape.js
 create mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/index.js
 create mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/package.json
 create mode 100644 node_modules/pacote/node_modules/@npmcli/promise-spawn/LICENSE
 create mode 100644 node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/escape.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/index.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/promise-spawn/package.json
 rename node_modules/{@npmcli/promise-spawn => pacote}/node_modules/which/LICENSE (100%)
 rename node_modules/{@npmcli/promise-spawn => pacote}/node_modules/which/bin/which.js (100%)
 rename node_modules/{@npmcli/promise-spawn => pacote}/node_modules/which/lib/index.js (100%)
 rename node_modules/{@npmcli/promise-spawn => pacote}/node_modules/which/package.json (100%)

diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 773b6aeffa54d..80e9325831c5a 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -115,7 +115,6 @@ graph LR;
   npm-->npm-profile;
   npm-->npm-registry-fetch;
   npm-->npm-user-validate;
-  npm-->npmcli-agent["@npmcli/agent"];
   npm-->npmcli-arborist["@npmcli/arborist"];
   npm-->npmcli-config["@npmcli/config"];
   npm-->npmcli-docs["@npmcli/docs"];
@@ -541,7 +540,6 @@ graph LR;
   npm-->npm-profile;
   npm-->npm-registry-fetch;
   npm-->npm-user-validate;
-  npm-->npmcli-agent["@npmcli/agent"];
   npm-->npmcli-arborist["@npmcli/arborist"];
   npm-->npmcli-config["@npmcli/config"];
   npm-->npmcli-docs["@npmcli/docs"];
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 9076b038b0ceb..735440933fc4d 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -30,6 +30,9 @@
 !/@npmcli/git
 !/@npmcli/git/node_modules/
 /@npmcli/git/node_modules/*
+!/@npmcli/git/node_modules/@npmcli/
+/@npmcli/git/node_modules/@npmcli/*
+!/@npmcli/git/node_modules/@npmcli/promise-spawn
 !/@npmcli/git/node_modules/which
 !/@npmcli/installed-package-contents
 !/@npmcli/map-workspaces
@@ -38,13 +41,13 @@
 !/@npmcli/node-gyp
 !/@npmcli/package-json
 !/@npmcli/promise-spawn
-!/@npmcli/promise-spawn/node_modules/
-/@npmcli/promise-spawn/node_modules/*
-!/@npmcli/promise-spawn/node_modules/which
 !/@npmcli/query
 !/@npmcli/run-script
 !/@npmcli/run-script/node_modules/
 /@npmcli/run-script/node_modules/*
+!/@npmcli/run-script/node_modules/@npmcli/
+/@npmcli/run-script/node_modules/@npmcli/*
+!/@npmcli/run-script/node_modules/@npmcli/promise-spawn
 !/@npmcli/run-script/node_modules/which
 !/@pkgjs/
 /@pkgjs/*
@@ -226,6 +229,12 @@
 !/once
 !/p-map
 !/pacote
+!/pacote/node_modules/
+/pacote/node_modules/*
+!/pacote/node_modules/@npmcli/
+/pacote/node_modules/@npmcli/*
+!/pacote/node_modules/@npmcli/promise-spawn
+!/pacote/node_modules/which
 !/parse-conflict-json
 !/path-is-absolute
 !/path-key
diff --git a/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/LICENSE b/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/LICENSE
new file mode 100644
index 0000000000000..8f90f96f4c6c5
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
+OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
+SOFTWARE.
diff --git a/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/escape.js b/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/escape.js
new file mode 100644
index 0000000000000..9aca8bde70a6e
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/escape.js
@@ -0,0 +1,68 @@
+'use strict'
+
+// eslint-disable-next-line max-len
+// this code adapted from: https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
+const cmd = (input, doubleEscape) => {
+  if (!input.length) {
+    return '""'
+  }
+
+  let result
+  if (!/[ \t\n\v"]/.test(input)) {
+    result = input
+  } else {
+    result = '"'
+    for (let i = 0; i <= input.length; ++i) {
+      let slashCount = 0
+      while (input[i] === '\\') {
+        ++i
+        ++slashCount
+      }
+
+      if (i === input.length) {
+        result += '\\'.repeat(slashCount * 2)
+        break
+      }
+
+      if (input[i] === '"') {
+        result += '\\'.repeat(slashCount * 2 + 1)
+        result += input[i]
+      } else {
+        result += '\\'.repeat(slashCount)
+        result += input[i]
+      }
+    }
+    result += '"'
+  }
+
+  // and finally, prefix shell meta chars with a ^
+  result = result.replace(/[ !%^&()<>|"]/g, '^$&')
+  if (doubleEscape) {
+    result = result.replace(/[ !%^&()<>|"]/g, '^$&')
+  }
+
+  return result
+}
+
+const sh = (input) => {
+  if (!input.length) {
+    return `''`
+  }
+
+  if (!/[\t\n\r "#$&'()*;<>?\\`|~]/.test(input)) {
+    return input
+  }
+
+  // replace single quotes with '\'' and wrap the whole result in a fresh set of quotes
+  const result = `'${input.replace(/'/g, `'\\''`)}'`
+    // if the input string already had single quotes around it, clean those up
+    .replace(/^(?:'')+(?!$)/, '')
+    .replace(/\\'''/g, `\\'`)
+
+  return result
+}
+
+module.exports = {
+  cmd,
+  sh,
+}
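
The escaping above is easiest to see on concrete inputs. A minimal sketch of what
the two escapers return (the require path is illustrative; in the package this
file ships as lib/escape.js):

    const { sh, cmd } = require('./escape.js')

    // POSIX sh: wrap in single quotes, splicing embedded quotes out as '\''
    sh("it's here")      // => 'it'\''s here'

    // cmd.exe: backslash-double the quotes, then caret-escape metacharacters
    cmd('"hi"', false)   // => ^"\^"hi\^"^"
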
diff --git a/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/index.js b/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/index.js
new file mode 100644
index 0000000000000..571ff6b9169c9
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/index.js
@@ -0,0 +1,195 @@
+'use strict'
+
+const { spawn } = require('child_process')
+const os = require('os')
+const which = require('which')
+
+const escape = require('./escape.js')
+
+// 'extra' object is for decorating the error a bit more
+const promiseSpawn = (cmd, args, opts = {}, extra = {}) => {
+  if (opts.shell) {
+    return spawnWithShell(cmd, args, opts, extra)
+  }
+
+  let proc
+
+  const p = new Promise((res, rej) => {
+    proc = spawn(cmd, args, opts)
+
+    const stdout = []
+    const stderr = []
+
+    const reject = er => rej(Object.assign(er, {
+      cmd,
+      args,
+      ...stdioResult(stdout, stderr, opts),
+      ...extra,
+    }))
+
+    proc.on('error', reject)
+
+    if (proc.stdout) {
+      proc.stdout.on('data', c => stdout.push(c)).on('error', reject)
+      proc.stdout.on('error', er => reject(er))
+    }
+
+    if (proc.stderr) {
+      proc.stderr.on('data', c => stderr.push(c)).on('error', reject)
+      proc.stderr.on('error', er => reject(er))
+    }
+
+    proc.on('close', (code, signal) => {
+      const result = {
+        cmd,
+        args,
+        code,
+        signal,
+        ...stdioResult(stdout, stderr, opts),
+        ...extra,
+      }
+
+      if (code || signal) {
+        rej(Object.assign(new Error('command failed'), result))
+      } else {
+        res(result)
+      }
+    })
+  })
+
+  p.stdin = proc.stdin
+  p.process = proc
+  return p
+}
+
+const spawnWithShell = (cmd, args, opts, extra) => {
+  let command = opts.shell
+  // if shell is set to true, we use a platform default. we can't let the core
+  // spawn method decide this for us because we need to know what shell is in use
+  // ahead of time so that we can escape arguments properly. we don't need coverage here.
+  if (command === true) {
+    // istanbul ignore next
+    command = process.platform === 'win32' ? process.env.ComSpec : 'sh'
+  }
+
+  const options = { ...opts, shell: false }
+  const realArgs = []
+  let script = cmd
+
+  // first, determine if we're in windows because if we are we need to know if we're
+  // running an .exe or a .cmd/.bat since the latter requires extra escaping
+  const isCmd = /(?:^|\\)cmd(?:\.exe)?$/i.test(command)
+  if (isCmd) {
+    let doubleEscape = false
+
+    // find the actual command we're running
+    let initialCmd = ''
+    let insideQuotes = false
+    for (let i = 0; i < cmd.length; ++i) {
+      const char = cmd.charAt(i)
+      if (char === ' ' && !insideQuotes) {
+        break
+      }
+
+      initialCmd += char
+      if (char === '"' || char === "'") {
+        insideQuotes = !insideQuotes
+      }
+    }
+
+    let pathToInitial
+    try {
+      pathToInitial = which.sync(initialCmd, {
+        path: (options.env && options.env.PATH) || process.env.PATH,
+        pathext: (options.env && options.env.PATHEXT) || process.env.PATHEXT,
+      }).toLowerCase()
+    } catch (err) {
+      pathToInitial = initialCmd.toLowerCase()
+    }
+
+    doubleEscape = pathToInitial.endsWith('.cmd') || pathToInitial.endsWith('.bat')
+    for (const arg of args) {
+      script += ` ${escape.cmd(arg, doubleEscape)}`
+    }
+    realArgs.push('/d', '/s', '/c', script)
+    options.windowsVerbatimArguments = true
+  } else {
+    for (const arg of args) {
+      script += ` ${escape.sh(arg)}`
+    }
+    realArgs.push('-c', script)
+  }
+
+  return promiseSpawn(command, realArgs, options, extra)
+}
+
+// open a file with the default application as defined by the user's OS
+const open = (_args, opts = {}, extra = {}) => {
+  const options = { ...opts, shell: true }
+  const args = [].concat(_args)
+
+  let platform = process.platform
+  // process.platform === 'linux' may actually indicate WSL, if that's the case
+  // we want to treat things as win32 anyway so the host can open the argument
+  if (platform === 'linux' && os.release().toLowerCase().includes('microsoft')) {
+    platform = 'win32'
+  }
+
+  let command = options.command
+  if (!command) {
+    if (platform === 'win32') {
+      // spawnWithShell does not do the additional os.release() check, so we
+      // have to force the shell here to make sure we treat WSL as windows.
+      options.shell = process.env.ComSpec
+      // also, the start command accepts a title so to make sure that we don't
+      // accidentally interpret the first arg as the title, we stick an empty
+      // string immediately after the start command
+      command = 'start ""'
+    } else if (platform === 'darwin') {
+      command = 'open'
+    } else {
+      command = 'xdg-open'
+    }
+  }
+
+  return spawnWithShell(command, args, options, extra)
+}
+promiseSpawn.open = open
+
+const isPipe = (stdio = 'pipe', fd) => {
+  if (stdio === 'pipe' || stdio === null) {
+    return true
+  }
+
+  if (Array.isArray(stdio)) {
+    return isPipe(stdio[fd], fd)
+  }
+
+  return false
+}
+
+const stdioResult = (stdout, stderr, { stdioString = true, stdio }) => {
+  const result = {
+    stdout: null,
+    stderr: null,
+  }
+
+  // stdio is [stdin, stdout, stderr]
+  if (isPipe(stdio, 1)) {
+    result.stdout = Buffer.concat(stdout)
+    if (stdioString) {
+      result.stdout = result.stdout.toString().trim()
+    }
+  }
+
+  if (isPipe(stdio, 2)) {
+    result.stderr = Buffer.concat(stderr)
+    if (stdioString) {
+      result.stderr = result.stderr.toString().trim()
+    }
+  }
+
+  return result
+}
+
+module.exports = promiseSpawn
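
For reference, the promise above resolves (and rejects) with { cmd, args, code,
signal, stdout, stderr }, the captured streams trimmed to strings because
stdioString defaults to true. A minimal usage sketch (command and handling are
illustrative):

    const promiseSpawn = require('@npmcli/promise-spawn')

    promiseSpawn('git', ['--version'], { stdio: 'pipe' })
      .then(({ code, stdout }) => console.log(code, stdout))  // 0 'git version ...'
      .catch(er => console.error(er.code, er.stderr))         // rejection carries the same fields
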
diff --git a/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/package.json b/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/package.json
new file mode 100644
index 0000000000000..2080d9f5be9f0
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/package.json
@@ -0,0 +1,50 @@
+{
+  "name": "@npmcli/promise-spawn",
+  "version": "6.0.2",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "main": "./lib/index.js",
+  "description": "spawn processes the way the npm cli likes to do",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/promise-spawn.git"
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "tap": {
+    "check-coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.11.0",
+    "minipass": "^4.0.0",
+    "spawk": "^1.7.1",
+    "tap": "^16.0.1"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.11.0"
+  },
+  "dependencies": {
+    "which": "^3.0.0"
+  }
+}
diff --git a/node_modules/@npmcli/promise-spawn/package.json b/node_modules/@npmcli/promise-spawn/package.json
index 2080d9f5be9f0..ffd89f1083341 100644
--- a/node_modules/@npmcli/promise-spawn/package.json
+++ b/node_modules/@npmcli/promise-spawn/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/promise-spawn",
-  "version": "6.0.2",
+  "version": "7.0.0",
   "files": [
     "bin/",
     "lib/"
@@ -32,19 +32,25 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.11.0",
-    "minipass": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
     "spawk": "^1.7.1",
     "tap": "^16.0.1"
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.11.0"
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ],
+    "version": "4.18.0",
+    "publish": true
   },
   "dependencies": {
-    "which": "^3.0.0"
+    "which": "^4.0.0"
   }
 }
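
The practical effect of this bump is dropping Node 14 and early 16.x support,
alongside the move to which@4. A quick sanity check of a runtime against the new
range, using the semver package (illustrative, not part of the patch):

    const semver = require('semver')
    semver.satisfies(process.version, '^16.14.0 || >=18.0.0')
    // => false on node 14.x, true on 18.x and later
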
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/LICENSE b/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/LICENSE
new file mode 100644
index 0000000000000..8f90f96f4c6c5
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
+OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
+SOFTWARE.
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/escape.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/escape.js
new file mode 100644
index 0000000000000..9aca8bde70a6e
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/escape.js
@@ -0,0 +1,68 @@
+'use strict'
+
+// eslint-disable-next-line max-len
+// this code adapted from: https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
+const cmd = (input, doubleEscape) => {
+  if (!input.length) {
+    return '""'
+  }
+
+  let result
+  if (!/[ \t\n\v"]/.test(input)) {
+    result = input
+  } else {
+    result = '"'
+    for (let i = 0; i <= input.length; ++i) {
+      let slashCount = 0
+      while (input[i] === '\\') {
+        ++i
+        ++slashCount
+      }
+
+      if (i === input.length) {
+        result += '\\'.repeat(slashCount * 2)
+        break
+      }
+
+      if (input[i] === '"') {
+        result += '\\'.repeat(slashCount * 2 + 1)
+        result += input[i]
+      } else {
+        result += '\\'.repeat(slashCount)
+        result += input[i]
+      }
+    }
+    result += '"'
+  }
+
+  // and finally, prefix shell meta chars with a ^
+  result = result.replace(/[ !%^&()<>|"]/g, '^$&')
+  if (doubleEscape) {
+    result = result.replace(/[ !%^&()<>|"]/g, '^$&')
+  }
+
+  return result
+}
+
+const sh = (input) => {
+  if (!input.length) {
+    return `''`
+  }
+
+  if (!/[\t\n\r "#$&'()*;<>?\\`|~]/.test(input)) {
+    return input
+  }
+
+  // replace single quotes with '\'' and wrap the whole result in a fresh set of quotes
+  const result = `'${input.replace(/'/g, `'\\''`)}'`
+    // if the input string already had single quotes around it, clean those up
+    .replace(/^(?:'')+(?!$)/, '')
+    .replace(/\\'''/g, `\\'`)
+
+  return result
+}
+
+module.exports = {
+  cmd,
+  sh,
+}
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/index.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/index.js
new file mode 100644
index 0000000000000..571ff6b9169c9
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/index.js
@@ -0,0 +1,195 @@
+'use strict'
+
+const { spawn } = require('child_process')
+const os = require('os')
+const which = require('which')
+
+const escape = require('./escape.js')
+
+// 'extra' object is for decorating the error a bit more
+const promiseSpawn = (cmd, args, opts = {}, extra = {}) => {
+  if (opts.shell) {
+    return spawnWithShell(cmd, args, opts, extra)
+  }
+
+  let proc
+
+  const p = new Promise((res, rej) => {
+    proc = spawn(cmd, args, opts)
+
+    const stdout = []
+    const stderr = []
+
+    const reject = er => rej(Object.assign(er, {
+      cmd,
+      args,
+      ...stdioResult(stdout, stderr, opts),
+      ...extra,
+    }))
+
+    proc.on('error', reject)
+
+    if (proc.stdout) {
+      proc.stdout.on('data', c => stdout.push(c)).on('error', reject)
+      proc.stdout.on('error', er => reject(er))
+    }
+
+    if (proc.stderr) {
+      proc.stderr.on('data', c => stderr.push(c)).on('error', reject)
+      proc.stderr.on('error', er => reject(er))
+    }
+
+    proc.on('close', (code, signal) => {
+      const result = {
+        cmd,
+        args,
+        code,
+        signal,
+        ...stdioResult(stdout, stderr, opts),
+        ...extra,
+      }
+
+      if (code || signal) {
+        rej(Object.assign(new Error('command failed'), result))
+      } else {
+        res(result)
+      }
+    })
+  })
+
+  p.stdin = proc.stdin
+  p.process = proc
+  return p
+}
+
+const spawnWithShell = (cmd, args, opts, extra) => {
+  let command = opts.shell
+  // if shell is set to true, we use a platform default. we can't let the core
+  // spawn method decide this for us because we need to know what shell is in use
+  // ahead of time so that we can escape arguments properly. we don't need coverage here.
+  if (command === true) {
+    // istanbul ignore next
+    command = process.platform === 'win32' ? process.env.ComSpec : 'sh'
+  }
+
+  const options = { ...opts, shell: false }
+  const realArgs = []
+  let script = cmd
+
+  // first, determine if we're in windows because if we are we need to know if we're
+  // running an .exe or a .cmd/.bat since the latter requires extra escaping
+  const isCmd = /(?:^|\\)cmd(?:\.exe)?$/i.test(command)
+  if (isCmd) {
+    let doubleEscape = false
+
+    // find the actual command we're running
+    let initialCmd = ''
+    let insideQuotes = false
+    for (let i = 0; i < cmd.length; ++i) {
+      const char = cmd.charAt(i)
+      if (char === ' ' && !insideQuotes) {
+        break
+      }
+
+      initialCmd += char
+      if (char === '"' || char === "'") {
+        insideQuotes = !insideQuotes
+      }
+    }
+
+    let pathToInitial
+    try {
+      pathToInitial = which.sync(initialCmd, {
+        path: (options.env && options.env.PATH) || process.env.PATH,
+        pathext: (options.env && options.env.PATHEXT) || process.env.PATHEXT,
+      }).toLowerCase()
+    } catch (err) {
+      pathToInitial = initialCmd.toLowerCase()
+    }
+
+    doubleEscape = pathToInitial.endsWith('.cmd') || pathToInitial.endsWith('.bat')
+    for (const arg of args) {
+      script += ` ${escape.cmd(arg, doubleEscape)}`
+    }
+    realArgs.push('/d', '/s', '/c', script)
+    options.windowsVerbatimArguments = true
+  } else {
+    for (const arg of args) {
+      script += ` ${escape.sh(arg)}`
+    }
+    realArgs.push('-c', script)
+  }
+
+  return promiseSpawn(command, realArgs, options, extra)
+}
+
+// open a file with the default application as defined by the user's OS
+const open = (_args, opts = {}, extra = {}) => {
+  const options = { ...opts, shell: true }
+  const args = [].concat(_args)
+
+  let platform = process.platform
+  // process.platform === 'linux' may actually indicate WSL, if that's the case
+  // we want to treat things as win32 anyway so the host can open the argument
+  if (platform === 'linux' && os.release().toLowerCase().includes('microsoft')) {
+    platform = 'win32'
+  }
+
+  let command = options.command
+  if (!command) {
+    if (platform === 'win32') {
+      // spawnWithShell does not do the additional os.release() check, so we
+      // have to force the shell here to make sure we treat WSL as windows.
+      options.shell = process.env.ComSpec
+      // also, the start command accepts a title so to make sure that we don't
+      // accidentally interpret the first arg as the title, we stick an empty
+      // string immediately after the start command
+      command = 'start ""'
+    } else if (platform === 'darwin') {
+      command = 'open'
+    } else {
+      command = 'xdg-open'
+    }
+  }
+
+  return spawnWithShell(command, args, options, extra)
+}
+promiseSpawn.open = open
+
+const isPipe = (stdio = 'pipe', fd) => {
+  if (stdio === 'pipe' || stdio === null) {
+    return true
+  }
+
+  if (Array.isArray(stdio)) {
+    return isPipe(stdio[fd], fd)
+  }
+
+  return false
+}
+
+const stdioResult = (stdout, stderr, { stdioString = true, stdio }) => {
+  const result = {
+    stdout: null,
+    stderr: null,
+  }
+
+  // stdio is [stdin, stdout, stderr]
+  if (isPipe(stdio, 1)) {
+    result.stdout = Buffer.concat(stdout)
+    if (stdioString) {
+      result.stdout = result.stdout.toString().trim()
+    }
+  }
+
+  if (isPipe(stdio, 2)) {
+    result.stderr = Buffer.concat(stderr)
+    if (stdioString) {
+      result.stderr = result.stderr.toString().trim()
+    }
+  }
+
+  return result
+}
+
+module.exports = promiseSpawn
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/package.json b/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/package.json
new file mode 100644
index 0000000000000..2080d9f5be9f0
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/package.json
@@ -0,0 +1,50 @@
+{
+  "name": "@npmcli/promise-spawn",
+  "version": "6.0.2",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "main": "./lib/index.js",
+  "description": "spawn processes the way the npm cli likes to do",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/promise-spawn.git"
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "tap": {
+    "check-coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.11.0",
+    "minipass": "^4.0.0",
+    "spawk": "^1.7.1",
+    "tap": "^16.0.1"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.11.0"
+  },
+  "dependencies": {
+    "which": "^3.0.0"
+  }
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/promise-spawn/LICENSE b/node_modules/pacote/node_modules/@npmcli/promise-spawn/LICENSE
new file mode 100644
index 0000000000000..8f90f96f4c6c5
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/promise-spawn/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
+OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
+SOFTWARE.
diff --git a/node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/escape.js b/node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/escape.js
new file mode 100644
index 0000000000000..9aca8bde70a6e
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/escape.js
@@ -0,0 +1,68 @@
+'use strict'
+
+// eslint-disable-next-line max-len
+// this code adapted from: https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
+const cmd = (input, doubleEscape) => {
+  if (!input.length) {
+    return '""'
+  }
+
+  let result
+  if (!/[ \t\n\v"]/.test(input)) {
+    result = input
+  } else {
+    result = '"'
+    for (let i = 0; i <= input.length; ++i) {
+      let slashCount = 0
+      while (input[i] === '\\') {
+        ++i
+        ++slashCount
+      }
+
+      if (i === input.length) {
+        result += '\\'.repeat(slashCount * 2)
+        break
+      }
+
+      if (input[i] === '"') {
+        result += '\\'.repeat(slashCount * 2 + 1)
+        result += input[i]
+      } else {
+        result += '\\'.repeat(slashCount)
+        result += input[i]
+      }
+    }
+    result += '"'
+  }
+
+  // and finally, prefix shell meta chars with a ^
+  result = result.replace(/[ !%^&()<>|"]/g, '^$&')
+  if (doubleEscape) {
+    result = result.replace(/[ !%^&()<>|"]/g, '^$&')
+  }
+
+  return result
+}
+
+const sh = (input) => {
+  if (!input.length) {
+    return `''`
+  }
+
+  if (!/[\t\n\r "#$&'()*;<>?\\`|~]/.test(input)) {
+    return input
+  }
+
+  // replace single quotes with '\'' and wrap the whole result in a fresh set of quotes
+  const result = `'${input.replace(/'/g, `'\\''`)}'`
+    // if the input string already had single quotes around it, clean those up
+    .replace(/^(?:'')+(?!$)/, '')
+    .replace(/\\'''/g, `\\'`)
+
+  return result
+}
+
+module.exports = {
+  cmd,
+  sh,
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/index.js b/node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/index.js
new file mode 100644
index 0000000000000..571ff6b9169c9
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/index.js
@@ -0,0 +1,195 @@
+'use strict'
+
+const { spawn } = require('child_process')
+const os = require('os')
+const which = require('which')
+
+const escape = require('./escape.js')
+
+// 'extra' object is for decorating the error a bit more
+const promiseSpawn = (cmd, args, opts = {}, extra = {}) => {
+  if (opts.shell) {
+    return spawnWithShell(cmd, args, opts, extra)
+  }
+
+  let proc
+
+  const p = new Promise((res, rej) => {
+    proc = spawn(cmd, args, opts)
+
+    const stdout = []
+    const stderr = []
+
+    const reject = er => rej(Object.assign(er, {
+      cmd,
+      args,
+      ...stdioResult(stdout, stderr, opts),
+      ...extra,
+    }))
+
+    proc.on('error', reject)
+
+    if (proc.stdout) {
+      proc.stdout.on('data', c => stdout.push(c)).on('error', reject)
+      proc.stdout.on('error', er => reject(er))
+    }
+
+    if (proc.stderr) {
+      proc.stderr.on('data', c => stderr.push(c)).on('error', reject)
+      proc.stderr.on('error', er => reject(er))
+    }
+
+    proc.on('close', (code, signal) => {
+      const result = {
+        cmd,
+        args,
+        code,
+        signal,
+        ...stdioResult(stdout, stderr, opts),
+        ...extra,
+      }
+
+      if (code || signal) {
+        rej(Object.assign(new Error('command failed'), result))
+      } else {
+        res(result)
+      }
+    })
+  })
+
+  p.stdin = proc.stdin
+  p.process = proc
+  return p
+}
+
+const spawnWithShell = (cmd, args, opts, extra) => {
+  let command = opts.shell
+  // if shell is set to true, we use a platform default. we can't let the core
+  // spawn method decide this for us because we need to know what shell is in use
+  // ahead of time so that we can escape arguments properly. we don't need coverage here.
+  if (command === true) {
+    // istanbul ignore next
+    command = process.platform === 'win32' ? process.env.ComSpec : 'sh'
+  }
+
+  const options = { ...opts, shell: false }
+  const realArgs = []
+  let script = cmd
+
+  // first, determine if we're in windows because if we are we need to know if we're
+  // running an .exe or a .cmd/.bat since the latter requires extra escaping
+  const isCmd = /(?:^|\\)cmd(?:\.exe)?$/i.test(command)
+  if (isCmd) {
+    let doubleEscape = false
+
+    // find the actual command we're running
+    let initialCmd = ''
+    let insideQuotes = false
+    for (let i = 0; i < cmd.length; ++i) {
+      const char = cmd.charAt(i)
+      if (char === ' ' && !insideQuotes) {
+        break
+      }
+
+      initialCmd += char
+      if (char === '"' || char === "'") {
+        insideQuotes = !insideQuotes
+      }
+    }
+
+    let pathToInitial
+    try {
+      pathToInitial = which.sync(initialCmd, {
+        path: (options.env && options.env.PATH) || process.env.PATH,
+        pathext: (options.env && options.env.PATHEXT) || process.env.PATHEXT,
+      }).toLowerCase()
+    } catch (err) {
+      pathToInitial = initialCmd.toLowerCase()
+    }
+
+    doubleEscape = pathToInitial.endsWith('.cmd') || pathToInitial.endsWith('.bat')
+    for (const arg of args) {
+      script += ` ${escape.cmd(arg, doubleEscape)}`
+    }
+    realArgs.push('/d', '/s', '/c', script)
+    options.windowsVerbatimArguments = true
+  } else {
+    for (const arg of args) {
+      script += ` ${escape.sh(arg)}`
+    }
+    realArgs.push('-c', script)
+  }
+
+  return promiseSpawn(command, realArgs, options, extra)
+}
+
+// open a file with the default application as defined by the user's OS
+const open = (_args, opts = {}, extra = {}) => {
+  const options = { ...opts, shell: true }
+  const args = [].concat(_args)
+
+  let platform = process.platform
+  // process.platform === 'linux' may actually indicate WSL, if that's the case
+  // we want to treat things as win32 anyway so the host can open the argument
+  if (platform === 'linux' && os.release().toLowerCase().includes('microsoft')) {
+    platform = 'win32'
+  }
+
+  let command = options.command
+  if (!command) {
+    if (platform === 'win32') {
+      // spawnWithShell does not do the additional os.release() check, so we
+      // have to force the shell here to make sure we treat WSL as windows.
+      options.shell = process.env.ComSpec
+      // also, the start command accepts a title so to make sure that we don't
+      // accidentally interpret the first arg as the title, we stick an empty
+      // string immediately after the start command
+      command = 'start ""'
+    } else if (platform === 'darwin') {
+      command = 'open'
+    } else {
+      command = 'xdg-open'
+    }
+  }
+
+  return spawnWithShell(command, args, options, extra)
+}
+promiseSpawn.open = open
+
+const isPipe = (stdio = 'pipe', fd) => {
+  if (stdio === 'pipe' || stdio === null) {
+    return true
+  }
+
+  if (Array.isArray(stdio)) {
+    return isPipe(stdio[fd], fd)
+  }
+
+  return false
+}
+
+const stdioResult = (stdout, stderr, { stdioString = true, stdio }) => {
+  const result = {
+    stdout: null,
+    stderr: null,
+  }
+
+  // stdio is [stdin, stdout, stderr]
+  if (isPipe(stdio, 1)) {
+    result.stdout = Buffer.concat(stdout)
+    if (stdioString) {
+      result.stdout = result.stdout.toString().trim()
+    }
+  }
+
+  if (isPipe(stdio, 2)) {
+    result.stderr = Buffer.concat(stderr)
+    if (stdioString) {
+      result.stderr = result.stderr.toString().trim()
+    }
+  }
+
+  return result
+}
+
+module.exports = promiseSpawn
diff --git a/node_modules/pacote/node_modules/@npmcli/promise-spawn/package.json b/node_modules/pacote/node_modules/@npmcli/promise-spawn/package.json
new file mode 100644
index 0000000000000..2080d9f5be9f0
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/promise-spawn/package.json
@@ -0,0 +1,50 @@
+{
+  "name": "@npmcli/promise-spawn",
+  "version": "6.0.2",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "main": "./lib/index.js",
+  "description": "spawn processes the way the npm cli likes to do",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/promise-spawn.git"
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "tap": {
+    "check-coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.11.0",
+    "minipass": "^4.0.0",
+    "spawk": "^1.7.1",
+    "tap": "^16.0.1"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.11.0"
+  },
+  "dependencies": {
+    "which": "^3.0.0"
+  }
+}
diff --git a/node_modules/@npmcli/promise-spawn/node_modules/which/LICENSE b/node_modules/pacote/node_modules/which/LICENSE
similarity index 100%
rename from node_modules/@npmcli/promise-spawn/node_modules/which/LICENSE
rename to node_modules/pacote/node_modules/which/LICENSE
diff --git a/node_modules/@npmcli/promise-spawn/node_modules/which/bin/which.js b/node_modules/pacote/node_modules/which/bin/which.js
similarity index 100%
rename from node_modules/@npmcli/promise-spawn/node_modules/which/bin/which.js
rename to node_modules/pacote/node_modules/which/bin/which.js
diff --git a/node_modules/@npmcli/promise-spawn/node_modules/which/lib/index.js b/node_modules/pacote/node_modules/which/lib/index.js
similarity index 100%
rename from node_modules/@npmcli/promise-spawn/node_modules/which/lib/index.js
rename to node_modules/pacote/node_modules/which/lib/index.js
diff --git a/node_modules/@npmcli/promise-spawn/node_modules/which/package.json b/node_modules/pacote/node_modules/which/package.json
similarity index 100%
rename from node_modules/@npmcli/promise-spawn/node_modules/which/package.json
rename to node_modules/pacote/node_modules/which/package.json
diff --git a/package-lock.json b/package-lock.json
index 4e41c82f33bda..8863b1bdf533a 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -92,7 +92,7 @@
         "@npmcli/fs": "^3.1.0",
         "@npmcli/map-workspaces": "^3.0.4",
         "@npmcli/package-json": "^5.0.0",
-        "@npmcli/promise-spawn": "^6.0.2",
+        "@npmcli/promise-spawn": "^7.0.0",
         "@npmcli/run-script": "^6.0.2",
         "@sigstore/tuf": "^2.1.0",
         "abbrev": "^2.0.0",
@@ -2470,6 +2470,18 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn": {
+      "version": "6.0.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-6.0.2.tgz",
+      "integrity": "sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg==",
+      "inBundle": true,
+      "dependencies": {
+        "which": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/git/node_modules/which": {
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
@@ -2575,30 +2587,15 @@
       }
     },
     "node_modules/@npmcli/promise-spawn": {
-      "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-6.0.2.tgz",
-      "integrity": "sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg==",
-      "inBundle": true,
-      "dependencies": {
-        "which": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/promise-spawn/node_modules/which": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
-      "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==",
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-7.0.0.tgz",
+      "integrity": "sha512-wBqcGsMELZna0jDblGd7UXgOby45TQaMWmbFwWX+SEotk4HV6zG2t6rT9siyLhPk4P6YYqgfL1UO8nMWDBVJXQ==",
       "inBundle": true,
       "dependencies": {
-        "isexe": "^2.0.0"
-      },
-      "bin": {
-        "node-which": "bin/which.js"
+        "which": "^4.0.0"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/@npmcli/query": {
@@ -2628,6 +2625,18 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn": {
+      "version": "6.0.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-6.0.2.tgz",
+      "integrity": "sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg==",
+      "inBundle": true,
+      "dependencies": {
+        "which": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/run-script/node_modules/which": {
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
@@ -2726,6 +2735,18 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/promise-spawn": {
+      "version": "6.0.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-6.0.2.tgz",
+      "integrity": "sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg==",
+      "dev": true,
+      "dependencies": {
+        "which": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/hosted-git-info": {
       "version": "6.1.1",
       "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz",
@@ -10539,6 +10560,33 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/pacote/node_modules/@npmcli/promise-spawn": {
+      "version": "6.0.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-6.0.2.tgz",
+      "integrity": "sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg==",
+      "inBundle": true,
+      "dependencies": {
+        "which": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/pacote/node_modules/which": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
+      "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==",
+      "inBundle": true,
+      "dependencies": {
+        "isexe": "^2.0.0"
+      },
+      "bin": {
+        "node-which": "bin/which.js"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/parent-module": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -15952,7 +16000,7 @@
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
         "@npmcli/mock-registry": "^1.0.0",
-        "@npmcli/promise-spawn": "^6.0.2",
+        "@npmcli/promise-spawn": "^7.0.0",
         "@npmcli/template-oss": "4.18.0",
         "http-proxy": "^1.18.1",
         "tap": "^16.3.8",
diff --git a/package.json b/package.json
index dec1f7707b071..cf775c0f32760 100644
--- a/package.json
+++ b/package.json
@@ -57,7 +57,7 @@
     "@npmcli/fs": "^3.1.0",
     "@npmcli/map-workspaces": "^3.0.4",
     "@npmcli/package-json": "^5.0.0",
-    "@npmcli/promise-spawn": "^6.0.2",
+    "@npmcli/promise-spawn": "^7.0.0",
     "@npmcli/run-script": "^6.0.2",
     "@sigstore/tuf": "^2.1.0",
     "abbrev": "^2.0.0",
diff --git a/smoke-tests/package.json b/smoke-tests/package.json
index 14ee2f5b367cb..6a9d9e7a3ffae 100644
--- a/smoke-tests/package.json
+++ b/smoke-tests/package.json
@@ -20,7 +20,7 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
     "@npmcli/mock-registry": "^1.0.0",
-    "@npmcli/promise-spawn": "^6.0.2",
+    "@npmcli/promise-spawn": "^7.0.0",
     "@npmcli/template-oss": "4.18.0",
     "http-proxy": "^1.18.1",
     "tap": "^16.3.8",

From eb4dc0f31ed501f58cd3acfe4f55479b69716407 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Wed, 30 Aug 2023 10:28:26 -0700
Subject: [PATCH 63/68] deps: @npmcli/run-script@7.0.0

---
 node_modules/.gitignore                       |   1 +
 node_modules/@npmcli/run-script/package.json  |  16 ++-
 .../node_modules/@npmcli/run-script/LICENSE   |  15 +++
 .../run-script/lib/is-server-package.js       |  12 ++
 .../@npmcli/run-script/lib/is-windows.js      |   2 +
 .../@npmcli/run-script/lib/make-spawn-args.js |  40 +++++++
 .../run-script/lib/node-gyp-bin/node-gyp      |   2 +
 .../run-script/lib/node-gyp-bin/node-gyp.cmd  |   1 +
 .../@npmcli/run-script/lib/package-envs.js    |  26 ++++
 .../@npmcli/run-script/lib/run-script-pkg.js  | 112 ++++++++++++++++++
 .../@npmcli/run-script/lib/run-script.js      |  14 +++
 .../@npmcli/run-script/lib/set-path.js        |  45 +++++++
 .../@npmcli/run-script/lib/signal-manager.js  |  49 ++++++++
 .../run-script/lib/validate-options.js        |  39 ++++++
 .../@npmcli/run-script/package.json           |  53 +++++++++
 package-lock.json                             |  36 ++++--
 package.json                                  |   2 +-
 workspaces/arborist/package.json              |   2 +-
 workspaces/libnpmexec/package.json            |   2 +-
 workspaces/libnpmpack/package.json            |   2 +-
 workspaces/libnpmversion/package.json         |   2 +-
 21 files changed, 453 insertions(+), 20 deletions(-)
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/LICENSE
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/is-windows.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js
 create mode 100755 node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp
 create mode 100755 node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 735440933fc4d..580fc628b2609 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -234,6 +234,7 @@
 !/pacote/node_modules/@npmcli/
 /pacote/node_modules/@npmcli/*
 !/pacote/node_modules/@npmcli/promise-spawn
+!/pacote/node_modules/@npmcli/run-script
 !/pacote/node_modules/which
 !/parse-conflict-json
 !/path-is-absolute
diff --git a/node_modules/@npmcli/run-script/package.json b/node_modules/@npmcli/run-script/package.json
index 38f6f72fa6ad9..a54df3d1cb609 100644
--- a/node_modules/@npmcli/run-script/package.json
+++ b/node_modules/@npmcli/run-script/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/run-script",
-  "version": "6.0.2",
+  "version": "7.0.0",
   "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
   "author": "GitHub Inc.",
   "license": "ISC",
@@ -16,7 +16,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.15.1",
+    "@npmcli/template-oss": "4.18.0",
     "require-inject": "^1.4.4",
     "tap": "^16.0.1"
   },
@@ -25,7 +25,7 @@
     "@npmcli/promise-spawn": "^6.0.0",
     "node-gyp": "^9.0.0",
     "read-package-json-fast": "^3.0.0",
-    "which": "^3.0.0"
+    "which": "^4.0.0"
   },
   "files": [
     "bin/",
@@ -37,11 +37,17 @@
     "url": "https://github.com/npm/run-script.git"
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.15.1",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ],
+    "version": "4.18.0",
     "publish": "true"
   },
   "tap": {
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/LICENSE b/node_modules/pacote/node_modules/@npmcli/run-script/LICENSE
new file mode 100644
index 0000000000000..19cec97b18468
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js
new file mode 100644
index 0000000000000..d168623247527
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js
@@ -0,0 +1,12 @@
+const util = require('util')
+const fs = require('fs')
+const { stat } = fs.promises || { stat: util.promisify(fs.stat) }
+const { resolve } = require('path')
+module.exports = async path => {
+  try {
+    const st = await stat(resolve(path, 'server.js'))
+    return st.isFile()
+  } catch (er) {
+    return false
+  }
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-windows.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-windows.js
new file mode 100644
index 0000000000000..651917e6ad27a
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-windows.js
@@ -0,0 +1,2 @@
+const platform = process.env.__FAKE_TESTING_PLATFORM__ || process.platform
+module.exports = platform === 'win32'
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js
new file mode 100644
index 0000000000000..2b2f96a91c8d5
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js
@@ -0,0 +1,40 @@
+/* eslint camelcase: "off" */
+const setPATH = require('./set-path.js')
+const { resolve } = require('path')
+const npm_config_node_gyp = require.resolve('node-gyp/bin/node-gyp.js')
+
+const makeSpawnArgs = options => {
+  const {
+    event,
+    path,
+    scriptShell = true,
+    binPaths,
+    env = {},
+    stdio,
+    cmd,
+    args = [],
+    stdioString,
+  } = options
+
+  const spawnEnv = setPATH(path, binPaths, {
+    // we need to at least save the PATH environment var
+    ...process.env,
+    ...env,
+    npm_package_json: resolve(path, 'package.json'),
+    npm_lifecycle_event: event,
+    npm_lifecycle_script: cmd,
+    npm_config_node_gyp,
+  })
+
+  const spawnOpts = {
+    env: spawnEnv,
+    stdioString,
+    stdio,
+    cwd: path,
+    shell: scriptShell,
+  }
+
+  return [cmd, args, spawnOpts]
+}
+
+module.exports = makeSpawnArgs
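
The tuple returned above is handed to @npmcli/promise-spawn with shell enabled,
so the script string gets the escaping shown earlier in this series. Roughly what
it builds for a lifecycle event (values illustrative):

    const [cmd, args, opts] = makeSpawnArgs({
      event: 'build',
      path: '/proj',
      cmd: 'node build.js',
      binPaths: ['/proj/node_modules/.bin'],
    })
    // cmd === 'node build.js', args === [], opts.shell === true, opts.cwd === '/proj'
    // opts.env gains npm_lifecycle_event, npm_lifecycle_script, npm_package_json,
    // and npm_config_node_gyp (consumed by the shims in lib/node-gyp-bin)
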
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp b/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp
new file mode 100755
index 0000000000000..5bec64d961a3a
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp
@@ -0,0 +1,2 @@
+#!/usr/bin/env sh
+node "$npm_config_node_gyp" "$@"
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd b/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd
new file mode 100755
index 0000000000000..4c6987ac9868b
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd
@@ -0,0 +1 @@
+@node "%npm_config_node_gyp%" %*
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js
new file mode 100644
index 0000000000000..6b538e50247fd
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js
@@ -0,0 +1,26 @@
+// https://github.com/npm/rfcs/pull/183
+
+const envVal = val => Array.isArray(val) ? val.map(v => envVal(v)).join('\n\n')
+  : val === null || val === false ? ''
+  : String(val)
+
+const packageEnvs = (env, vals, prefix) => {
+  for (const [key, val] of Object.entries(vals)) {
+    if (val === undefined) {
+      continue
+    } else if (val && !Array.isArray(val) && typeof val === 'object') {
+      packageEnvs(env, val, `${prefix}${key}_`)
+    } else {
+      env[`${prefix}${key}`] = envVal(val)
+    }
+  }
+  return env
+}
+
+module.exports = (env, pkg) => packageEnvs({ ...env }, {
+  name: pkg.name,
+  version: pkg.version,
+  config: pkg.config,
+  engines: pkg.engines,
+  bin: pkg.bin,
+}, 'npm_package_')
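
Concretely, the recursion above flattens the selected package.json fields into
npm_package_* variables, stringifying scalars, joining arrays with blank lines,
and blanking out null/false. A worked example (illustrative):

    const packageEnvs = require('./package-envs.js')
    const env = packageEnvs({}, {
      name: 'demo',
      version: '1.0.0',
      config: { port: 8080, flags: { verbose: true } },
    })
    // env.npm_package_name                 === 'demo'
    // env.npm_package_version              === '1.0.0'
    // env.npm_package_config_port          === '8080'
    // env.npm_package_config_flags_verbose === 'true'
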
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js
new file mode 100644
index 0000000000000..a5518285d1af1
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js
@@ -0,0 +1,112 @@
+const makeSpawnArgs = require('./make-spawn-args.js')
+const promiseSpawn = require('@npmcli/promise-spawn')
+const packageEnvs = require('./package-envs.js')
+const { isNodeGypPackage, defaultGypInstallScript } = require('@npmcli/node-gyp')
+const signalManager = require('./signal-manager.js')
+const isServerPackage = require('./is-server-package.js')
+
+// you wouldn't like me when I'm angry...
+const bruce = (id, event, cmd, args) => {
+  let banner = id
+    ? `\n> ${id} ${event}\n`
+    : `\n> ${event}\n`
+  banner += `> ${cmd.trim().replace(/\n/g, '\n> ')}`
+  if (args.length) {
+    banner += ` ${args.join(' ')}`
+  }
+  banner += '\n'
+  return banner
+}
+
+const runScriptPkg = async options => {
+  const {
+    event,
+    path,
+    scriptShell,
+    binPaths = false,
+    env = {},
+    stdio = 'pipe',
+    pkg,
+    args = [],
+    stdioString,
+    // note: only used when stdio:inherit
+    banner = true,
+    // how long to wait for a process.kill signal
+    // only exposed here so that we can make the test go a bit faster.
+    signalTimeout = 500,
+  } = options
+
+  const { scripts = {}, gypfile } = pkg
+  let cmd = null
+  if (options.cmd) {
+    cmd = options.cmd
+  } else if (pkg.scripts && pkg.scripts[event]) {
+    cmd = pkg.scripts[event]
+  } else if (
+    // If there is no preinstall or install script, default to rebuilding node-gyp packages.
+    event === 'install' &&
+    !scripts.install &&
+    !scripts.preinstall &&
+    gypfile !== false &&
+    await isNodeGypPackage(path)
+  ) {
+    cmd = defaultGypInstallScript
+  } else if (event === 'start' && await isServerPackage(path)) {
+    cmd = 'node server.js'
+  }
+
+  if (!cmd) {
+    return { code: 0, signal: null }
+  }
+
+  if (stdio === 'inherit' && banner !== false) {
+    // we're dumping to the parent's stdout, so print the banner
+    console.log(bruce(pkg._id, event, cmd, args))
+  }
+
+  const [spawnShell, spawnArgs, spawnOpts] = makeSpawnArgs({
+    event,
+    path,
+    scriptShell,
+    binPaths,
+    env: packageEnvs(env, pkg),
+    stdio,
+    cmd,
+    args,
+    stdioString,
+  })
+
+  const p = promiseSpawn(spawnShell, spawnArgs, spawnOpts, {
+    event,
+    script: cmd,
+    pkgid: pkg._id,
+    path,
+  })
+
+  if (stdio === 'inherit') {
+    signalManager.add(p.process)
+  }
+
+  if (p.stdin) {
+    p.stdin.end()
+  }
+
+  return p.catch(er => {
+    const { signal } = er
+    if (stdio === 'inherit' && signal) {
+      // by the time we reach here, the child has already exited. we send the
+      // signal back to ourselves again so that npm will exit with the same
+      // status as the child
+      process.kill(process.pid, signal)
+
+      // just in case we don't die, reject after 500ms
+      // this also keeps the node process open long enough to actually
+      // get the signal, rather than terminating gracefully.
+      return new Promise((res, rej) => setTimeout(() => rej(er), signalTimeout))
+    } else {
+      throw er
+    }
+  })
+}
+
+module.exports = runScriptPkg
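
When stdio is 'inherit', the banner built by bruce() prints before the script's
output; for a package demo@1.0.0 running its test script `tap`, that looks like:

    > demo@1.0.0 test
    > tap
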
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js
new file mode 100644
index 0000000000000..e9d18261a2c1f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js
@@ -0,0 +1,14 @@
+const rpj = require('read-package-json-fast')
+const runScriptPkg = require('./run-script-pkg.js')
+const validateOptions = require('./validate-options.js')
+const isServerPackage = require('./is-server-package.js')
+
+const runScript = options => {
+  validateOptions(options)
+  const { pkg, path } = options
+  return pkg ? runScriptPkg(options)
+    : rpj(path + '/package.json')
+      .then(readPackage => runScriptPkg({ ...options, pkg: readPackage }))
+}
+
+module.exports = Object.assign(runScript, { isServerPackage })
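
Putting it together, the public entry point reads package.json (unless a pkg is
supplied) and delegates to run-script-pkg above. A minimal caller sketch (path
and event are illustrative):

    const runScript = require('@npmcli/run-script')

    runScript({ event: 'test', path: '/path/to/pkg', stdio: 'pipe' })
      .then(({ code, signal, stdout }) => console.log(code, signal, stdout))
    // resolves { code: 0, signal: null } even when the package has no such script
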
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js
new file mode 100644
index 0000000000000..c59c270d9969a
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js
@@ -0,0 +1,45 @@
+const { resolve, dirname, delimiter } = require('path')
+// the path here is relative, even though it does not need to be
+// in order to make the posix tests pass in windows
+const nodeGypPath = resolve(__dirname, '../lib/node-gyp-bin')
+
+// Windows typically calls its PATH environ 'Path', but this is not
+// guaranteed, nor is it guaranteed to be the only one.  Merge them
+// all together in the order they appear in the object.
+const setPATH = (projectPath, binPaths, env) => {
+  const PATH = Object.keys(env).filter(p => /^path$/i.test(p) && env[p])
+    .map(p => env[p].split(delimiter))
+    .reduce((set, p) => set.concat(p.filter(concatted => !set.includes(concatted))), [])
+    .join(delimiter)
+
+  const pathArr = []
+  if (binPaths) {
+    pathArr.push(...binPaths)
+  }
+  // unshift the ./node_modules/.bin from every folder
+  // walk up until dirname() does nothing, at the root
+  // XXX we should specify a cwd that we don't go above
+  let p = projectPath
+  let pp
+  do {
+    pathArr.push(resolve(p, 'node_modules', '.bin'))
+    pp = p
+    p = dirname(p)
+  } while (p !== pp)
+  pathArr.push(nodeGypPath, PATH)
+
+  const pathVal = pathArr.join(delimiter)
+
+  // XXX include the node-gyp-bin path somehow?  Probably better for
+  // npm or arborist or whoever to just provide that by putting it in
+  // the PATH environ, since that's preserved anyway.
+  for (const key of Object.keys(env)) {
+    if (/^path$/i.test(key)) {
+      env[key] = pathVal
+    }
+  }
+
+  return env
+}
+
+module.exports = setPATH
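
A worked sketch of the merge above, with hypothetical paths; every case variant of the PATH key ends up holding the same merged value, with binPaths and each ancestor's node_modules/.bin ahead of the original entries:

    const setPATH = require('./lib/set-path.js')

    const env = setPATH('/proj/a', ['/proj/.bin'], {
      Path: '/usr/bin',
      PATH: '/usr/local/bin',
    })
    // env.PATH === env.Path, both roughly:
    // /proj/.bin:/proj/a/node_modules/.bin:/proj/node_modules/.bin:
    //   /node_modules/.bin:<node-gyp-bin>:/usr/bin:/usr/local/bin
    console.log(env.PATH === env.Path) // true
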
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js
new file mode 100644
index 0000000000000..efc00b488063f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js
@@ -0,0 +1,49 @@
+const runningProcs = new Set()
+let handlersInstalled = false
+
+// NOTE: these signals aren't actually forwarded anywhere. they're trapped and
+// ignored until all child processes have exited. in our next breaking change
+// we should rename this
+const forwardedSignals = [
+  'SIGINT',
+  'SIGTERM',
+]
+
+// no-op, this is so receiving the signal doesn't cause us to exit immediately
+// instead, we exit after all children have exited when we re-send the signal
+// to ourselves. see the catch handler at the bottom of run-script-pkg.js
+// istanbul ignore next - this function does nothing
+const handleSignal = () => {}
+const setupListeners = () => {
+  for (const signal of forwardedSignals) {
+    process.on(signal, handleSignal)
+  }
+  handlersInstalled = true
+}
+
+const cleanupListeners = () => {
+  if (runningProcs.size === 0) {
+    for (const signal of forwardedSignals) {
+      process.removeListener(signal, handleSignal)
+    }
+    handlersInstalled = false
+  }
+}
+
+const add = proc => {
+  runningProcs.add(proc)
+  if (!handlersInstalled) {
+    setupListeners()
+  }
+
+  proc.once('exit', () => {
+    runningProcs.delete(proc)
+    cleanupListeners()
+  })
+}
+
+module.exports = {
+  add,
+  handleSignal,
+  forwardedSignals,
+}
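
In practice each inherited-stdio child is handed to the manager so npm ignores SIGINT and SIGTERM until every child has exited; a usage sketch with a hypothetical child:

    const { spawn } = require('child_process')
    const signalManager = require('./lib/signal-manager.js')

    const child = spawn('sleep', ['5'], { stdio: 'inherit' })
    signalManager.add(child)
    // Ctrl-C is now a no-op in the parent until the child exits;
    // the trap handlers are removed on the child's 'exit' event
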
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js
new file mode 100644
index 0000000000000..8d855916ecd15
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js
@@ -0,0 +1,39 @@
+const validateOptions = options => {
+  if (typeof options !== 'object' || !options) {
+    throw new TypeError('invalid options object provided to runScript')
+  }
+
+  const {
+    event,
+    path,
+    scriptShell,
+    env = {},
+    stdio = 'pipe',
+    args = [],
+    cmd,
+  } = options
+
+  if (!event || typeof event !== 'string') {
+    throw new TypeError('valid event not provided to runScript')
+  }
+  if (!path || typeof path !== 'string') {
+    throw new TypeError('valid path not provided to runScript')
+  }
+  if (scriptShell !== undefined && typeof scriptShell !== 'string') {
+    throw new TypeError('invalid scriptShell option provided to runScript')
+  }
+  if (typeof env !== 'object' || !env) {
+    throw new TypeError('invalid env option provided to runScript')
+  }
+  if (typeof stdio !== 'string' && !Array.isArray(stdio)) {
+    throw new TypeError('invalid stdio option provided to runScript')
+  }
+  if (!Array.isArray(args) || args.some(a => typeof a !== 'string')) {
+    throw new TypeError('invalid args option provided to runScript')
+  }
+  if (cmd !== undefined && typeof cmd !== 'string') {
+    throw new TypeError('invalid cmd option provided to runScript')
+  }
+}
+
+module.exports = validateOptions
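
Each guard throws a TypeError naming the offending option; a quick sketch:

    const validateOptions = require('./lib/validate-options.js')

    validateOptions({ event: 'test', path: '/proj' }) // ok, returns undefined
    try {
      validateOptions({ event: 'test' }) // path is missing
    } catch (er) {
      console.log(er instanceof TypeError) // true
      console.log(er.message) // 'valid path not provided to runScript'
    }
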
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/package.json b/node_modules/pacote/node_modules/@npmcli/run-script/package.json
new file mode 100644
index 0000000000000..38f6f72fa6ad9
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/package.json
@@ -0,0 +1,53 @@
+{
+  "name": "@npmcli/run-script",
+  "version": "6.0.2",
+  "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "scripts": {
+    "test": "tap",
+    "eslint": "eslint",
+    "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
+    "postlint": "template-oss-check",
+    "snap": "tap",
+    "posttest": "npm run lint",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.15.1",
+    "require-inject": "^1.4.4",
+    "tap": "^16.0.1"
+  },
+  "dependencies": {
+    "@npmcli/node-gyp": "^3.0.0",
+    "@npmcli/promise-spawn": "^6.0.0",
+    "node-gyp": "^9.0.0",
+    "read-package-json-fast": "^3.0.0",
+    "which": "^3.0.0"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "main": "lib/run-script.js",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/run-script.git"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.15.1",
+    "publish": "true"
+  },
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
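
The engines range above can be checked mechanically; a sketch using the semver package (already a dependency elsewhere in this tree):

    const semver = require('semver')

    const range = '^14.17.0 || ^16.13.0 || >=18.0.0'
    console.log(semver.satisfies(process.version, range))
    // e.g. true on v18.17.1, false on v17.9.0
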
diff --git a/package-lock.json b/package-lock.json
index 8863b1bdf533a..fd926e1d2637c 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -93,7 +93,7 @@
         "@npmcli/map-workspaces": "^3.0.4",
         "@npmcli/package-json": "^5.0.0",
         "@npmcli/promise-spawn": "^7.0.0",
-        "@npmcli/run-script": "^6.0.2",
+        "@npmcli/run-script": "^7.0.0",
         "@sigstore/tuf": "^2.1.0",
         "abbrev": "^2.0.0",
         "archy": "~1.0.0",
@@ -2610,19 +2610,19 @@
       }
     },
     "node_modules/@npmcli/run-script": {
-      "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-6.0.2.tgz",
-      "integrity": "sha512-NCcr1uQo1k5U+SYlnIrbAh3cxy+OQT1VtqiAbxdymSlptbzBb62AjH2xXgjNCoP073hoa1CfCAcwoZ8k96C4nA==",
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-7.0.0.tgz",
+      "integrity": "sha512-JpR7jgCPIKnke0/xJUSYgWKk6BtWQ2FPoVm4lEC4gTDOjIpBJgAfwg+nGaCwnzY8oq6I5F4r+sI1jGJk/iCh/w==",
       "inBundle": true,
       "dependencies": {
         "@npmcli/node-gyp": "^3.0.0",
         "@npmcli/promise-spawn": "^6.0.0",
         "node-gyp": "^9.0.0",
         "read-package-json-fast": "^3.0.0",
-        "which": "^3.0.0"
+        "which": "^4.0.0"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn": {
@@ -10572,6 +10572,22 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/pacote/node_modules/@npmcli/run-script": {
+      "version": "6.0.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-6.0.2.tgz",
+      "integrity": "sha512-NCcr1uQo1k5U+SYlnIrbAh3cxy+OQT1VtqiAbxdymSlptbzBb62AjH2xXgjNCoP073hoa1CfCAcwoZ8k96C4nA==",
+      "inBundle": true,
+      "dependencies": {
+        "@npmcli/node-gyp": "^3.0.0",
+        "@npmcli/promise-spawn": "^6.0.0",
+        "node-gyp": "^9.0.0",
+        "read-package-json-fast": "^3.0.0",
+        "which": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/pacote/node_modules/which": {
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
@@ -16024,7 +16040,7 @@
         "@npmcli/node-gyp": "^3.0.0",
         "@npmcli/package-json": "^5.0.0",
         "@npmcli/query": "^3.0.0",
-        "@npmcli/run-script": "^6.0.0",
+        "@npmcli/run-script": "^7.0.0",
         "bin-links": "^4.0.1",
         "cacache": "^18.0.0",
         "common-ancestor-path": "^1.0.1",
@@ -16136,7 +16152,7 @@
       "license": "ISC",
       "dependencies": {
         "@npmcli/arborist": "^6.3.0",
-        "@npmcli/run-script": "^6.0.0",
+        "@npmcli/run-script": "^7.0.0",
         "ci-info": "^3.7.1",
         "npm-package-arg": "^11.0.0",
         "npmlog": "^7.0.1",
@@ -16216,7 +16232,7 @@
       "license": "ISC",
       "dependencies": {
         "@npmcli/arborist": "^6.3.0",
-        "@npmcli/run-script": "^6.0.0",
+        "@npmcli/run-script": "^7.0.0",
         "npm-package-arg": "^11.0.0",
         "pacote": "^17.0.3"
       },
@@ -16294,7 +16310,7 @@
       "license": "ISC",
       "dependencies": {
         "@npmcli/git": "^5.0.1",
-        "@npmcli/run-script": "^6.0.0",
+        "@npmcli/run-script": "^7.0.0",
         "json-parse-even-better-errors": "^3.0.0",
         "proc-log": "^3.0.0",
         "semver": "^7.3.7"
diff --git a/package.json b/package.json
index cf775c0f32760..1e47c14a69e5a 100644
--- a/package.json
+++ b/package.json
@@ -58,7 +58,7 @@
     "@npmcli/map-workspaces": "^3.0.4",
     "@npmcli/package-json": "^5.0.0",
     "@npmcli/promise-spawn": "^7.0.0",
-    "@npmcli/run-script": "^6.0.2",
+    "@npmcli/run-script": "^7.0.0",
     "@sigstore/tuf": "^2.1.0",
     "abbrev": "^2.0.0",
     "archy": "~1.0.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index c86504f333e9f..3535fea3c2d12 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -12,7 +12,7 @@
     "@npmcli/node-gyp": "^3.0.0",
     "@npmcli/package-json": "^5.0.0",
     "@npmcli/query": "^3.0.0",
-    "@npmcli/run-script": "^6.0.0",
+    "@npmcli/run-script": "^7.0.0",
     "bin-links": "^4.0.1",
     "cacache": "^18.0.0",
     "common-ancestor-path": "^1.0.1",
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index 8e97e9f096100..2ca73add6dbc5 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -60,7 +60,7 @@
   },
   "dependencies": {
     "@npmcli/arborist": "^6.3.0",
-    "@npmcli/run-script": "^6.0.0",
+    "@npmcli/run-script": "^7.0.0",
     "ci-info": "^3.7.1",
     "npm-package-arg": "^11.0.0",
     "npmlog": "^7.0.1",
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index 4e1b055b75a41..aef88b5da7d6d 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -37,7 +37,7 @@
   "homepage": "https://npmjs.com/package/libnpmpack",
   "dependencies": {
     "@npmcli/arborist": "^6.3.0",
-    "@npmcli/run-script": "^6.0.0",
+    "@npmcli/run-script": "^7.0.0",
     "npm-package-arg": "^11.0.0",
     "pacote": "^17.0.3"
   },
diff --git a/workspaces/libnpmversion/package.json b/workspaces/libnpmversion/package.json
index 2798ec4260bcf..57723c4b41b26 100644
--- a/workspaces/libnpmversion/package.json
+++ b/workspaces/libnpmversion/package.json
@@ -38,7 +38,7 @@
   },
   "dependencies": {
     "@npmcli/git": "^5.0.1",
-    "@npmcli/run-script": "^6.0.0",
+    "@npmcli/run-script": "^7.0.0",
     "json-parse-even-better-errors": "^3.0.0",
     "proc-log": "^3.0.0",
     "semver": "^7.3.7"

From aa4164c709c0efe23a309e49e24c573107f3f971 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Wed, 30 Aug 2023 10:44:43 -0700
Subject: [PATCH 64/68] deps: pacote@17.0.4

---
 mock-registry/package.json                    |   2 +-
 node_modules/.gitignore                       |   7 -
 .../@npmcli/promise-spawn/LICENSE             |  15 --
 .../@npmcli/promise-spawn/lib/escape.js       |  68 ------
 .../@npmcli/promise-spawn/lib/index.js        | 195 ------------------
 .../@npmcli/promise-spawn/package.json        |  50 -----
 .../node_modules/@npmcli/run-script/LICENSE   |  15 --
 .../run-script/lib/is-server-package.js       |  12 --
 .../@npmcli/run-script/lib/is-windows.js      |   2 -
 .../@npmcli/run-script/lib/make-spawn-args.js |  40 ----
 .../run-script/lib/node-gyp-bin/node-gyp      |   2 -
 .../run-script/lib/node-gyp-bin/node-gyp.cmd  |   1 -
 .../@npmcli/run-script/lib/package-envs.js    |  26 ---
 .../@npmcli/run-script/lib/run-script-pkg.js  | 112 ----------
 .../@npmcli/run-script/lib/run-script.js      |  14 --
 .../@npmcli/run-script/lib/set-path.js        |  45 ----
 .../@npmcli/run-script/lib/signal-manager.js  |  49 -----
 .../run-script/lib/validate-options.js        |  39 ----
 .../@npmcli/run-script/package.json           |  53 -----
 .../pacote/node_modules/which/LICENSE         |  15 --
 .../pacote/node_modules/which/bin/which.js    |  52 -----
 .../pacote/node_modules/which/lib/index.js    | 115 -----------
 .../pacote/node_modules/which/package.json    |  51 -----
 node_modules/pacote/package.json              |   6 +-
 package-lock.json                             |  65 +-----
 package.json                                  |   2 +-
 workspaces/arborist/package.json              |   2 +-
 workspaces/libnpmdiff/package.json            |   2 +-
 workspaces/libnpmexec/package.json            |   2 +-
 workspaces/libnpmpack/package.json            |   2 +-
 30 files changed, 20 insertions(+), 1041 deletions(-)
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/promise-spawn/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/escape.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/index.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/promise-spawn/package.json
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/is-windows.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js
 delete mode 100755 node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp
 delete mode 100755 node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/package.json
 delete mode 100644 node_modules/pacote/node_modules/which/LICENSE
 delete mode 100755 node_modules/pacote/node_modules/which/bin/which.js
 delete mode 100644 node_modules/pacote/node_modules/which/lib/index.js
 delete mode 100644 node_modules/pacote/node_modules/which/package.json

diff --git a/mock-registry/package.json b/mock-registry/package.json
index eb7544d8cfeaf..205c420e3c6d8 100644
--- a/mock-registry/package.json
+++ b/mock-registry/package.json
@@ -56,7 +56,7 @@
     "json-stringify-safe": "^5.0.1",
     "nock": "^13.3.3",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^17.0.3",
+    "pacote": "^17.0.4",
     "tap": "^16.3.8"
   }
 }
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 580fc628b2609..93e03b09bef97 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -229,13 +229,6 @@
 !/once
 !/p-map
 !/pacote
-!/pacote/node_modules/
-/pacote/node_modules/*
-!/pacote/node_modules/@npmcli/
-/pacote/node_modules/@npmcli/*
-!/pacote/node_modules/@npmcli/promise-spawn
-!/pacote/node_modules/@npmcli/run-script
-!/pacote/node_modules/which
 !/parse-conflict-json
 !/path-is-absolute
 !/path-key
diff --git a/node_modules/pacote/node_modules/@npmcli/promise-spawn/LICENSE b/node_modules/pacote/node_modules/@npmcli/promise-spawn/LICENSE
deleted file mode 100644
index 8f90f96f4c6c5..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/promise-spawn/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
-OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
-DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
-ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
-SOFTWARE.
diff --git a/node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/escape.js b/node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/escape.js
deleted file mode 100644
index 9aca8bde70a6e..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/escape.js
+++ /dev/null
@@ -1,68 +0,0 @@
-'use strict'
-
-// eslint-disable-next-line max-len
-// this code adapted from: https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
-const cmd = (input, doubleEscape) => {
-  if (!input.length) {
-    return '""'
-  }
-
-  let result
-  if (!/[ \t\n\v"]/.test(input)) {
-    result = input
-  } else {
-    result = '"'
-    for (let i = 0; i <= input.length; ++i) {
-      let slashCount = 0
-      while (input[i] === '\\') {
-        ++i
-        ++slashCount
-      }
-
-      if (i === input.length) {
-        result += '\\'.repeat(slashCount * 2)
-        break
-      }
-
-      if (input[i] === '"') {
-        result += '\\'.repeat(slashCount * 2 + 1)
-        result += input[i]
-      } else {
-        result += '\\'.repeat(slashCount)
-        result += input[i]
-      }
-    }
-    result += '"'
-  }
-
-  // and finally, prefix shell meta chars with a ^
-  result = result.replace(/[ !%^&()<>|"]/g, '^$&')
-  if (doubleEscape) {
-    result = result.replace(/[ !%^&()<>|"]/g, '^$&')
-  }
-
-  return result
-}
-
-const sh = (input) => {
-  if (!input.length) {
-    return `''`
-  }
-
-  if (!/[\t\n\r "#$&'()*;<>?\\`|~]/.test(input)) {
-    return input
-  }
-
-  // replace single quotes with '\'' and wrap the whole result in a fresh set of quotes
-  const result = `'${input.replace(/'/g, `'\\''`)}'`
-    // if the input string already had single quotes around it, clean those up
-    .replace(/^(?:'')+(?!$)/, '')
-    .replace(/\\'''/g, `\\'`)
-
-  return result
-}
-
-module.exports = {
-  cmd,
-  sh,
-}
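
The POSIX escaper being deleted here wraps input in single quotes and splices '\'' for each embedded quote; a simplified sketch of just that core rule (it omits the module's no-quote fast path and redundant-quote cleanup):

    const assert = require('assert')

    // minimal version of the sh() rule above
    const shQuote = input => `'${input.replace(/'/g, `'\\''`)}'`

    assert.strictEqual(shQuote("it's"), `'it'\\''s'`)
    // the cmd() escaper additionally doubles backslashes before quotes and
    // prefixes cmd.exe metacharacters such as & | < > ^ with ^
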
diff --git a/node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/index.js b/node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/index.js
deleted file mode 100644
index 571ff6b9169c9..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/promise-spawn/lib/index.js
+++ /dev/null
@@ -1,195 +0,0 @@
-'use strict'
-
-const { spawn } = require('child_process')
-const os = require('os')
-const which = require('which')
-
-const escape = require('./escape.js')
-
-// 'extra' object is for decorating the error a bit more
-const promiseSpawn = (cmd, args, opts = {}, extra = {}) => {
-  if (opts.shell) {
-    return spawnWithShell(cmd, args, opts, extra)
-  }
-
-  let proc
-
-  const p = new Promise((res, rej) => {
-    proc = spawn(cmd, args, opts)
-
-    const stdout = []
-    const stderr = []
-
-    const reject = er => rej(Object.assign(er, {
-      cmd,
-      args,
-      ...stdioResult(stdout, stderr, opts),
-      ...extra,
-    }))
-
-    proc.on('error', reject)
-
-    if (proc.stdout) {
-      proc.stdout.on('data', c => stdout.push(c)).on('error', reject)
-      proc.stdout.on('error', er => reject(er))
-    }
-
-    if (proc.stderr) {
-      proc.stderr.on('data', c => stderr.push(c)).on('error', reject)
-      proc.stderr.on('error', er => reject(er))
-    }
-
-    proc.on('close', (code, signal) => {
-      const result = {
-        cmd,
-        args,
-        code,
-        signal,
-        ...stdioResult(stdout, stderr, opts),
-        ...extra,
-      }
-
-      if (code || signal) {
-        rej(Object.assign(new Error('command failed'), result))
-      } else {
-        res(result)
-      }
-    })
-  })
-
-  p.stdin = proc.stdin
-  p.process = proc
-  return p
-}
-
-const spawnWithShell = (cmd, args, opts, extra) => {
-  let command = opts.shell
-  // if shell is set to true, we use a platform default. we can't let the core
-  // spawn method decide this for us because we need to know what shell is in use
-  // ahead of time so that we can escape arguments properly. we don't need coverage here.
-  if (command === true) {
-    // istanbul ignore next
-    command = process.platform === 'win32' ? process.env.ComSpec : 'sh'
-  }
-
-  const options = { ...opts, shell: false }
-  const realArgs = []
-  let script = cmd
-
-  // first, determine if we're in windows because if we are we need to know if we're
-  // running an .exe or a .cmd/.bat since the latter requires extra escaping
-  const isCmd = /(?:^|\\)cmd(?:\.exe)?$/i.test(command)
-  if (isCmd) {
-    let doubleEscape = false
-
-    // find the actual command we're running
-    let initialCmd = ''
-    let insideQuotes = false
-    for (let i = 0; i < cmd.length; ++i) {
-      const char = cmd.charAt(i)
-      if (char === ' ' && !insideQuotes) {
-        break
-      }
-
-      initialCmd += char
-      if (char === '"' || char === "'") {
-        insideQuotes = !insideQuotes
-      }
-    }
-
-    let pathToInitial
-    try {
-      pathToInitial = which.sync(initialCmd, {
-        path: (options.env && options.env.PATH) || process.env.PATH,
-        pathext: (options.env && options.env.PATHEXT) || process.env.PATHEXT,
-      }).toLowerCase()
-    } catch (err) {
-      pathToInitial = initialCmd.toLowerCase()
-    }
-
-    doubleEscape = pathToInitial.endsWith('.cmd') || pathToInitial.endsWith('.bat')
-    for (const arg of args) {
-      script += ` ${escape.cmd(arg, doubleEscape)}`
-    }
-    realArgs.push('/d', '/s', '/c', script)
-    options.windowsVerbatimArguments = true
-  } else {
-    for (const arg of args) {
-      script += ` ${escape.sh(arg)}`
-    }
-    realArgs.push('-c', script)
-  }
-
-  return promiseSpawn(command, realArgs, options, extra)
-}
-
-// open a file with the default application as defined by the user's OS
-const open = (_args, opts = {}, extra = {}) => {
-  const options = { ...opts, shell: true }
-  const args = [].concat(_args)
-
-  let platform = process.platform
-  // process.platform === 'linux' may actually indicate WSL, if that's the case
-  // we want to treat things as win32 anyway so the host can open the argument
-  if (platform === 'linux' && os.release().toLowerCase().includes('microsoft')) {
-    platform = 'win32'
-  }
-
-  let command = options.command
-  if (!command) {
-    if (platform === 'win32') {
-      // spawnWithShell does not do the additional os.release() check, so we
-      // have to force the shell here to make sure we treat WSL as windows.
-      options.shell = process.env.ComSpec
-      // also, the start command accepts a title so to make sure that we don't
-      // accidentally interpret the first arg as the title, we stick an empty
-      // string immediately after the start command
-      command = 'start ""'
-    } else if (platform === 'darwin') {
-      command = 'open'
-    } else {
-      command = 'xdg-open'
-    }
-  }
-
-  return spawnWithShell(command, args, options, extra)
-}
-promiseSpawn.open = open
-
-const isPipe = (stdio = 'pipe', fd) => {
-  if (stdio === 'pipe' || stdio === null) {
-    return true
-  }
-
-  if (Array.isArray(stdio)) {
-    return isPipe(stdio[fd], fd)
-  }
-
-  return false
-}
-
-const stdioResult = (stdout, stderr, { stdioString = true, stdio }) => {
-  const result = {
-    stdout: null,
-    stderr: null,
-  }
-
-  // stdio is [stdin, stdout, stderr]
-  if (isPipe(stdio, 1)) {
-    result.stdout = Buffer.concat(stdout)
-    if (stdioString) {
-      result.stdout = result.stdout.toString().trim()
-    }
-  }
-
-  if (isPipe(stdio, 2)) {
-    result.stderr = Buffer.concat(stderr)
-    if (stdioString) {
-      result.stderr = result.stderr.toString().trim()
-    }
-  }
-
-  return result
-}
-
-module.exports = promiseSpawn
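
This nested copy can be deleted because pacote now resolves @npmcli/promise-spawn from higher in the tree; the promise interface is unchanged, as in this usage sketch:

    const promiseSpawn = require('@npmcli/promise-spawn')

    promiseSpawn('node', ['--version'])
      .then(({ code, stdout }) => console.log(code, stdout)) // e.g. 0 'v20.5.1'
      .catch(er => console.error(er.code, er.stderr))
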
diff --git a/node_modules/pacote/node_modules/@npmcli/promise-spawn/package.json b/node_modules/pacote/node_modules/@npmcli/promise-spawn/package.json
deleted file mode 100644
index 2080d9f5be9f0..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/promise-spawn/package.json
+++ /dev/null
@@ -1,50 +0,0 @@
-{
-  "name": "@npmcli/promise-spawn",
-  "version": "6.0.2",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "main": "./lib/index.js",
-  "description": "spawn processes the way the npm cli likes to do",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/promise-spawn.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "tap": {
-    "check-coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.11.0",
-    "minipass": "^4.0.0",
-    "spawk": "^1.7.1",
-    "tap": "^16.0.1"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.11.0"
-  },
-  "dependencies": {
-    "which": "^3.0.0"
-  }
-}
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/LICENSE b/node_modules/pacote/node_modules/@npmcli/run-script/LICENSE
deleted file mode 100644
index 19cec97b18468..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js
deleted file mode 100644
index d168623247527..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js
+++ /dev/null
@@ -1,12 +0,0 @@
-const util = require('util')
-const fs = require('fs')
-const { stat } = fs.promises || { stat: util.promisify(fs.stat) }
-const { resolve } = require('path')
-module.exports = async path => {
-  try {
-    const st = await stat(resolve(path, 'server.js'))
-    return st.isFile()
-  } catch (er) {
-    return false
-  }
-}
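
The deleted helper's heuristic fits in a few lines; a self-contained sketch of the same check:

    const { stat } = require('fs').promises
    const { resolve } = require('path')

    // true only when <path>/server.js exists and is a regular file
    const isServerPackage = path =>
      stat(resolve(path, 'server.js')).then(st => st.isFile(), () => false)

    isServerPackage(process.cwd()).then(console.log)
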
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-windows.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-windows.js
deleted file mode 100644
index 651917e6ad27a..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-windows.js
+++ /dev/null
@@ -1,2 +0,0 @@
-const platform = process.env.__FAKE_TESTING_PLATFORM__ || process.platform
-module.exports = platform === 'win32'
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js
deleted file mode 100644
index 2b2f96a91c8d5..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js
+++ /dev/null
@@ -1,40 +0,0 @@
-/* eslint camelcase: "off" */
-const setPATH = require('./set-path.js')
-const { resolve } = require('path')
-const npm_config_node_gyp = require.resolve('node-gyp/bin/node-gyp.js')
-
-const makeSpawnArgs = options => {
-  const {
-    event,
-    path,
-    scriptShell = true,
-    binPaths,
-    env = {},
-    stdio,
-    cmd,
-    args = [],
-    stdioString,
-  } = options
-
-  const spawnEnv = setPATH(path, binPaths, {
-    // we need to at least save the PATH environment var
-    ...process.env,
-    ...env,
-    npm_package_json: resolve(path, 'package.json'),
-    npm_lifecycle_event: event,
-    npm_lifecycle_script: cmd,
-    npm_config_node_gyp,
-  })
-
-  const spawnOpts = {
-    env: spawnEnv,
-    stdioString,
-    stdio,
-    cwd: path,
-    shell: scriptShell,
-  }
-
-  return [cmd, args, spawnOpts]
-}
-
-module.exports = makeSpawnArgs
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp b/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp
deleted file mode 100755
index 5bec64d961a3a..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/usr/bin/env sh
-node "$npm_config_node_gyp" "$@"
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd b/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd
deleted file mode 100755
index 4c6987ac9868b..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd
+++ /dev/null
@@ -1 +0,0 @@
-@node "%npm_config_node_gyp%" %*
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js
deleted file mode 100644
index 6b538e50247fd..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js
+++ /dev/null
@@ -1,26 +0,0 @@
-// https://github.com/npm/rfcs/pull/183
-
-const envVal = val => Array.isArray(val) ? val.map(v => envVal(v)).join('\n\n')
-  : val === null || val === false ? ''
-  : String(val)
-
-const packageEnvs = (env, vals, prefix) => {
-  for (const [key, val] of Object.entries(vals)) {
-    if (val === undefined) {
-      continue
-    } else if (val && !Array.isArray(val) && typeof val === 'object') {
-      packageEnvs(env, val, `${prefix}${key}_`)
-    } else {
-      env[`${prefix}${key}`] = envVal(val)
-    }
-  }
-  return env
-}
-
-module.exports = (env, pkg) => packageEnvs({ ...env }, {
-  name: pkg.name,
-  version: pkg.version,
-  config: pkg.config,
-  engines: pkg.engines,
-  bin: pkg.bin,
-}, 'npm_package_')
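
A worked sketch of the flattening the deleted module performed, mirroring its envVal and packageEnvs logic inline:

    const envVal = v => Array.isArray(v) ? v.map(envVal).join('\n\n')
      : v === null || v === false ? '' : String(v)

    const flatten = (env, vals, prefix) => {
      for (const [key, val] of Object.entries(vals)) {
        if (val === undefined) {
          continue
        } else if (val && !Array.isArray(val) && typeof val === 'object') {
          flatten(env, val, `${prefix}${key}_`)
        } else {
          env[`${prefix}${key}`] = envVal(val)
        }
      }
      return env
    }

    console.log(flatten({}, { name: 'x', config: { port: 8080 } }, 'npm_package_'))
    // { npm_package_name: 'x', npm_package_config_port: '8080' }
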
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js
deleted file mode 100644
index a5518285d1af1..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js
+++ /dev/null
@@ -1,112 +0,0 @@
-const makeSpawnArgs = require('./make-spawn-args.js')
-const promiseSpawn = require('@npmcli/promise-spawn')
-const packageEnvs = require('./package-envs.js')
-const { isNodeGypPackage, defaultGypInstallScript } = require('@npmcli/node-gyp')
-const signalManager = require('./signal-manager.js')
-const isServerPackage = require('./is-server-package.js')
-
-// you wouldn't like me when I'm angry...
-const bruce = (id, event, cmd, args) => {
-  let banner = id
-    ? `\n> ${id} ${event}\n`
-    : `\n> ${event}\n`
-  banner += `> ${cmd.trim().replace(/\n/g, '\n> ')}`
-  if (args.length) {
-    banner += ` ${args.join(' ')}`
-  }
-  banner += '\n'
-  return banner
-}
-
-const runScriptPkg = async options => {
-  const {
-    event,
-    path,
-    scriptShell,
-    binPaths = false,
-    env = {},
-    stdio = 'pipe',
-    pkg,
-    args = [],
-    stdioString,
-    // note: only used when stdio:inherit
-    banner = true,
-    // how long to wait for a process.kill signal
-    // only exposed here so that we can make the test go a bit faster.
-    signalTimeout = 500,
-  } = options
-
-  const { scripts = {}, gypfile } = pkg
-  let cmd = null
-  if (options.cmd) {
-    cmd = options.cmd
-  } else if (pkg.scripts && pkg.scripts[event]) {
-    cmd = pkg.scripts[event]
-  } else if (
-    // If there is no preinstall or install script, default to rebuilding node-gyp packages.
-    event === 'install' &&
-    !scripts.install &&
-    !scripts.preinstall &&
-    gypfile !== false &&
-    await isNodeGypPackage(path)
-  ) {
-    cmd = defaultGypInstallScript
-  } else if (event === 'start' && await isServerPackage(path)) {
-    cmd = 'node server.js'
-  }
-
-  if (!cmd) {
-    return { code: 0, signal: null }
-  }
-
-  if (stdio === 'inherit' && banner !== false) {
-    // we're dumping to the parent's stdout, so print the banner
-    console.log(bruce(pkg._id, event, cmd, args))
-  }
-
-  const [spawnShell, spawnArgs, spawnOpts] = makeSpawnArgs({
-    event,
-    path,
-    scriptShell,
-    binPaths,
-    env: packageEnvs(env, pkg),
-    stdio,
-    cmd,
-    args,
-    stdioString,
-  })
-
-  const p = promiseSpawn(spawnShell, spawnArgs, spawnOpts, {
-    event,
-    script: cmd,
-    pkgid: pkg._id,
-    path,
-  })
-
-  if (stdio === 'inherit') {
-    signalManager.add(p.process)
-  }
-
-  if (p.stdin) {
-    p.stdin.end()
-  }
-
-  return p.catch(er => {
-    const { signal } = er
-    if (stdio === 'inherit' && signal) {
-      // by the time we reach here, the child has already exited. we send the
-      // signal back to ourselves again so that npm will exit with the same
-      // status as the child
-      process.kill(process.pid, signal)
-
-      // just in case we don't die, reject after 500ms
-      // this also keeps the node process open long enough to actually
-      // get the signal, rather than terminating gracefully.
-      return new Promise((res, rej) => setTimeout(() => rej(er), signalTimeout))
-    } else {
-      throw er
-    }
-  })
-}
-
-module.exports = runScriptPkg
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js
deleted file mode 100644
index e9d18261a2c1f..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js
+++ /dev/null
@@ -1,14 +0,0 @@
-const rpj = require('read-package-json-fast')
-const runScriptPkg = require('./run-script-pkg.js')
-const validateOptions = require('./validate-options.js')
-const isServerPackage = require('./is-server-package.js')
-
-const runScript = options => {
-  validateOptions(options)
-  const { pkg, path } = options
-  return pkg ? runScriptPkg(options)
-    : rpj(path + '/package.json')
-      .then(readPackage => runScriptPkg({ ...options, pkg: readPackage }))
-}
-
-module.exports = Object.assign(runScript, { isServerPackage })
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js
deleted file mode 100644
index c59c270d9969a..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js
+++ /dev/null
@@ -1,45 +0,0 @@
-const { resolve, dirname, delimiter } = require('path')
-// the path here is relative, even though it does not need to be,
-// in order to make the posix tests pass on windows
-const nodeGypPath = resolve(__dirname, '../lib/node-gyp-bin')
-
-// Windows typically calls its PATH environ 'Path', but this is not
-// guaranteed, nor is it guaranteed to be the only one.  Merge them
-// all together in the order they appear in the object.
-const setPATH = (projectPath, binPaths, env) => {
-  const PATH = Object.keys(env).filter(p => /^path$/i.test(p) && env[p])
-    .map(p => env[p].split(delimiter))
-    .reduce((set, p) => set.concat(p.filter(concatted => !set.includes(concatted))), [])
-    .join(delimiter)
-
-  const pathArr = []
-  if (binPaths) {
-    pathArr.push(...binPaths)
-  }
-  // prepend the ./node_modules/.bin from every ancestor folder
-  // walk up until dirname() does nothing, at the root
-  // XXX we should specify a cwd that we don't go above
-  let p = projectPath
-  let pp
-  do {
-    pathArr.push(resolve(p, 'node_modules', '.bin'))
-    pp = p
-    p = dirname(p)
-  } while (p !== pp)
-  pathArr.push(nodeGypPath, PATH)
-
-  const pathVal = pathArr.join(delimiter)
-
-  // XXX include the node-gyp-bin path somehow?  Probably better for
-  // npm or arborist or whoever to just provide that by putting it in
-  // the PATH environ, since that's preserved anyway.
-  for (const key of Object.keys(env)) {
-    if (/^path$/i.test(key)) {
-      env[key] = pathVal
-    }
-  }
-
-  return env
-}
-
-module.exports = setPATH
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js
deleted file mode 100644
index efc00b488063f..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const runningProcs = new Set()
-let handlersInstalled = false
-
-// NOTE: these signals aren't actually forwarded anywhere. they're trapped and
-// ignored until all child processes have exited. in our next breaking change
-// we should rename this
-const forwardedSignals = [
-  'SIGINT',
-  'SIGTERM',
-]
-
-// no-op, this is so receiving the signal doesn't cause us to exit immediately
-// instead, we exit after all children have exited when we re-send the signal
-// to ourselves. see the catch handler at the bottom of run-script-pkg.js
-// istanbul ignore next - this function does nothing
-const handleSignal = () => {}
-const setupListeners = () => {
-  for (const signal of forwardedSignals) {
-    process.on(signal, handleSignal)
-  }
-  handlersInstalled = true
-}
-
-const cleanupListeners = () => {
-  if (runningProcs.size === 0) {
-    for (const signal of forwardedSignals) {
-      process.removeListener(signal, handleSignal)
-    }
-    handlersInstalled = false
-  }
-}
-
-const add = proc => {
-  runningProcs.add(proc)
-  if (!handlersInstalled) {
-    setupListeners()
-  }
-
-  proc.once('exit', () => {
-    runningProcs.delete(proc)
-    cleanupListeners()
-  })
-}
-
-module.exports = {
-  add,
-  handleSignal,
-  forwardedSignals,
-}
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js
deleted file mode 100644
index 8d855916ecd15..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js
+++ /dev/null
@@ -1,39 +0,0 @@
-const validateOptions = options => {
-  if (typeof options !== 'object' || !options) {
-    throw new TypeError('invalid options object provided to runScript')
-  }
-
-  const {
-    event,
-    path,
-    scriptShell,
-    env = {},
-    stdio = 'pipe',
-    args = [],
-    cmd,
-  } = options
-
-  if (!event || typeof event !== 'string') {
-    throw new TypeError('valid event not provided to runScript')
-  }
-  if (!path || typeof path !== 'string') {
-    throw new TypeError('valid path not provided to runScript')
-  }
-  if (scriptShell !== undefined && typeof scriptShell !== 'string') {
-    throw new TypeError('invalid scriptShell option provided to runScript')
-  }
-  if (typeof env !== 'object' || !env) {
-    throw new TypeError('invalid env option provided to runScript')
-  }
-  if (typeof stdio !== 'string' && !Array.isArray(stdio)) {
-    throw new TypeError('invalid stdio option provided to runScript')
-  }
-  if (!Array.isArray(args) || args.some(a => typeof a !== 'string')) {
-    throw new TypeError('invalid args option provided to runScript')
-  }
-  if (cmd !== undefined && typeof cmd !== 'string') {
-    throw new TypeError('invalid cmd option provided to runScript')
-  }
-}
-
-module.exports = validateOptions
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/package.json b/node_modules/pacote/node_modules/@npmcli/run-script/package.json
deleted file mode 100644
index 38f6f72fa6ad9..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/package.json
+++ /dev/null
@@ -1,53 +0,0 @@
-{
-  "name": "@npmcli/run-script",
-  "version": "6.0.2",
-  "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "scripts": {
-    "test": "tap",
-    "eslint": "eslint",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "posttest": "npm run lint",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.15.1",
-    "require-inject": "^1.4.4",
-    "tap": "^16.0.1"
-  },
-  "dependencies": {
-    "@npmcli/node-gyp": "^3.0.0",
-    "@npmcli/promise-spawn": "^6.0.0",
-    "node-gyp": "^9.0.0",
-    "read-package-json-fast": "^3.0.0",
-    "which": "^3.0.0"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "main": "lib/run-script.js",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/run-script.git"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.15.1",
-    "publish": "true"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/node_modules/pacote/node_modules/which/LICENSE b/node_modules/pacote/node_modules/which/LICENSE
deleted file mode 100644
index 19129e315fe59..0000000000000
--- a/node_modules/pacote/node_modules/which/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/which/bin/which.js b/node_modules/pacote/node_modules/which/bin/which.js
deleted file mode 100755
index 6df16f21acf93..0000000000000
--- a/node_modules/pacote/node_modules/which/bin/which.js
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env node
-
-const which = require('../lib')
-const argv = process.argv.slice(2)
-
-const usage = (err) => {
-  if (err) {
-    console.error(`which: ${err}`)
-  }
-  console.error('usage: which [-as] program ...')
-  process.exit(1)
-}
-
-if (!argv.length) {
-  return usage()
-}
-
-let dashdash = false
-const [commands, flags] = argv.reduce((acc, arg) => {
-  if (dashdash || arg === '--') {
-    dashdash = true
-    return acc
-  }
-
-  if (!/^-/.test(arg)) {
-    acc[0].push(arg)
-    return acc
-  }
-
-  for (const flag of arg.slice(1).split('')) {
-    if (flag === 's') {
-      acc[1].silent = true
-    } else if (flag === 'a') {
-      acc[1].all = true
-    } else {
-      usage(`illegal option -- ${flag}`)
-    }
-  }
-
-  return acc
-}, [[], {}])
-
-for (const command of commands) {
-  try {
-    const res = which.sync(command, { all: flags.all })
-    if (!flags.silent) {
-      console.log([].concat(res).join('\n'))
-    }
-  } catch (err) {
-    process.exitCode = 1
-  }
-}
diff --git a/node_modules/pacote/node_modules/which/lib/index.js b/node_modules/pacote/node_modules/which/lib/index.js
deleted file mode 100644
index 52e9ea62377e7..0000000000000
--- a/node_modules/pacote/node_modules/which/lib/index.js
+++ /dev/null
@@ -1,115 +0,0 @@
-const isexe = require('isexe')
-const { join, delimiter, sep, posix } = require('path')
-
-const isWindows = process.platform === 'win32'
-
-// used to check for slashes in commands passed in. always checks for the posix
-// separator on all platforms, and checks for the current separator when not on
-// a posix platform. don't use the isWindows check for this since that is mocked
-// in tests but we still need the code to actually work when called. that is also
-// why it is ignored from coverage.
-/* istanbul ignore next */
-const rSlash = new RegExp(`[${posix.sep}${sep === posix.sep ? '' : sep}]`.replace(/(\\)/g, '\\$1'))
-const rRel = new RegExp(`^\\.${rSlash.source}`)
-
-const getNotFoundError = (cmd) =>
-  Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' })
-
-const getPathInfo = (cmd, {
-  path: optPath = process.env.PATH,
-  pathExt: optPathExt = process.env.PATHEXT,
-  delimiter: optDelimiter = delimiter,
-}) => {
-  // If it has a slash, then we don't bother searching the pathenv.
-  // just check the file itself, and that's it.
-  const pathEnv = cmd.match(rSlash) ? [''] : [
-    // windows always checks the cwd first
-    ...(isWindows ? [process.cwd()] : []),
-    ...(optPath || /* istanbul ignore next: very unusual */ '').split(optDelimiter),
-  ]
-
-  if (isWindows) {
-    const pathExtExe = optPathExt ||
-      ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter)
-    const pathExt = pathExtExe.split(optDelimiter).reduce((acc, item) => {
-      acc.push(item)
-      acc.push(item.toLowerCase())
-      return acc
-    }, [])
-    if (cmd.includes('.') && pathExt[0] !== '') {
-      pathExt.unshift('')
-    }
-    return { pathEnv, pathExt, pathExtExe }
-  }
-
-  return { pathEnv, pathExt: [''] }
-}
-
-const getPathPart = (raw, cmd) => {
-  const pathPart = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw
-  const prefix = !pathPart && rRel.test(cmd) ? cmd.slice(0, 2) : ''
-  return prefix + join(pathPart, cmd)
-}
-
-const which = async (cmd, opt = {}) => {
-  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt)
-  const found = []
-
-  for (const envPart of pathEnv) {
-    const p = getPathPart(envPart, cmd)
-
-    for (const ext of pathExt) {
-      const withExt = p + ext
-      const is = await isexe(withExt, { pathExt: pathExtExe, ignoreErrors: true })
-      if (is) {
-        if (!opt.all) {
-          return withExt
-        }
-        found.push(withExt)
-      }
-    }
-  }
-
-  if (opt.all && found.length) {
-    return found
-  }
-
-  if (opt.nothrow) {
-    return null
-  }
-
-  throw getNotFoundError(cmd)
-}
-
-const whichSync = (cmd, opt = {}) => {
-  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt)
-  const found = []
-
-  for (const pathEnvPart of pathEnv) {
-    const p = getPathPart(pathEnvPart, cmd)
-
-    for (const ext of pathExt) {
-      const withExt = p + ext
-      const is = isexe.sync(withExt, { pathExt: pathExtExe, ignoreErrors: true })
-      if (is) {
-        if (!opt.all) {
-          return withExt
-        }
-        found.push(withExt)
-      }
-    }
-  }
-
-  if (opt.all && found.length) {
-    return found
-  }
-
-  if (opt.nothrow) {
-    return null
-  }
-
-  throw getNotFoundError(cmd)
-}
-
-module.exports = which
-which.sync = whichSync
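
As with run-script and promise-spawn, the nested which copy is dropped in favor of the copy resolved higher in the tree; the async and sync entry points are used the same way, as in this sketch:

    const which = require('which')

    which('node').then(p => console.log(p)) // e.g. /usr/local/bin/node
    console.log(which.sync('node', { nothrow: true })) // path, or null if absent
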
diff --git a/node_modules/pacote/node_modules/which/package.json b/node_modules/pacote/node_modules/which/package.json
deleted file mode 100644
index 989e01c9a3683..0000000000000
--- a/node_modules/pacote/node_modules/which/package.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
-  "author": "GitHub Inc.",
-  "name": "which",
-  "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
-  "version": "3.0.1",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/node-which.git"
-  },
-  "main": "lib/index.js",
-  "bin": {
-    "node-which": "./bin/which.js"
-  },
-  "license": "ISC",
-  "dependencies": {
-    "isexe": "^2.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.14.1",
-    "tap": "^16.3.0"
-  },
-  "scripts": {
-    "test": "tap",
-    "lint": "eslint \"**/*.js\"",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "lintfix": "npm run lint -- --fix",
-    "snap": "tap",
-    "posttest": "npm run lint"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "tap": {
-    "check-coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.14.1",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json
index 44236542285c8..4654b03d988c3 100644
--- a/node_modules/pacote/package.json
+++ b/node_modules/pacote/package.json
@@ -1,6 +1,6 @@
 {
   "name": "pacote",
-  "version": "17.0.3",
+  "version": "17.0.4",
   "description": "JavaScript package downloader",
   "author": "GitHub Inc.",
   "bin": {
@@ -46,8 +46,8 @@
   "dependencies": {
     "@npmcli/git": "^5.0.0",
     "@npmcli/installed-package-contents": "^2.0.1",
-    "@npmcli/promise-spawn": "^6.0.1",
-    "@npmcli/run-script": "^6.0.0",
+    "@npmcli/promise-spawn": "^7.0.0",
+    "@npmcli/run-script": "^7.0.0",
     "cacache": "^18.0.0",
     "fs-minipass": "^3.0.0",
     "minipass": "^7.0.2",
diff --git a/package-lock.json b/package-lock.json
index fd926e1d2637c..2f71fd3ecdaf9 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -139,7 +139,7 @@
         "npm-user-validate": "^2.0.0",
         "npmlog": "^7.0.1",
         "p-map": "^4.0.0",
-        "pacote": "^17.0.3",
+        "pacote": "^17.0.4",
         "parse-conflict-json": "^3.0.1",
         "proc-log": "^3.0.0",
         "qrcode-terminal": "^0.12.0",
@@ -230,7 +230,7 @@
         "json-stringify-safe": "^5.0.1",
         "nock": "^13.3.3",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^17.0.3",
+        "pacote": "^17.0.4",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -10529,15 +10529,15 @@
       }
     },
     "node_modules/pacote": {
-      "version": "17.0.3",
-      "resolved": "https://registry.npmjs.org/pacote/-/pacote-17.0.3.tgz",
-      "integrity": "sha512-nT66y5NK2u/d7qV9lP6ye+powAufDl6OHT+aOZ4Cmtq89GSqgB05Ar6aQ7DM+0+bIE5NCdYUcqFlkK4m/0LVHA==",
+      "version": "17.0.4",
+      "resolved": "https://registry.npmjs.org/pacote/-/pacote-17.0.4.tgz",
+      "integrity": "sha512-eGdLHrV/g5b5MtD5cTPyss+JxOlaOloSMG3UwPMAvL8ywaLJ6beONPF40K4KKl/UI6q5hTKCJq5rCu8tkF+7Dg==",
       "inBundle": true,
       "dependencies": {
         "@npmcli/git": "^5.0.0",
         "@npmcli/installed-package-contents": "^2.0.1",
-        "@npmcli/promise-spawn": "^6.0.1",
-        "@npmcli/run-script": "^6.0.0",
+        "@npmcli/promise-spawn": "^7.0.0",
+        "@npmcli/run-script": "^7.0.0",
         "cacache": "^18.0.0",
         "fs-minipass": "^3.0.0",
         "minipass": "^7.0.2",
@@ -10560,49 +10560,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/pacote/node_modules/@npmcli/promise-spawn": {
-      "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-6.0.2.tgz",
-      "integrity": "sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg==",
-      "inBundle": true,
-      "dependencies": {
-        "which": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/pacote/node_modules/@npmcli/run-script": {
-      "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-6.0.2.tgz",
-      "integrity": "sha512-NCcr1uQo1k5U+SYlnIrbAh3cxy+OQT1VtqiAbxdymSlptbzBb62AjH2xXgjNCoP073hoa1CfCAcwoZ8k96C4nA==",
-      "inBundle": true,
-      "dependencies": {
-        "@npmcli/node-gyp": "^3.0.0",
-        "@npmcli/promise-spawn": "^6.0.0",
-        "node-gyp": "^9.0.0",
-        "read-package-json-fast": "^3.0.0",
-        "which": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/pacote/node_modules/which": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
-      "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==",
-      "inBundle": true,
-      "dependencies": {
-        "isexe": "^2.0.0"
-      },
-      "bin": {
-        "node-which": "bin/which.js"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/parent-module": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -16054,7 +16011,7 @@
         "npm-pick-manifest": "^9.0.0",
         "npm-registry-fetch": "^16.0.0",
         "npmlog": "^7.0.1",
-        "pacote": "^17.0.3",
+        "pacote": "^17.0.4",
         "parse-conflict-json": "^3.0.0",
         "proc-log": "^3.0.0",
         "promise-all-reject-late": "^1.0.0",
@@ -16135,7 +16092,7 @@
         "diff": "^5.1.0",
         "minimatch": "^9.0.0",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^17.0.3",
+        "pacote": "^17.0.4",
         "tar": "^6.1.13"
       },
       "devDependencies": {
@@ -16156,7 +16113,7 @@
         "ci-info": "^3.7.1",
         "npm-package-arg": "^11.0.0",
         "npmlog": "^7.0.1",
-        "pacote": "^17.0.3",
+        "pacote": "^17.0.4",
         "proc-log": "^3.0.0",
         "read": "^2.0.0",
         "read-package-json-fast": "^3.0.2",
@@ -16234,7 +16191,7 @@
         "@npmcli/arborist": "^6.3.0",
         "@npmcli/run-script": "^7.0.0",
         "npm-package-arg": "^11.0.0",
-        "pacote": "^17.0.3"
+        "pacote": "^17.0.4"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^4.0.0",
diff --git a/package.json b/package.json
index 1e47c14a69e5a..677722dcc0b30 100644
--- a/package.json
+++ b/package.json
@@ -104,7 +104,7 @@
     "npm-user-validate": "^2.0.0",
     "npmlog": "^7.0.1",
     "p-map": "^4.0.0",
-    "pacote": "^17.0.3",
+    "pacote": "^17.0.4",
     "parse-conflict-json": "^3.0.1",
     "proc-log": "^3.0.0",
     "qrcode-terminal": "^0.12.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 3535fea3c2d12..aa8409eee0cf7 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -26,7 +26,7 @@
     "npm-pick-manifest": "^9.0.0",
     "npm-registry-fetch": "^16.0.0",
     "npmlog": "^7.0.1",
-    "pacote": "^17.0.3",
+    "pacote": "^17.0.4",
     "parse-conflict-json": "^3.0.0",
     "proc-log": "^3.0.0",
     "promise-all-reject-late": "^1.0.0",
diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json
index d2fe63d07219f..aa621ae418c52 100644
--- a/workspaces/libnpmdiff/package.json
+++ b/workspaces/libnpmdiff/package.json
@@ -53,7 +53,7 @@
     "diff": "^5.1.0",
     "minimatch": "^9.0.0",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^17.0.3",
+    "pacote": "^17.0.4",
     "tar": "^6.1.13"
   },
   "templateOSS": {
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index 2ca73add6dbc5..c6f638aba6553 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -64,7 +64,7 @@
     "ci-info": "^3.7.1",
     "npm-package-arg": "^11.0.0",
     "npmlog": "^7.0.1",
-    "pacote": "^17.0.3",
+    "pacote": "^17.0.4",
     "proc-log": "^3.0.0",
     "read": "^2.0.0",
     "read-package-json-fast": "^3.0.2",
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index aef88b5da7d6d..4294794519fb2 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -39,7 +39,7 @@
     "@npmcli/arborist": "^6.3.0",
     "@npmcli/run-script": "^7.0.0",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^17.0.3"
+    "pacote": "^17.0.4"
   },
   "engines": {
     "node": "^16.14.0 || >=18.0.0"

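Note on the range math above: every manifest already admitted pacote@17.0.4 through its ^17.0.3 caret range, so these edits only raise the recorded floor. A quick check, as a sketch (assumes the semver package is installed):

    // caret ranges float minor and patch but pin the major
    const semver = require('semver')
    console.log(semver.satisfies('17.0.4', '^17.0.3')) // true
    console.log(semver.satisfies('18.0.0', '^17.0.3')) // false: a new major needs an explicit bump
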
From 520b7a01cb5d2b79a44bc321a9023c56aa05e6c9 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Wed, 30 Aug 2023 11:08:28 -0700
Subject: [PATCH 65/68] deps: @npmcli/git@5.0.2

---
 node_modules/@npmcli/git/package.json |  4 ++--
 package-lock.json                     | 12 ++++++------
 package.json                          |  2 +-
 workspaces/libnpmversion/package.json |  2 +-
 4 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/git/package.json
index 3852c358ae4ef..4a25e53214843 100644
--- a/node_modules/@npmcli/git/package.json
+++ b/node_modules/@npmcli/git/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/git",
-  "version": "5.0.1",
+  "version": "5.0.2",
   "main": "lib/index.js",
   "files": [
     "bin/",
@@ -44,7 +44,7 @@
     "promise-inflight": "^1.0.1",
     "promise-retry": "^2.0.1",
     "semver": "^7.3.5",
-    "which": "^3.0.0"
+    "which": "^4.0.0"
   },
   "engines": {
     "node": "^16.14.0 || >=18.0.0"
diff --git a/package-lock.json b/package-lock.json
index 2f71fd3ecdaf9..da5d85219ee16 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -162,7 +162,7 @@
       "devDependencies": {
         "@npmcli/docs": "^1.0.0",
         "@npmcli/eslint-config": "^4.0.2",
-        "@npmcli/git": "^5.0.1",
+        "@npmcli/git": "^5.0.2",
         "@npmcli/mock-globals": "^1.0.0",
         "@npmcli/mock-registry": "^1.0.0",
         "@npmcli/template-oss": "4.18.0",
@@ -2452,9 +2452,9 @@
       }
     },
     "node_modules/@npmcli/git": {
-      "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.1.tgz",
-      "integrity": "sha512-9zUEqmRMZU5bmqWVu83wFVHH9kwLEQeMuDUDSYsBK/L4qbBl8Shdoc5EWfANzAdy5kFuPbBn7ToXTakbVdlCZg==",
+      "version": "5.0.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.2.tgz",
+      "integrity": "sha512-IKTmfAlPoDtlLk5Bpm4bqF8mrkNRkR1KXHIWOQlykXcwoV7ZacO44PPUMHB+aFOF0/dsSee+60NfGEItI4YDiw==",
       "inBundle": true,
       "dependencies": {
         "@npmcli/promise-spawn": "^6.0.0",
@@ -2464,7 +2464,7 @@
         "promise-inflight": "^1.0.1",
         "promise-retry": "^2.0.1",
         "semver": "^7.3.5",
-        "which": "^3.0.0"
+        "which": "^4.0.0"
       },
       "engines": {
         "node": "^16.14.0 || >=18.0.0"
@@ -16266,7 +16266,7 @@
       "version": "4.0.2",
       "license": "ISC",
       "dependencies": {
-        "@npmcli/git": "^5.0.1",
+        "@npmcli/git": "^5.0.2",
         "@npmcli/run-script": "^7.0.0",
         "json-parse-even-better-errors": "^3.0.0",
         "proc-log": "^3.0.0",
diff --git a/package.json b/package.json
index 677722dcc0b30..40dc8a8cf405b 100644
--- a/package.json
+++ b/package.json
@@ -193,7 +193,7 @@
   "devDependencies": {
     "@npmcli/docs": "^1.0.0",
     "@npmcli/eslint-config": "^4.0.2",
-    "@npmcli/git": "^5.0.1",
+    "@npmcli/git": "^5.0.2",
     "@npmcli/mock-globals": "^1.0.0",
     "@npmcli/mock-registry": "^1.0.0",
     "@npmcli/template-oss": "4.18.0",
diff --git a/workspaces/libnpmversion/package.json b/workspaces/libnpmversion/package.json
index 57723c4b41b26..8c49988dcf63d 100644
--- a/workspaces/libnpmversion/package.json
+++ b/workspaces/libnpmversion/package.json
@@ -37,7 +37,7 @@
     "tap": "^16.3.8"
   },
   "dependencies": {
-    "@npmcli/git": "^5.0.1",
+    "@npmcli/git": "^5.0.2",
     "@npmcli/run-script": "^7.0.0",
     "json-parse-even-better-errors": "^3.0.0",
     "proc-log": "^3.0.0",

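The substantive change in @npmcli/git@5.0.2 is the major bump of which from ^3.0.0 to ^4.0.0; the lookup surface carries over. A minimal sketch of the kind of binary resolution @npmcli/git performs (illustrative, not the exact internals):

    const which = require('which')
    // nothrow returns null instead of throwing ENOENT when git is missing
    const gitPath = which.sync('git', { nothrow: true })
    if (!gitPath) {
      throw Object.assign(new Error('git not found in PATH'), { code: 'ENOGIT' })
    }
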
From 1394a212e9e9536aaf0aba9b8f7b7e300f22d912 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Wed, 30 Aug 2023 11:09:00 -0700
Subject: [PATCH 66/68] deps: @npmcli/run-script@7.0.1

---
 node_modules/.gitignore                       |   6 -
 .../@npmcli/promise-spawn/LICENSE             |  15 --
 .../@npmcli/promise-spawn/lib/escape.js       |  68 ------
 .../@npmcli/promise-spawn/lib/index.js        | 195 ------------------
 .../@npmcli/promise-spawn/package.json        |  50 -----
 .../run-script/node_modules/which/LICENSE     |  15 --
 .../node_modules/which/bin/which.js           |  52 -----
 .../node_modules/which/lib/index.js           | 115 -----------
 .../node_modules/which/package.json           |  51 -----
 node_modules/@npmcli/run-script/package.json  |   4 +-
 package-lock.json                             |  45 +---
 package.json                                  |   2 +-
 workspaces/arborist/package.json              |   2 +-
 workspaces/libnpmexec/package.json            |   2 +-
 workspaces/libnpmpack/package.json            |   2 +-
 workspaces/libnpmversion/package.json         |   2 +-
 16 files changed, 16 insertions(+), 610 deletions(-)
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/LICENSE
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/escape.js
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/index.js
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/package.json
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/which/LICENSE
 delete mode 100755 node_modules/@npmcli/run-script/node_modules/which/bin/which.js
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/which/lib/index.js
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/which/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 93e03b09bef97..e626abc08a518 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -43,12 +43,6 @@
 !/@npmcli/promise-spawn
 !/@npmcli/query
 !/@npmcli/run-script
-!/@npmcli/run-script/node_modules/
-/@npmcli/run-script/node_modules/*
-!/@npmcli/run-script/node_modules/@npmcli/
-/@npmcli/run-script/node_modules/@npmcli/*
-!/@npmcli/run-script/node_modules/@npmcli/promise-spawn
-!/@npmcli/run-script/node_modules/which
 !/@pkgjs/
 /@pkgjs/*
 !/@pkgjs/parseargs
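
These .gitignore negations existed only to keep the nested copies vendored; they can go because @npmcli/run-script now accepts the hoisted majors, so npm dedupes the tree and the nested directories below are deleted outright. The range math, with illustrative versions (assumes semver):

    const semver = require('semver')
    semver.satisfies('7.0.0', '^7.0.0') // true -> nested @npmcli/promise-spawn@6.0.2 is redundant
    semver.satisfies('4.0.0', '^4.0.0') // true -> nested which@3.0.1 is redundant
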
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/LICENSE b/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/LICENSE
deleted file mode 100644
index 8f90f96f4c6c5..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
-OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
-DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
-ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
-SOFTWARE.
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/escape.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/escape.js
deleted file mode 100644
index 9aca8bde70a6e..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/escape.js
+++ /dev/null
@@ -1,68 +0,0 @@
-'use strict'
-
-// eslint-disable-next-line max-len
-// this code adapted from: https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
-const cmd = (input, doubleEscape) => {
-  if (!input.length) {
-    return '""'
-  }
-
-  let result
-  if (!/[ \t\n\v"]/.test(input)) {
-    result = input
-  } else {
-    result = '"'
-    for (let i = 0; i <= input.length; ++i) {
-      let slashCount = 0
-      while (input[i] === '\\') {
-        ++i
-        ++slashCount
-      }
-
-      if (i === input.length) {
-        result += '\\'.repeat(slashCount * 2)
-        break
-      }
-
-      if (input[i] === '"') {
-        result += '\\'.repeat(slashCount * 2 + 1)
-        result += input[i]
-      } else {
-        result += '\\'.repeat(slashCount)
-        result += input[i]
-      }
-    }
-    result += '"'
-  }
-
-  // and finally, prefix shell meta chars with a ^
-  result = result.replace(/[ !%^&()<>|"]/g, '^$&')
-  if (doubleEscape) {
-    result = result.replace(/[ !%^&()<>|"]/g, '^$&')
-  }
-
-  return result
-}
-
-const sh = (input) => {
-  if (!input.length) {
-    return `''`
-  }
-
-  if (!/[\t\n\r "#$&'()*;<>?\\`|~]/.test(input)) {
-    return input
-  }
-
-  // replace single quotes with '\'' and wrap the whole result in a fresh set of quotes
-  const result = `'${input.replace(/'/g, `'\\''`)}'`
-    // if the input string already had single quotes around it, clean those up
-    .replace(/^(?:'')+(?!$)/, '')
-    .replace(/\\'''/g, `\\'`)
-
-  return result
-}
-
-module.exports = {
-  cmd,
-  sh,
-}
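
These escape helpers are not lost: with @npmcli/run-script on @npmcli/promise-spawn@^7, the hoisted copy ships the same module, so only the nested duplicate goes away. For reference, what the two escapers produce (a sketch; the deep require path is package-internal and may change):

    const { cmd, sh } = require('@npmcli/promise-spawn/lib/escape.js')
    console.log(sh("it's here")) // => 'it'\''s here' (POSIX single-quote splicing)
    console.log(cmd('a&b'))      // => a^&b (cmd.exe metacharacters get a ^ prefix)
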
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/index.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/index.js
deleted file mode 100644
index 571ff6b9169c9..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/lib/index.js
+++ /dev/null
@@ -1,195 +0,0 @@
-'use strict'
-
-const { spawn } = require('child_process')
-const os = require('os')
-const which = require('which')
-
-const escape = require('./escape.js')
-
-// 'extra' object is for decorating the error a bit more
-const promiseSpawn = (cmd, args, opts = {}, extra = {}) => {
-  if (opts.shell) {
-    return spawnWithShell(cmd, args, opts, extra)
-  }
-
-  let proc
-
-  const p = new Promise((res, rej) => {
-    proc = spawn(cmd, args, opts)
-
-    const stdout = []
-    const stderr = []
-
-    const reject = er => rej(Object.assign(er, {
-      cmd,
-      args,
-      ...stdioResult(stdout, stderr, opts),
-      ...extra,
-    }))
-
-    proc.on('error', reject)
-
-    if (proc.stdout) {
-      proc.stdout.on('data', c => stdout.push(c)).on('error', reject)
-      proc.stdout.on('error', er => reject(er))
-    }
-
-    if (proc.stderr) {
-      proc.stderr.on('data', c => stderr.push(c)).on('error', reject)
-      proc.stderr.on('error', er => reject(er))
-    }
-
-    proc.on('close', (code, signal) => {
-      const result = {
-        cmd,
-        args,
-        code,
-        signal,
-        ...stdioResult(stdout, stderr, opts),
-        ...extra,
-      }
-
-      if (code || signal) {
-        rej(Object.assign(new Error('command failed'), result))
-      } else {
-        res(result)
-      }
-    })
-  })
-
-  p.stdin = proc.stdin
-  p.process = proc
-  return p
-}
-
-const spawnWithShell = (cmd, args, opts, extra) => {
-  let command = opts.shell
-  // if shell is set to true, we use a platform default. we can't let the core
-  // spawn method decide this for us because we need to know what shell is in use
-  // ahead of time so that we can escape arguments properly. we don't need coverage here.
-  if (command === true) {
-    // istanbul ignore next
-    command = process.platform === 'win32' ? process.env.ComSpec : 'sh'
-  }
-
-  const options = { ...opts, shell: false }
-  const realArgs = []
-  let script = cmd
-
-  // first, determine if we're in windows because if we are we need to know if we're
-  // running an .exe or a .cmd/.bat since the latter requires extra escaping
-  const isCmd = /(?:^|\\)cmd(?:\.exe)?$/i.test(command)
-  if (isCmd) {
-    let doubleEscape = false
-
-    // find the actual command we're running
-    let initialCmd = ''
-    let insideQuotes = false
-    for (let i = 0; i < cmd.length; ++i) {
-      const char = cmd.charAt(i)
-      if (char === ' ' && !insideQuotes) {
-        break
-      }
-
-      initialCmd += char
-      if (char === '"' || char === "'") {
-        insideQuotes = !insideQuotes
-      }
-    }
-
-    let pathToInitial
-    try {
-      pathToInitial = which.sync(initialCmd, {
-        path: (options.env && options.env.PATH) || process.env.PATH,
-        pathext: (options.env && options.env.PATHEXT) || process.env.PATHEXT,
-      }).toLowerCase()
-    } catch (err) {
-      pathToInitial = initialCmd.toLowerCase()
-    }
-
-    doubleEscape = pathToInitial.endsWith('.cmd') || pathToInitial.endsWith('.bat')
-    for (const arg of args) {
-      script += ` ${escape.cmd(arg, doubleEscape)}`
-    }
-    realArgs.push('/d', '/s', '/c', script)
-    options.windowsVerbatimArguments = true
-  } else {
-    for (const arg of args) {
-      script += ` ${escape.sh(arg)}`
-    }
-    realArgs.push('-c', script)
-  }
-
-  return promiseSpawn(command, realArgs, options, extra)
-}
-
-// open a file with the default application as defined by the user's OS
-const open = (_args, opts = {}, extra = {}) => {
-  const options = { ...opts, shell: true }
-  const args = [].concat(_args)
-
-  let platform = process.platform
-  // process.platform === 'linux' may actually indicate WSL, if that's the case
-  // we want to treat things as win32 anyway so the host can open the argument
-  if (platform === 'linux' && os.release().toLowerCase().includes('microsoft')) {
-    platform = 'win32'
-  }
-
-  let command = options.command
-  if (!command) {
-    if (platform === 'win32') {
-      // spawnWithShell does not do the additional os.release() check, so we
-      // have to force the shell here to make sure we treat WSL as windows.
-      options.shell = process.env.ComSpec
-      // also, the start command accepts a title so to make sure that we don't
-      // accidentally interpret the first arg as the title, we stick an empty
-      // string immediately after the start command
-      command = 'start ""'
-    } else if (platform === 'darwin') {
-      command = 'open'
-    } else {
-      command = 'xdg-open'
-    }
-  }
-
-  return spawnWithShell(command, args, options, extra)
-}
-promiseSpawn.open = open
-
-const isPipe = (stdio = 'pipe', fd) => {
-  if (stdio === 'pipe' || stdio === null) {
-    return true
-  }
-
-  if (Array.isArray(stdio)) {
-    return isPipe(stdio[fd], fd)
-  }
-
-  return false
-}
-
-const stdioResult = (stdout, stderr, { stdioString = true, stdio }) => {
-  const result = {
-    stdout: null,
-    stderr: null,
-  }
-
-  // stdio is [stdin, stdout, stderr]
-  if (isPipe(stdio, 1)) {
-    result.stdout = Buffer.concat(stdout)
-    if (stdioString) {
-      result.stdout = result.stdout.toString().trim()
-    }
-  }
-
-  if (isPipe(stdio, 2)) {
-    result.stderr = Buffer.concat(stderr)
-    if (stdioString) {
-      result.stderr = result.stderr.toString().trim()
-    }
-  }
-
-  return result
-}
-
-module.exports = promiseSpawn
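
Consumers are unaffected by the dedupe; @npmcli/promise-spawn@7 keeps the same call shape. A minimal usage sketch:

    const promiseSpawn = require('@npmcli/promise-spawn')
    promiseSpawn('git', ['--version'], { stdioString: true })
      .then(({ code, stdout }) => console.log(code, stdout))
      // rejections are decorated with cmd, args, stdout, and stderr, as above
      .catch(er => console.error(er.cmd, er.stderr))
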
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/package.json b/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/package.json
deleted file mode 100644
index 2080d9f5be9f0..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn/package.json
+++ /dev/null
@@ -1,50 +0,0 @@
-{
-  "name": "@npmcli/promise-spawn",
-  "version": "6.0.2",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "main": "./lib/index.js",
-  "description": "spawn processes the way the npm cli likes to do",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/promise-spawn.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "tap": {
-    "check-coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.11.0",
-    "minipass": "^4.0.0",
-    "spawk": "^1.7.1",
-    "tap": "^16.0.1"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.11.0"
-  },
-  "dependencies": {
-    "which": "^3.0.0"
-  }
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/which/LICENSE b/node_modules/@npmcli/run-script/node_modules/which/LICENSE
deleted file mode 100644
index 19129e315fe59..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/which/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/run-script/node_modules/which/bin/which.js b/node_modules/@npmcli/run-script/node_modules/which/bin/which.js
deleted file mode 100755
index 6df16f21acf93..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/which/bin/which.js
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env node
-
-const which = require('../lib')
-const argv = process.argv.slice(2)
-
-const usage = (err) => {
-  if (err) {
-    console.error(`which: ${err}`)
-  }
-  console.error('usage: which [-as] program ...')
-  process.exit(1)
-}
-
-if (!argv.length) {
-  return usage()
-}
-
-let dashdash = false
-const [commands, flags] = argv.reduce((acc, arg) => {
-  if (dashdash || arg === '--') {
-    dashdash = true
-    return acc
-  }
-
-  if (!/^-/.test(arg)) {
-    acc[0].push(arg)
-    return acc
-  }
-
-  for (const flag of arg.slice(1).split('')) {
-    if (flag === 's') {
-      acc[1].silent = true
-    } else if (flag === 'a') {
-      acc[1].all = true
-    } else {
-      usage(`illegal option -- ${flag}`)
-    }
-  }
-
-  return acc
-}, [[], {}])
-
-for (const command of commands) {
-  try {
-    const res = which.sync(command, { all: flags.all })
-    if (!flags.silent) {
-      console.log([].concat(res).join('\n'))
-    }
-  } catch (err) {
-    process.exitCode = 1
-  }
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/which/lib/index.js b/node_modules/@npmcli/run-script/node_modules/which/lib/index.js
deleted file mode 100644
index 52e9ea62377e7..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/which/lib/index.js
+++ /dev/null
@@ -1,115 +0,0 @@
-const isexe = require('isexe')
-const { join, delimiter, sep, posix } = require('path')
-
-const isWindows = process.platform === 'win32'
-
-// used to check for slashes in commands passed in. always checks for the posix
-// separator on all platforms, and checks for the current separator when not on
-// a posix platform. don't use the isWindows check for this since that is mocked
-// in tests but we still need the code to actually work when called. that is also
-// why it is ignored from coverage.
-/* istanbul ignore next */
-const rSlash = new RegExp(`[${posix.sep}${sep === posix.sep ? '' : sep}]`.replace(/(\\)/g, '\\$1'))
-const rRel = new RegExp(`^\\.${rSlash.source}`)
-
-const getNotFoundError = (cmd) =>
-  Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' })
-
-const getPathInfo = (cmd, {
-  path: optPath = process.env.PATH,
-  pathExt: optPathExt = process.env.PATHEXT,
-  delimiter: optDelimiter = delimiter,
-}) => {
-  // If it has a slash, then we don't bother searching the pathenv.
-  // just check the file itself, and that's it.
-  const pathEnv = cmd.match(rSlash) ? [''] : [
-    // windows always checks the cwd first
-    ...(isWindows ? [process.cwd()] : []),
-    ...(optPath || /* istanbul ignore next: very unusual */ '').split(optDelimiter),
-  ]
-
-  if (isWindows) {
-    const pathExtExe = optPathExt ||
-      ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter)
-    const pathExt = pathExtExe.split(optDelimiter).reduce((acc, item) => {
-      acc.push(item)
-      acc.push(item.toLowerCase())
-      return acc
-    }, [])
-    if (cmd.includes('.') && pathExt[0] !== '') {
-      pathExt.unshift('')
-    }
-    return { pathEnv, pathExt, pathExtExe }
-  }
-
-  return { pathEnv, pathExt: [''] }
-}
-
-const getPathPart = (raw, cmd) => {
-  const pathPart = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw
-  const prefix = !pathPart && rRel.test(cmd) ? cmd.slice(0, 2) : ''
-  return prefix + join(pathPart, cmd)
-}
-
-const which = async (cmd, opt = {}) => {
-  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt)
-  const found = []
-
-  for (const envPart of pathEnv) {
-    const p = getPathPart(envPart, cmd)
-
-    for (const ext of pathExt) {
-      const withExt = p + ext
-      const is = await isexe(withExt, { pathExt: pathExtExe, ignoreErrors: true })
-      if (is) {
-        if (!opt.all) {
-          return withExt
-        }
-        found.push(withExt)
-      }
-    }
-  }
-
-  if (opt.all && found.length) {
-    return found
-  }
-
-  if (opt.nothrow) {
-    return null
-  }
-
-  throw getNotFoundError(cmd)
-}
-
-const whichSync = (cmd, opt = {}) => {
-  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt)
-  const found = []
-
-  for (const pathEnvPart of pathEnv) {
-    const p = getPathPart(pathEnvPart, cmd)
-
-    for (const ext of pathExt) {
-      const withExt = p + ext
-      const is = isexe.sync(withExt, { pathExt: pathExtExe, ignoreErrors: true })
-      if (is) {
-        if (!opt.all) {
-          return withExt
-        }
-        found.push(withExt)
-      }
-    }
-  }
-
-  if (opt.all && found.length) {
-    return found
-  }
-
-  if (opt.nothrow) {
-    return null
-  }
-
-  throw getNotFoundError(cmd)
-}
-
-module.exports = which
-which.sync = whichSync
diff --git a/node_modules/@npmcli/run-script/node_modules/which/package.json b/node_modules/@npmcli/run-script/node_modules/which/package.json
deleted file mode 100644
index 989e01c9a3683..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/which/package.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
-  "author": "GitHub Inc.",
-  "name": "which",
-  "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
-  "version": "3.0.1",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/node-which.git"
-  },
-  "main": "lib/index.js",
-  "bin": {
-    "node-which": "./bin/which.js"
-  },
-  "license": "ISC",
-  "dependencies": {
-    "isexe": "^2.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.14.1",
-    "tap": "^16.3.0"
-  },
-  "scripts": {
-    "test": "tap",
-    "lint": "eslint \"**/*.js\"",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "lintfix": "npm run lint -- --fix",
-    "snap": "tap",
-    "posttest": "npm run lint"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "tap": {
-    "check-coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.14.1",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/@npmcli/run-script/package.json b/node_modules/@npmcli/run-script/package.json
index a54df3d1cb609..7e7d256157156 100644
--- a/node_modules/@npmcli/run-script/package.json
+++ b/node_modules/@npmcli/run-script/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/run-script",
-  "version": "7.0.0",
+  "version": "7.0.1",
   "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
   "author": "GitHub Inc.",
   "license": "ISC",
@@ -22,7 +22,7 @@
   },
   "dependencies": {
     "@npmcli/node-gyp": "^3.0.0",
-    "@npmcli/promise-spawn": "^6.0.0",
+    "@npmcli/promise-spawn": "^7.0.0",
     "node-gyp": "^9.0.0",
     "read-package-json-fast": "^3.0.0",
     "which": "^4.0.0"
diff --git a/package-lock.json b/package-lock.json
index da5d85219ee16..9eee2ff11d9fe 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -93,7 +93,7 @@
         "@npmcli/map-workspaces": "^3.0.4",
         "@npmcli/package-json": "^5.0.0",
         "@npmcli/promise-spawn": "^7.0.0",
-        "@npmcli/run-script": "^7.0.0",
+        "@npmcli/run-script": "^7.0.1",
         "@sigstore/tuf": "^2.1.0",
         "abbrev": "^2.0.0",
         "archy": "~1.0.0",
@@ -2610,13 +2610,13 @@
       }
     },
     "node_modules/@npmcli/run-script": {
-      "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-7.0.0.tgz",
-      "integrity": "sha512-JpR7jgCPIKnke0/xJUSYgWKk6BtWQ2FPoVm4lEC4gTDOjIpBJgAfwg+nGaCwnzY8oq6I5F4r+sI1jGJk/iCh/w==",
+      "version": "7.0.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-7.0.1.tgz",
+      "integrity": "sha512-Od/JMrgkjZ8alyBE0IzeqZDiF1jgMez9Gkc/OYrCkHHiXNwM0wc6s7+h+xM7kYDZkS0tAoOLr9VvygyE5+2F7g==",
       "inBundle": true,
       "dependencies": {
         "@npmcli/node-gyp": "^3.0.0",
-        "@npmcli/promise-spawn": "^6.0.0",
+        "@npmcli/promise-spawn": "^7.0.0",
         "node-gyp": "^9.0.0",
         "read-package-json-fast": "^3.0.0",
         "which": "^4.0.0"
@@ -2625,33 +2625,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn": {
-      "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-6.0.2.tgz",
-      "integrity": "sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg==",
-      "inBundle": true,
-      "dependencies": {
-        "which": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/which": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
-      "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==",
-      "inBundle": true,
-      "dependencies": {
-        "isexe": "^2.0.0"
-      },
-      "bin": {
-        "node-which": "bin/which.js"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/@npmcli/smoke-tests": {
       "resolved": "smoke-tests",
       "link": true
@@ -15997,7 +15970,7 @@
         "@npmcli/node-gyp": "^3.0.0",
         "@npmcli/package-json": "^5.0.0",
         "@npmcli/query": "^3.0.0",
-        "@npmcli/run-script": "^7.0.0",
+        "@npmcli/run-script": "^7.0.1",
         "bin-links": "^4.0.1",
         "cacache": "^18.0.0",
         "common-ancestor-path": "^1.0.1",
@@ -16109,7 +16082,7 @@
       "license": "ISC",
       "dependencies": {
         "@npmcli/arborist": "^6.3.0",
-        "@npmcli/run-script": "^7.0.0",
+        "@npmcli/run-script": "^7.0.1",
         "ci-info": "^3.7.1",
         "npm-package-arg": "^11.0.0",
         "npmlog": "^7.0.1",
@@ -16189,7 +16162,7 @@
       "license": "ISC",
       "dependencies": {
         "@npmcli/arborist": "^6.3.0",
-        "@npmcli/run-script": "^7.0.0",
+        "@npmcli/run-script": "^7.0.1",
         "npm-package-arg": "^11.0.0",
         "pacote": "^17.0.4"
       },
@@ -16267,7 +16240,7 @@
       "license": "ISC",
       "dependencies": {
         "@npmcli/git": "^5.0.2",
-        "@npmcli/run-script": "^7.0.0",
+        "@npmcli/run-script": "^7.0.1",
         "json-parse-even-better-errors": "^3.0.0",
         "proc-log": "^3.0.0",
         "semver": "^7.3.7"
diff --git a/package.json b/package.json
index 40dc8a8cf405b..f27abc10c9bbc 100644
--- a/package.json
+++ b/package.json
@@ -58,7 +58,7 @@
     "@npmcli/map-workspaces": "^3.0.4",
     "@npmcli/package-json": "^5.0.0",
     "@npmcli/promise-spawn": "^7.0.0",
-    "@npmcli/run-script": "^7.0.0",
+    "@npmcli/run-script": "^7.0.1",
     "@sigstore/tuf": "^2.1.0",
     "abbrev": "^2.0.0",
     "archy": "~1.0.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index aa8409eee0cf7..9701d958c3de0 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -12,7 +12,7 @@
     "@npmcli/node-gyp": "^3.0.0",
     "@npmcli/package-json": "^5.0.0",
     "@npmcli/query": "^3.0.0",
-    "@npmcli/run-script": "^7.0.0",
+    "@npmcli/run-script": "^7.0.1",
     "bin-links": "^4.0.1",
     "cacache": "^18.0.0",
     "common-ancestor-path": "^1.0.1",
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index c6f638aba6553..210e362099fcf 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -60,7 +60,7 @@
   },
   "dependencies": {
     "@npmcli/arborist": "^6.3.0",
-    "@npmcli/run-script": "^7.0.0",
+    "@npmcli/run-script": "^7.0.1",
     "ci-info": "^3.7.1",
     "npm-package-arg": "^11.0.0",
     "npmlog": "^7.0.1",
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index 4294794519fb2..dc6c96c9a521c 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -37,7 +37,7 @@
   "homepage": "https://npmjs.com/package/libnpmpack",
   "dependencies": {
     "@npmcli/arborist": "^6.3.0",
-    "@npmcli/run-script": "^7.0.0",
+    "@npmcli/run-script": "^7.0.1",
     "npm-package-arg": "^11.0.0",
     "pacote": "^17.0.4"
   },
diff --git a/workspaces/libnpmversion/package.json b/workspaces/libnpmversion/package.json
index 8c49988dcf63d..bb58fd19553b3 100644
--- a/workspaces/libnpmversion/package.json
+++ b/workspaces/libnpmversion/package.json
@@ -38,7 +38,7 @@
   },
   "dependencies": {
     "@npmcli/git": "^5.0.2",
-    "@npmcli/run-script": "^7.0.0",
+    "@npmcli/run-script": "^7.0.1",
     "json-parse-even-better-errors": "^3.0.0",
     "proc-log": "^3.0.0",
     "semver": "^7.3.7"

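Since @npmcli/run-script@7.0.1 changes nothing but dependency ranges in this commit, its entry point behaves as before. Typical invocation, as a sketch (the event and path values are illustrative):

    const runScript = require('@npmcli/run-script')
    runScript({ event: 'test', path: '/path/to/pkg' })
      .then(({ code }) => console.log('exit', code))
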
From 46812cc422203e03a78ee07b0ea09eae8211cba1 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Wed, 30 Aug 2023 11:28:26 -0700
Subject: [PATCH 67/68] deps: @npmcli/git@5.0.3

---
 node_modules/.gitignore                       |   6 -
 .../@npmcli/promise-spawn/LICENSE             |  15 --
 .../@npmcli/promise-spawn/lib/escape.js       |  68 ------
 .../@npmcli/promise-spawn/lib/index.js        | 195 ------------------
 .../@npmcli/promise-spawn/package.json        |  50 -----
 .../@npmcli/git/node_modules/which/LICENSE    |  15 --
 .../git/node_modules/which/bin/which.js       |  52 -----
 .../git/node_modules/which/lib/index.js       | 115 -----------
 .../git/node_modules/which/package.json       |  51 -----
 node_modules/@npmcli/git/package.json         |   4 +-
 package-lock.json                             |  39 +---
 package.json                                  |   2 +-
 workspaces/libnpmversion/package.json         |   2 +-
 13 files changed, 10 insertions(+), 604 deletions(-)
 delete mode 100644 node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/LICENSE
 delete mode 100644 node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/escape.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/index.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/package.json
 delete mode 100644 node_modules/@npmcli/git/node_modules/which/LICENSE
 delete mode 100755 node_modules/@npmcli/git/node_modules/which/bin/which.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/which/lib/index.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/which/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index e626abc08a518..59812d3aa24b6 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -28,12 +28,6 @@
 !/@npmcli/disparity-colors
 !/@npmcli/fs
 !/@npmcli/git
-!/@npmcli/git/node_modules/
-/@npmcli/git/node_modules/*
-!/@npmcli/git/node_modules/@npmcli/
-/@npmcli/git/node_modules/@npmcli/*
-!/@npmcli/git/node_modules/@npmcli/promise-spawn
-!/@npmcli/git/node_modules/which
 !/@npmcli/installed-package-contents
 !/@npmcli/map-workspaces
 !/@npmcli/metavuln-calculator
diff --git a/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/LICENSE b/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/LICENSE
deleted file mode 100644
index 8f90f96f4c6c5..0000000000000
--- a/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
-OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
-DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
-ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
-SOFTWARE.
diff --git a/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/escape.js b/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/escape.js
deleted file mode 100644
index 9aca8bde70a6e..0000000000000
--- a/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/escape.js
+++ /dev/null
@@ -1,68 +0,0 @@
-'use strict'
-
-// eslint-disable-next-line max-len
-// this code adapted from: https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
-const cmd = (input, doubleEscape) => {
-  if (!input.length) {
-    return '""'
-  }
-
-  let result
-  if (!/[ \t\n\v"]/.test(input)) {
-    result = input
-  } else {
-    result = '"'
-    for (let i = 0; i <= input.length; ++i) {
-      let slashCount = 0
-      while (input[i] === '\\') {
-        ++i
-        ++slashCount
-      }
-
-      if (i === input.length) {
-        result += '\\'.repeat(slashCount * 2)
-        break
-      }
-
-      if (input[i] === '"') {
-        result += '\\'.repeat(slashCount * 2 + 1)
-        result += input[i]
-      } else {
-        result += '\\'.repeat(slashCount)
-        result += input[i]
-      }
-    }
-    result += '"'
-  }
-
-  // and finally, prefix shell meta chars with a ^
-  result = result.replace(/[ !%^&()<>|"]/g, '^$&')
-  if (doubleEscape) {
-    result = result.replace(/[ !%^&()<>|"]/g, '^$&')
-  }
-
-  return result
-}
-
-const sh = (input) => {
-  if (!input.length) {
-    return `''`
-  }
-
-  if (!/[\t\n\r "#$&'()*;<>?\\`|~]/.test(input)) {
-    return input
-  }
-
-  // replace single quotes with '\'' and wrap the whole result in a fresh set of quotes
-  const result = `'${input.replace(/'/g, `'\\''`)}'`
-    // if the input string already had single quotes around it, clean those up
-    .replace(/^(?:'')+(?!$)/, '')
-    .replace(/\\'''/g, `\\'`)
-
-  return result
-}
-
-module.exports = {
-  cmd,
-  sh,
-}
diff --git a/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/index.js b/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/index.js
deleted file mode 100644
index 571ff6b9169c9..0000000000000
--- a/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/lib/index.js
+++ /dev/null
@@ -1,195 +0,0 @@
-'use strict'
-
-const { spawn } = require('child_process')
-const os = require('os')
-const which = require('which')
-
-const escape = require('./escape.js')
-
-// 'extra' object is for decorating the error a bit more
-const promiseSpawn = (cmd, args, opts = {}, extra = {}) => {
-  if (opts.shell) {
-    return spawnWithShell(cmd, args, opts, extra)
-  }
-
-  let proc
-
-  const p = new Promise((res, rej) => {
-    proc = spawn(cmd, args, opts)
-
-    const stdout = []
-    const stderr = []
-
-    const reject = er => rej(Object.assign(er, {
-      cmd,
-      args,
-      ...stdioResult(stdout, stderr, opts),
-      ...extra,
-    }))
-
-    proc.on('error', reject)
-
-    if (proc.stdout) {
-      proc.stdout.on('data', c => stdout.push(c)).on('error', reject)
-      proc.stdout.on('error', er => reject(er))
-    }
-
-    if (proc.stderr) {
-      proc.stderr.on('data', c => stderr.push(c)).on('error', reject)
-      proc.stderr.on('error', er => reject(er))
-    }
-
-    proc.on('close', (code, signal) => {
-      const result = {
-        cmd,
-        args,
-        code,
-        signal,
-        ...stdioResult(stdout, stderr, opts),
-        ...extra,
-      }
-
-      if (code || signal) {
-        rej(Object.assign(new Error('command failed'), result))
-      } else {
-        res(result)
-      }
-    })
-  })
-
-  p.stdin = proc.stdin
-  p.process = proc
-  return p
-}
-
-const spawnWithShell = (cmd, args, opts, extra) => {
-  let command = opts.shell
-  // if shell is set to true, we use a platform default. we can't let the core
-  // spawn method decide this for us because we need to know what shell is in use
-  // ahead of time so that we can escape arguments properly. we don't need coverage here.
-  if (command === true) {
-    // istanbul ignore next
-    command = process.platform === 'win32' ? process.env.ComSpec : 'sh'
-  }
-
-  const options = { ...opts, shell: false }
-  const realArgs = []
-  let script = cmd
-
-  // first, determine if we're in windows because if we are we need to know if we're
-  // running an .exe or a .cmd/.bat since the latter requires extra escaping
-  const isCmd = /(?:^|\\)cmd(?:\.exe)?$/i.test(command)
-  if (isCmd) {
-    let doubleEscape = false
-
-    // find the actual command we're running
-    let initialCmd = ''
-    let insideQuotes = false
-    for (let i = 0; i < cmd.length; ++i) {
-      const char = cmd.charAt(i)
-      if (char === ' ' && !insideQuotes) {
-        break
-      }
-
-      initialCmd += char
-      if (char === '"' || char === "'") {
-        insideQuotes = !insideQuotes
-      }
-    }
-
-    let pathToInitial
-    try {
-      pathToInitial = which.sync(initialCmd, {
-        path: (options.env && options.env.PATH) || process.env.PATH,
-        pathext: (options.env && options.env.PATHEXT) || process.env.PATHEXT,
-      }).toLowerCase()
-    } catch (err) {
-      pathToInitial = initialCmd.toLowerCase()
-    }
-
-    doubleEscape = pathToInitial.endsWith('.cmd') || pathToInitial.endsWith('.bat')
-    for (const arg of args) {
-      script += ` ${escape.cmd(arg, doubleEscape)}`
-    }
-    realArgs.push('/d', '/s', '/c', script)
-    options.windowsVerbatimArguments = true
-  } else {
-    for (const arg of args) {
-      script += ` ${escape.sh(arg)}`
-    }
-    realArgs.push('-c', script)
-  }
-
-  return promiseSpawn(command, realArgs, options, extra)
-}
-
-// open a file with the default application as defined by the user's OS
-const open = (_args, opts = {}, extra = {}) => {
-  const options = { ...opts, shell: true }
-  const args = [].concat(_args)
-
-  let platform = process.platform
-  // process.platform === 'linux' may actually indicate WSL, if that's the case
-  // we want to treat things as win32 anyway so the host can open the argument
-  if (platform === 'linux' && os.release().toLowerCase().includes('microsoft')) {
-    platform = 'win32'
-  }
-
-  let command = options.command
-  if (!command) {
-    if (platform === 'win32') {
-      // spawnWithShell does not do the additional os.release() check, so we
-      // have to force the shell here to make sure we treat WSL as windows.
-      options.shell = process.env.ComSpec
-      // also, the start command accepts a title so to make sure that we don't
-      // accidentally interpret the first arg as the title, we stick an empty
-      // string immediately after the start command
-      command = 'start ""'
-    } else if (platform === 'darwin') {
-      command = 'open'
-    } else {
-      command = 'xdg-open'
-    }
-  }
-
-  return spawnWithShell(command, args, options, extra)
-}
-promiseSpawn.open = open
-
-const isPipe = (stdio = 'pipe', fd) => {
-  if (stdio === 'pipe' || stdio === null) {
-    return true
-  }
-
-  if (Array.isArray(stdio)) {
-    return isPipe(stdio[fd], fd)
-  }
-
-  return false
-}
-
-const stdioResult = (stdout, stderr, { stdioString = true, stdio }) => {
-  const result = {
-    stdout: null,
-    stderr: null,
-  }
-
-  // stdio is [stdin, stdout, stderr]
-  if (isPipe(stdio, 1)) {
-    result.stdout = Buffer.concat(stdout)
-    if (stdioString) {
-      result.stdout = result.stdout.toString().trim()
-    }
-  }
-
-  if (isPipe(stdio, 2)) {
-    result.stderr = Buffer.concat(stderr)
-    if (stdioString) {
-      result.stderr = result.stderr.toString().trim()
-    }
-  }
-
-  return result
-}
-
-module.exports = promiseSpawn
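
One detail worth remembering from the deleted open() helper: process.platform === 'linux' can actually mean WSL, where the Windows host should open the target. The detection is a kernel-release sniff, reproduced here as a standalone sketch:

    const os = require('os')
    // WSL kernels report a release string containing 'microsoft'
    const isWSL = process.platform === 'linux' &&
      os.release().toLowerCase().includes('microsoft')
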
diff --git a/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/package.json b/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/package.json
deleted file mode 100644
index 2080d9f5be9f0..0000000000000
--- a/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn/package.json
+++ /dev/null
@@ -1,50 +0,0 @@
-{
-  "name": "@npmcli/promise-spawn",
-  "version": "6.0.2",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "main": "./lib/index.js",
-  "description": "spawn processes the way the npm cli likes to do",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/promise-spawn.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "lint": "eslint \"**/*.js\"",
-    "lintfix": "npm run lint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "tap": {
-    "check-coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.11.0",
-    "minipass": "^4.0.0",
-    "spawk": "^1.7.1",
-    "tap": "^16.0.1"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.11.0"
-  },
-  "dependencies": {
-    "which": "^3.0.0"
-  }
-}
diff --git a/node_modules/@npmcli/git/node_modules/which/LICENSE b/node_modules/@npmcli/git/node_modules/which/LICENSE
deleted file mode 100644
index 19129e315fe59..0000000000000
--- a/node_modules/@npmcli/git/node_modules/which/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/git/node_modules/which/bin/which.js b/node_modules/@npmcli/git/node_modules/which/bin/which.js
deleted file mode 100755
index 6df16f21acf93..0000000000000
--- a/node_modules/@npmcli/git/node_modules/which/bin/which.js
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env node
-
-const which = require('../lib')
-const argv = process.argv.slice(2)
-
-const usage = (err) => {
-  if (err) {
-    console.error(`which: ${err}`)
-  }
-  console.error('usage: which [-as] program ...')
-  process.exit(1)
-}
-
-if (!argv.length) {
-  return usage()
-}
-
-let dashdash = false
-const [commands, flags] = argv.reduce((acc, arg) => {
-  if (dashdash || arg === '--') {
-    dashdash = true
-    return acc
-  }
-
-  if (!/^-/.test(arg)) {
-    acc[0].push(arg)
-    return acc
-  }
-
-  for (const flag of arg.slice(1).split('')) {
-    if (flag === 's') {
-      acc[1].silent = true
-    } else if (flag === 'a') {
-      acc[1].all = true
-    } else {
-      usage(`illegal option -- ${flag}`)
-    }
-  }
-
-  return acc
-}, [[], {}])
-
-for (const command of commands) {
-  try {
-    const res = which.sync(command, { all: flags.all })
-    if (!flags.silent) {
-      console.log([].concat(res).join('\n'))
-    }
-  } catch (err) {
-    process.exitCode = 1
-  }
-}
diff --git a/node_modules/@npmcli/git/node_modules/which/lib/index.js b/node_modules/@npmcli/git/node_modules/which/lib/index.js
deleted file mode 100644
index 52e9ea62377e7..0000000000000
--- a/node_modules/@npmcli/git/node_modules/which/lib/index.js
+++ /dev/null
@@ -1,115 +0,0 @@
-const isexe = require('isexe')
-const { join, delimiter, sep, posix } = require('path')
-
-const isWindows = process.platform === 'win32'
-
-// used to check for slashes in commands passed in. always checks for the posix
-// separator on all platforms, and checks for the current separator when not on
-// a posix platform. don't use the isWindows check for this since that is mocked
-// in tests but we still need the code to actually work when called. that is also
-// why it is ignored from coverage.
-/* istanbul ignore next */
-const rSlash = new RegExp(`[${posix.sep}${sep === posix.sep ? '' : sep}]`.replace(/(\\)/g, '\\$1'))
-const rRel = new RegExp(`^\\.${rSlash.source}`)
-
-const getNotFoundError = (cmd) =>
-  Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' })
-
-const getPathInfo = (cmd, {
-  path: optPath = process.env.PATH,
-  pathExt: optPathExt = process.env.PATHEXT,
-  delimiter: optDelimiter = delimiter,
-}) => {
-  // If it has a slash, then we don't bother searching the pathenv.
-  // just check the file itself, and that's it.
-  const pathEnv = cmd.match(rSlash) ? [''] : [
-    // windows always checks the cwd first
-    ...(isWindows ? [process.cwd()] : []),
-    ...(optPath || /* istanbul ignore next: very unusual */ '').split(optDelimiter),
-  ]
-
-  if (isWindows) {
-    const pathExtExe = optPathExt ||
-      ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter)
-    const pathExt = pathExtExe.split(optDelimiter).reduce((acc, item) => {
-      acc.push(item)
-      acc.push(item.toLowerCase())
-      return acc
-    }, [])
-    if (cmd.includes('.') && pathExt[0] !== '') {
-      pathExt.unshift('')
-    }
-    return { pathEnv, pathExt, pathExtExe }
-  }
-
-  return { pathEnv, pathExt: [''] }
-}
-
-const getPathPart = (raw, cmd) => {
-  const pathPart = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw
-  const prefix = !pathPart && rRel.test(cmd) ? cmd.slice(0, 2) : ''
-  return prefix + join(pathPart, cmd)
-}
-
-const which = async (cmd, opt = {}) => {
-  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt)
-  const found = []
-
-  for (const envPart of pathEnv) {
-    const p = getPathPart(envPart, cmd)
-
-    for (const ext of pathExt) {
-      const withExt = p + ext
-      const is = await isexe(withExt, { pathExt: pathExtExe, ignoreErrors: true })
-      if (is) {
-        if (!opt.all) {
-          return withExt
-        }
-        found.push(withExt)
-      }
-    }
-  }
-
-  if (opt.all && found.length) {
-    return found
-  }
-
-  if (opt.nothrow) {
-    return null
-  }
-
-  throw getNotFoundError(cmd)
-}
-
-const whichSync = (cmd, opt = {}) => {
-  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt)
-  const found = []
-
-  for (const pathEnvPart of pathEnv) {
-    const p = getPathPart(pathEnvPart, cmd)
-
-    for (const ext of pathExt) {
-      const withExt = p + ext
-      const is = isexe.sync(withExt, { pathExt: pathExtExe, ignoreErrors: true })
-      if (is) {
-        if (!opt.all) {
-          return withExt
-        }
-        found.push(withExt)
-      }
-    }
-  }
-
-  if (opt.all && found.length) {
-    return found
-  }
-
-  if (opt.nothrow) {
-    return null
-  }
-
-  throw getNotFoundError(cmd)
-}
-
-module.exports = which
-which.sync = whichSync
diff --git a/node_modules/@npmcli/git/node_modules/which/package.json b/node_modules/@npmcli/git/node_modules/which/package.json
deleted file mode 100644
index 989e01c9a3683..0000000000000
--- a/node_modules/@npmcli/git/node_modules/which/package.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
-  "author": "GitHub Inc.",
-  "name": "which",
-  "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
-  "version": "3.0.1",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/node-which.git"
-  },
-  "main": "lib/index.js",
-  "bin": {
-    "node-which": "./bin/which.js"
-  },
-  "license": "ISC",
-  "dependencies": {
-    "isexe": "^2.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.14.1",
-    "tap": "^16.3.0"
-  },
-  "scripts": {
-    "test": "tap",
-    "lint": "eslint \"**/*.js\"",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "lintfix": "npm run lint -- --fix",
-    "snap": "tap",
-    "posttest": "npm run lint"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "tap": {
-    "check-coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.14.1",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/git/package.json
index 4a25e53214843..6ab037d841cc3 100644
--- a/node_modules/@npmcli/git/package.json
+++ b/node_modules/@npmcli/git/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/git",
-  "version": "5.0.2",
+  "version": "5.0.3",
   "main": "lib/index.js",
   "files": [
     "bin/",
@@ -37,7 +37,7 @@
     "tap": "^16.0.1"
   },
   "dependencies": {
-    "@npmcli/promise-spawn": "^6.0.0",
+    "@npmcli/promise-spawn": "^7.0.0",
     "lru-cache": "^10.0.1",
     "npm-pick-manifest": "^9.0.0",
     "proc-log": "^3.0.0",
diff --git a/package-lock.json b/package-lock.json
index 9eee2ff11d9fe..62e5c1ba82138 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -162,7 +162,7 @@
       "devDependencies": {
         "@npmcli/docs": "^1.0.0",
         "@npmcli/eslint-config": "^4.0.2",
-        "@npmcli/git": "^5.0.2",
+        "@npmcli/git": "^5.0.3",
         "@npmcli/mock-globals": "^1.0.0",
         "@npmcli/mock-registry": "^1.0.0",
         "@npmcli/template-oss": "4.18.0",
@@ -2452,12 +2452,12 @@
       }
     },
     "node_modules/@npmcli/git": {
-      "version": "5.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.2.tgz",
-      "integrity": "sha512-IKTmfAlPoDtlLk5Bpm4bqF8mrkNRkR1KXHIWOQlykXcwoV7ZacO44PPUMHB+aFOF0/dsSee+60NfGEItI4YDiw==",
+      "version": "5.0.3",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.3.tgz",
+      "integrity": "sha512-UZp9NwK+AynTrKvHn5k3KviW/hA5eENmFsu3iAPe7sWRt0lFUdsY/wXIYjpDFe7cdSNwOIzbObfwgt6eL5/2zw==",
       "inBundle": true,
       "dependencies": {
-        "@npmcli/promise-spawn": "^6.0.0",
+        "@npmcli/promise-spawn": "^7.0.0",
         "lru-cache": "^10.0.1",
         "npm-pick-manifest": "^9.0.0",
         "proc-log": "^3.0.0",
@@ -2470,33 +2470,6 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn": {
-      "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-6.0.2.tgz",
-      "integrity": "sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg==",
-      "inBundle": true,
-      "dependencies": {
-        "which": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/git/node_modules/which": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
-      "integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==",
-      "inBundle": true,
-      "dependencies": {
-        "isexe": "^2.0.0"
-      },
-      "bin": {
-        "node-which": "bin/which.js"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/@npmcli/installed-package-contents": {
       "version": "2.0.2",
       "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-2.0.2.tgz",
@@ -16239,7 +16212,7 @@
       "version": "4.0.2",
       "license": "ISC",
       "dependencies": {
-        "@npmcli/git": "^5.0.2",
+        "@npmcli/git": "^5.0.3",
         "@npmcli/run-script": "^7.0.1",
         "json-parse-even-better-errors": "^3.0.0",
         "proc-log": "^3.0.0",
diff --git a/package.json b/package.json
index f27abc10c9bbc..c6f25ea500c92 100644
--- a/package.json
+++ b/package.json
@@ -193,7 +193,7 @@
   "devDependencies": {
     "@npmcli/docs": "^1.0.0",
     "@npmcli/eslint-config": "^4.0.2",
-    "@npmcli/git": "^5.0.2",
+    "@npmcli/git": "^5.0.3",
     "@npmcli/mock-globals": "^1.0.0",
     "@npmcli/mock-registry": "^1.0.0",
     "@npmcli/template-oss": "4.18.0",
diff --git a/workspaces/libnpmversion/package.json b/workspaces/libnpmversion/package.json
index bb58fd19553b3..77a74c2e31aa2 100644
--- a/workspaces/libnpmversion/package.json
+++ b/workspaces/libnpmversion/package.json
@@ -37,7 +37,7 @@
     "tap": "^16.3.8"
   },
   "dependencies": {
-    "@npmcli/git": "^5.0.2",
+    "@npmcli/git": "^5.0.3",
     "@npmcli/run-script": "^7.0.1",
     "json-parse-even-better-errors": "^3.0.0",
     "proc-log": "^3.0.0",

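Beyond the promise-spawn major, @npmcli/git@5.0.3's runtime deps are untouched; lru-cache@10 continues to back its in-memory caching of remote revs. The shape of that pattern, sketched (not the exact internals):

    // v10 switched to a named export
    const { LRUCache } = require('lru-cache')
    // e.g. cache `git ls-remote` results keyed by repo URL
    const revs = new LRUCache({ max: 100 })
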
From ecf0de4a0a6619bfcb78e8623d8f183ebd43eea2 Mon Sep 17 00:00:00 2001
From: Luke Karrys 
Date: Wed, 30 Aug 2023 11:33:49 -0700
Subject: [PATCH 68/68] chore: drop node14 support in private mock-globals
 workspace

---
 .github/workflows/ci-npmcli-mock-globals.yml |  4 +---
 mock-globals/package.json                    | 10 ++++++++--
 package-lock.json                            |  2 +-
 3 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/ci-npmcli-mock-globals.yml b/.github/workflows/ci-npmcli-mock-globals.yml
index 11f8fcfad8357..aa275662780ac 100644
--- a/.github/workflows/ci-npmcli-mock-globals.yml
+++ b/.github/workflows/ci-npmcli-mock-globals.yml
@@ -66,9 +66,7 @@ jobs:
             os: windows-latest
             shell: cmd
         node-version:
-          - 14.17.0
-          - 14.x
-          - 16.13.0
+          - 16.14.0
           - 16.x
           - 18.0.0
           - 18.x
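
The trimmed matrix matches the engines range this commit sets below (^16.14.0 || >=18.0.0), so CI only exercises Node lines the package still claims to support. Testing a running Node against that range (a sketch, assuming the semver package):

    const semver = require('semver')
    // semver tolerates the leading 'v' in process.version
    console.log(semver.satisfies(process.version, '^16.14.0 || >=18.0.0'))
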
diff --git a/mock-globals/package.json b/mock-globals/package.json
index aa5fede81c9c5..345606cc5a7a1 100644
--- a/mock-globals/package.json
+++ b/mock-globals/package.json
@@ -30,11 +30,17 @@
     "lib/"
   ],
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.18.0"
+    "version": "4.18.0",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
   },
   "tap": {
     "branches": 89,
diff --git a/package-lock.json b/package-lock.json
index 62e5c1ba82138..5ec996aa6c71f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -216,7 +216,7 @@
         "tap": "^16.3.8"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
       }
     },
     "mock-registry": {