diff --git a/.github/workflows/cron-pins-failed.yml b/.github/workflows/cron-pins-failed.yml
index 703148d796..3155b555f2 100644
--- a/.github/workflows/cron-pins-failed.yml
+++ b/.github/workflows/cron-pins-failed.yml
@@ -17,6 +17,9 @@ jobs:
     strategy:
       matrix:
         env: ['production']
+        include:
+          - env: production
+            pickup_url: https://pickup.dag.haus
     steps:
       - uses: actions/checkout@v2
         with:
@@ -41,5 +44,7 @@ jobs:
           CLUSTER2_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER2_BASIC_AUTH_TOKEN }}
           CLUSTER3_API_URL: ${{ secrets.CLUSTER3_API_URL }}
           CLUSTER3_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER3_BASIC_AUTH_TOKEN }}
+          PICKUP_URL: ${{ matrix.pickup_url }}
+          PICKUP_BASIC_AUTH_TOKEN: ${{ secrets.PICKUP_BASIC_AUTH_TOKEN }}
           AFTER: ${{ github.event.inputs.after }}
         run: yarn --cwd packages/cron start:pins-failed
diff --git a/.github/workflows/cron-pins.yml b/.github/workflows/cron-pins.yml
index 1cd02f1999..92b3e0ecde 100644
--- a/.github/workflows/cron-pins.yml
+++ b/.github/workflows/cron-pins.yml
@@ -12,6 +12,11 @@ jobs:
     strategy:
       matrix:
         env: ['staging', 'production']
+        include:
+          - env: production
+            pickup_url: https://pickup.dag.haus
+          - env: staging
+            pickup_url: https://staging.pickup.dag.haus
     timeout-minutes: 60
     steps:
       - uses: actions/checkout@v2
@@ -37,6 +42,8 @@ jobs:
           CLUSTER2_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER2_BASIC_AUTH_TOKEN }}
           CLUSTER3_API_URL: ${{ secrets.CLUSTER3_API_URL }}
           CLUSTER3_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER3_BASIC_AUTH_TOKEN }}
+          PICKUP_URL: ${{ matrix.pickup_url }}
+          PICKUP_BASIC_AUTH_TOKEN: ${{ secrets.PICKUP_BASIC_AUTH_TOKEN }}
         run: yarn --cwd packages/cron start:pins
       - name: Heartbeat
         if: ${{ success() }}
diff --git a/packages/cron/src/bin/pins-failed.js b/packages/cron/src/bin/pins-failed.js
index f63f701efa..1618e814ab 100644
--- a/packages/cron/src/bin/pins-failed.js
+++ b/packages/cron/src/bin/pins-failed.js
@@ -5,7 +5,13 @@ import { fileURLToPath } from 'url'
 import dotenv from 'dotenv'
 import fetch from '@web-std/fetch'
 import { checkFailedPinStatuses } from '../jobs/pins.js'
-import { getPg, getCluster1, getCluster2, getCluster3 } from '../lib/utils.js'
+import {
+  getPg,
+  getCluster1,
+  getCluster2,
+  getCluster3,
+  getPickup,
+} from '../lib/utils.js'
 
 const __dirname = path.dirname(fileURLToPath(import.meta.url))
 global.fetch = fetch
@@ -21,11 +27,19 @@ async function main() {
     const cluster1 = getCluster1(process.env)
     const cluster2 = getCluster2(process.env)
     const cluster3 = getCluster3(process.env)
+    const pickup = getPickup(process.env)
     const after = process.env.AFTER
       ? new Date(process.env.AFTER)
      : oneMonthAgo()
 
-    await checkFailedPinStatuses({ pg, cluster1, cluster2, cluster3, after })
+    await checkFailedPinStatuses({
+      pg,
+      cluster1,
+      cluster2,
+      cluster3,
+      pickup,
+      after,
+    })
   } finally {
     await pg.end()
   }
diff --git a/packages/cron/src/bin/pins.js b/packages/cron/src/bin/pins.js
index 3e245f5441..72f62ce51e 100755
--- a/packages/cron/src/bin/pins.js
+++ b/packages/cron/src/bin/pins.js
@@ -5,7 +5,13 @@ import { fileURLToPath } from 'url'
 import dotenv from 'dotenv'
 import fetch from '@web-std/fetch'
 import { updatePendingPinStatuses } from '../jobs/pins.js'
-import { getPg, getCluster1, getCluster2, getCluster3 } from '../lib/utils.js'
+import {
+  getPg,
+  getCluster1,
+  getCluster2,
+  getCluster3,
+  getPickup,
+} from '../lib/utils.js'
 
 const __dirname = path.dirname(fileURLToPath(import.meta.url))
 global.fetch = fetch
@@ -18,8 +24,9 @@ async function main() {
     const cluster1 = getCluster1(process.env)
     const cluster2 = getCluster2(process.env)
     const cluster3 = getCluster3(process.env)
+    const pickup = getPickup(process.env)
 
-    await updatePendingPinStatuses({ pg, cluster1, cluster2, cluster3 })
+    await updatePendingPinStatuses({ pg, cluster1, cluster2, cluster3, pickup })
   } finally {
     await pg.end()
   }
diff --git a/packages/cron/src/jobs/pins.js b/packages/cron/src/jobs/pins.js
index 5d910a6ab4..dbafe10eab 100644
--- a/packages/cron/src/jobs/pins.js
+++ b/packages/cron/src/jobs/pins.js
@@ -8,7 +8,11 @@ const CONCURRENCY = 5
  * http://nginx.org/en/docs/http/ngx_http_core_module.html#large_client_header_buffers
  */
 const MAX_CLUSTER_STATUS_CIDS = 120
-const CLUSTERS = ['IpfsCluster', 'IpfsCluster2', 'IpfsCluster3']
+/**
+ * @typedef {import('../../../api/src/utils/db-types').definitions} definitions
+ * @type Array
+ **/
+const CLUSTERS = ['IpfsCluster', 'IpfsCluster2', 'IpfsCluster3', 'ElasticIpfs']
 
 /**
  * @typedef {import('pg').Client} Client
@@ -17,8 +21,8 @@
  * cluster1: import('@nftstorage/ipfs-cluster').Cluster
  * cluster2: import('@nftstorage/ipfs-cluster').Cluster
  * cluster3: import('@nftstorage/ipfs-cluster').Cluster
+ * pickup: import('@nftstorage/ipfs-cluster').Cluster
  * }} Config
- * @typedef {import('../../../api/src/utils/db-types').definitions} definitions
  * @typedef {Pick & { source_cid: string }} Pin
  * @typedef {import('@supabase/postgrest-js').PostgrestQueryBuilder} PinQuery
  */
@@ -145,7 +149,7 @@ UPDATE pin AS p
  * }} config
  */
 async function updatePinStatuses(config) {
-  const { countPins, fetchPins, pg, cluster3 } = config
+  const { countPins, fetchPins, pg, pickup } = config
   if (!log.enabled) {
     console.log('ℹ️ Enable logging by setting DEBUG=pins:updatePinStatuses')
   }
@@ -182,7 +186,7 @@ async function updatePinStatuses(config) {
       /** @type {Pin[]} */
       const updatedPins = []
       const cids = pins.map((p) => p.source_cid)
-      const statuses = await cluster3.statusAll({ cids })
+      const statuses = await pickup.statusAll({ cids })
       const statusByCid = Object.fromEntries(statuses.map((s) => [s.cid, s]))
 
       for (const pin of pins) {
diff --git a/packages/cron/src/lib/utils.js b/packages/cron/src/lib/utils.js
index 8eb6ebe76e..e6a9ecf0f9 100644
--- a/packages/cron/src/lib/utils.js
+++ b/packages/cron/src/lib/utils.js
@@ -44,6 +44,22 @@ export function getCluster3(env) {
   })
 }
 
+/**
+ * Create a new IPFS Cluster instance from the passed environment variables.
+ * @param {Record} env
+ */
+export function getPickup(env) {
+  const pickupUrl = env.PICKUP_URL
+  if (!pickupUrl) throw new Error('PICKUP_URL must be set in env')
+  const basicAuthToken = env.PICKUP_BASIC_AUTH_TOKEN
+  if (!basicAuthToken) {
+    throw new Error('PICKUP_BASIC_AUTH_TOKEN must be set in env')
+  }
+  return new Cluster(pickupUrl, {
+    headers: { authorization: `Basic ${basicAuthToken}` },
+  })
+}
+
 /**
  * Create a new DBClient instance from the passed environment variables.
  * @param {Record} env
diff --git a/yarn.lock b/yarn.lock
index 3e77d7ff7c..17a1b13c5d 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -16834,16 +16834,11 @@ prettier-linter-helpers@^1.0.0:
   dependencies:
     fast-diff "^1.1.2"
 
-prettier@2.5.1, prettier@^2.5.1:
+prettier@2.5.1, "prettier@>=2.2.1 <=2.3.0", prettier@^2.5.1:
   version "2.5.1"
   resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.5.1.tgz#fff75fa9d519c54cf0fce328c1017d94546bc56a"
   integrity sha512-vBZcPRUR5MZJwoyi3ZoyQlc1rXeEck8KgeC9AwwOn+exuxLxq5toTRDTSaVrXHxelDMHy9zlicw8u66yxoSUFg==
 
-"prettier@>=2.2.1 <=2.3.0":
-  version "2.3.0"
-  resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.3.0.tgz#b6a5bf1284026ae640f17f7ff5658a7567fc0d18"
-  integrity sha512-kXtO4s0Lz/DW/IJ9QdWhAf7/NmPWQXkFr/r/WkR3vyI+0v8amTDxiaQSLzs8NBlytfLWX/7uQUMIW677yLKl4w==
-
 pretty-error@^2.1.1:
   version "2.1.2"
   resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.2.tgz#be89f82d81b1c86ec8fdfbc385045882727f93b6"
@@ -20016,12 +20011,7 @@ typedoc@^0.22.14:
     minimatch "^5.1.0"
     shiki "^0.10.1"
 
-typescript@4.4.4:
-  version "4.4.4"
-  resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.4.tgz#2cd01a1a1f160704d3101fd5a58ff0f9fcb8030c"
-  integrity sha512-DqGhF5IKoBl8WNf8C1gu8q0xZSInh9j1kJJMqT3a94w1JzVaBU4EXOSMrz9yDqMT0xt3selp83fuFMQ0uzv6qA==
-
-typescript@4.5.3:
+typescript@4.4.4, typescript@4.5.3:
   version "4.5.3"
   resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.5.3.tgz#afaa858e68c7103317d89eb90c5d8906268d353c"
   integrity sha512-eVYaEHALSt+s9LbvgEv4Ef+Tdq7hBiIZgii12xXJnukryt3pMgJf6aKhoCZ3FWQsu6sydEnkg11fYXLzhLBjeQ==