diff --git a/.github/workflows/cron-pins-failed.yml b/.github/workflows/cron-pins-failed.yml
index 703148d796..1f90d60047 100644
--- a/.github/workflows/cron-pins-failed.yml
+++ b/.github/workflows/cron-pins-failed.yml
@@ -17,6 +17,9 @@ jobs:
     strategy:
       matrix:
         env: ['production']
+        include:
+          - env: production
+            pickup_url: http://pickup.dag.haus
     steps:
       - uses: actions/checkout@v2
         with:
@@ -41,5 +44,7 @@ jobs:
           CLUSTER2_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER2_BASIC_AUTH_TOKEN }}
           CLUSTER3_API_URL: ${{ secrets.CLUSTER3_API_URL }}
           CLUSTER3_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER3_BASIC_AUTH_TOKEN }}
+          PICKUP_URL: ${{ matrix.pickup_url }}
+          PICKUP_BASIC_AUTH_TOKEN: ${{ secrets.PICKUP_BASIC_AUTH_TOKEN }}
           AFTER: ${{ github.event.inputs.after }}
         run: yarn --cwd packages/cron start:pins-failed
diff --git a/.github/workflows/cron-pins.yml b/.github/workflows/cron-pins.yml
index 1cd02f1999..9986b9111d 100644
--- a/.github/workflows/cron-pins.yml
+++ b/.github/workflows/cron-pins.yml
@@ -12,6 +12,11 @@ jobs:
     strategy:
       matrix:
         env: ['staging', 'production']
+        include:
+          - env: production
+            pickup_url: http://pickup.dag.haus
+          - env: staging
+            pickup_url: http://staging.pickup.dag.haus
     timeout-minutes: 60
     steps:
       - uses: actions/checkout@v2
@@ -37,6 +42,8 @@
           CLUSTER2_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER2_BASIC_AUTH_TOKEN }}
           CLUSTER3_API_URL: ${{ secrets.CLUSTER3_API_URL }}
           CLUSTER3_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER3_BASIC_AUTH_TOKEN }}
+          PICKUP_URL: ${{ matrix.pickup_url }}
+          PICKUP_BASIC_AUTH_TOKEN: ${{ secrets.PICKUP_BASIC_AUTH_TOKEN }}
         run: yarn --cwd packages/cron start:pins
       - name: Heartbeat
         if: ${{ success() }}
diff --git a/packages/cron/src/bin/pins-failed.js b/packages/cron/src/bin/pins-failed.js
index f63f701efa..1618e814ab 100644
--- a/packages/cron/src/bin/pins-failed.js
+++ b/packages/cron/src/bin/pins-failed.js
@@ -5,7 +5,13 @@ import { fileURLToPath } from 'url'
 import dotenv from 'dotenv'
 import fetch from '@web-std/fetch'
 import { checkFailedPinStatuses } from '../jobs/pins.js'
-import { getPg, getCluster1, getCluster2, getCluster3 } from '../lib/utils.js'
+import {
+  getPg,
+  getCluster1,
+  getCluster2,
+  getCluster3,
+  getPickup,
+} from '../lib/utils.js'
 
 const __dirname = path.dirname(fileURLToPath(import.meta.url))
 global.fetch = fetch
@@ -21,11 +27,19 @@ async function main() {
     const cluster1 = getCluster1(process.env)
     const cluster2 = getCluster2(process.env)
     const cluster3 = getCluster3(process.env)
+    const pickup = getPickup(process.env)
     const after = process.env.AFTER
       ? new Date(process.env.AFTER)
       : oneMonthAgo()
 
-    await checkFailedPinStatuses({ pg, cluster1, cluster2, cluster3, after })
+    await checkFailedPinStatuses({
+      pg,
+      cluster1,
+      cluster2,
+      cluster3,
+      pickup,
+      after,
+    })
   } finally {
     await pg.end()
   }
diff --git a/packages/cron/src/bin/pins.js b/packages/cron/src/bin/pins.js
index 3e245f5441..72f62ce51e 100755
--- a/packages/cron/src/bin/pins.js
+++ b/packages/cron/src/bin/pins.js
@@ -5,7 +5,13 @@ import { fileURLToPath } from 'url'
 import dotenv from 'dotenv'
 import fetch from '@web-std/fetch'
 import { updatePendingPinStatuses } from '../jobs/pins.js'
-import { getPg, getCluster1, getCluster2, getCluster3 } from '../lib/utils.js'
+import {
+  getPg,
+  getCluster1,
+  getCluster2,
+  getCluster3,
+  getPickup,
+} from '../lib/utils.js'
 
 const __dirname = path.dirname(fileURLToPath(import.meta.url))
 global.fetch = fetch
@@ -18,8 +24,9 @@ async function main() {
     const cluster1 = getCluster1(process.env)
     const cluster2 = getCluster2(process.env)
     const cluster3 = getCluster3(process.env)
+    const pickup = getPickup(process.env)
 
-    await updatePendingPinStatuses({ pg, cluster1, cluster2, cluster3 })
+    await updatePendingPinStatuses({ pg, cluster1, cluster2, cluster3, pickup })
   } finally {
     await pg.end()
   }
diff --git a/packages/cron/src/jobs/pins.js b/packages/cron/src/jobs/pins.js
index 5d910a6ab4..dbafe10eab 100644
--- a/packages/cron/src/jobs/pins.js
+++ b/packages/cron/src/jobs/pins.js
@@ -8,7 +8,11 @@ const CONCURRENCY = 5
  * http://nginx.org/en/docs/http/ngx_http_core_module.html#large_client_header_buffers
  */
 const MAX_CLUSTER_STATUS_CIDS = 120
-const CLUSTERS = ['IpfsCluster', 'IpfsCluster2', 'IpfsCluster3']
+/**
+ * @typedef {import('../../../api/src/utils/db-types').definitions} definitions
+ * @type {Array<definitions['pin']['service']>}
+ **/
+const CLUSTERS = ['IpfsCluster', 'IpfsCluster2', 'IpfsCluster3', 'ElasticIpfs']
 
 /**
  * @typedef {import('pg').Client} Client
@@ -17,8 +21,8 @@
  *   cluster1: import('@nftstorage/ipfs-cluster').Cluster
  *   cluster2: import('@nftstorage/ipfs-cluster').Cluster
  *   cluster3: import('@nftstorage/ipfs-cluster').Cluster
+ *   pickup: import('@nftstorage/ipfs-cluster').Cluster
  * }} Config
- * @typedef {import('../../../api/src/utils/db-types').definitions} definitions
  * @typedef {Pick & { source_cid: string }} Pin
 * @typedef {import('@supabase/postgrest-js').PostgrestQueryBuilder<Pin>} PinQuery
 */
@@ -145,7 +149,7 @@ UPDATE pin AS p
  * }} config
 */
 async function updatePinStatuses(config) {
-  const { countPins, fetchPins, pg, cluster3 } = config
+  const { countPins, fetchPins, pg, pickup } = config
   if (!log.enabled) {
     console.log('ℹ️ Enable logging by setting DEBUG=pins:updatePinStatuses')
   }
@@ -182,7 +186,7 @@ async function updatePinStatuses(config) {
     /** @type {Pin[]} */
     const updatedPins = []
     const cids = pins.map((p) => p.source_cid)
-    const statuses = await cluster3.statusAll({ cids })
+    const statuses = await pickup.statusAll({ cids })
     const statusByCid = Object.fromEntries(statuses.map((s) => [s.cid, s]))
 
     for (const pin of pins) {
diff --git a/packages/cron/src/lib/utils.js b/packages/cron/src/lib/utils.js
index 8eb6ebe76e..fb8161fee1 100644
--- a/packages/cron/src/lib/utils.js
+++ b/packages/cron/src/lib/utils.js
@@ -44,6 +44,21 @@ export function getCluster3(env) {
   })
 }
 
+/**
+ * Create a new IPFS Cluster instance from the passed environment variables.
+ * @param {Record<string, string|undefined>} env
+ */
+export function getPickup(env) {
+  const pickupUrl = env.PICKUP_URL
+  if (!pickupUrl) throw new Error('PICKUP_URL must be set in env')
+  const basicAuthToken = env.PICKUP_BASIC_AUTH_TOKEN
+  if (!basicAuthToken)
+    throw new Error('PICKUP_BASIC_AUTH_TOKEN must be set in env')
+  return new Cluster(pickupUrl, {
+    headers: { authorization: `Basic ${basicAuthToken}` },
+  })
+}
+
 /**
  * Create a new DBClient instance from the passed environment variables.
  * @param {Record<string, string|undefined>} env
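Reviewer note: the snippet below is a minimal sketch, not part of the patch, showing how the pieces above fit together: the workflow matrix supplies PICKUP_URL, repo secrets supply PICKUP_BASIC_AUTH_TOKEN, getPickup builds the Basic-auth Cluster client, and the job polls it via statusAll exactly as jobs/pins.js does. The file name, env values, and CID are illustrative placeholders; the import path assumes the script sits alongside the real bins in packages/cron/src/bin.

// sketch.js — hypothetical local run, placed next to pins.js
import fetch from '@web-std/fetch'
import { getPickup } from '../lib/utils.js'

global.fetch = fetch // the real bins install fetch globally the same way

process.env.PICKUP_URL = 'http://pickup.dag.haus' // from the workflow matrix
process.env.PICKUP_BASIC_AUTH_TOKEN = 'example-token' // placeholder, never a real secret

// getPickup throws if either env var is missing, then returns a Cluster
// client that sends `authorization: Basic <token>` on every request.
const pickup = getPickup(process.env)

// updatePinStatuses batches at most MAX_CLUSTER_STATUS_CIDS (120) CIDs per call.
const cids = ['(a source_cid from the pin table)'] // placeholder CID
const statuses = await pickup.statusAll({ cids })
console.log(Object.fromEntries(statuses.map((s) => [s.cid, s])))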