fix: remove dagcargo materialized views #735

Merged · 14 commits · Dec 9, 2021
Changes from all commits
28 changes: 0 additions & 28 deletions .github/workflows/cron-dagcargo-views.yml

This file was deleted.

1 change: 0 additions & 1 deletion packages/cron/package.json
@@ -9,7 +9,6 @@
"start": "run-s start:*",
"start:metrics": "node src/bin/metrics.js",
"start:pins": "node src/bin/pins.js",
"start:dagcargo:views": "NODE_TLS_REJECT_UNAUTHORIZED=0 node src/bin/dagcargo-views.js",
"test": "npm-run-all -p -r mock:cluster mock:pgrest test:e2e",
"test:e2e": "mocha test/*.spec.js --exit",
"mock:cluster": "smoke -p 9094 test/mocks/cluster",
19 changes: 0 additions & 19 deletions packages/cron/src/bin/dagcargo-views.js

This file was deleted.

26 changes: 0 additions & 26 deletions packages/cron/src/jobs/dagcargo.js

This file was deleted.

6 changes: 0 additions & 6 deletions packages/db/README.md
@@ -25,12 +25,6 @@ PG_REST_JWT=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoicG9zdGdyZXMifQ.oM0S

# Connection string for locally running postgres used in tests
PG_CONNECTION=postgres://postgres:[email protected]:5432/postgres

# Read-only `dagcargo` credentials for "foreign data wrapper" (fdw) in tests
DAG_CARGO_HOST=<get from password vault - dagcargo replica>
DAG_CARGO_USER=<get from password vault - dagcargo replica>
DAG_CARGO_PASSWORD=<get from password vault - dagcargo replica>
DAG_CARGO_DATABASE=<get from password vault - dagcargo replica>
```

Production vars are set in Github Actions secrets.
34 changes: 0 additions & 34 deletions packages/db/postgres/cargo.sql
@@ -5,37 +5,3 @@ IMPORT FOREIGN SCHEMA cargo
LIMIT TO (aggregate_entries, aggregates, deals, dags)
FROM SERVER dag_cargo_server
INTO cargo;

-- Create materialized view from cargo "aggregate_entries" table
CREATE MATERIALIZED VIEW public.aggregate_entry
AS
SELECT *
FROM cargo.aggregate_entries;

-- Indexes for "aggregate_entries" mat view
CREATE UNIQUE INDEX aggregate_entry_unique_cidv1_aggregate_cid
ON public.aggregate_entry (aggregate_cid, cid_v1);
CREATE INDEX aggregate_entry_cid_v1
ON public.aggregate_entry (cid_v1);

-- Create materialized view from cargo "deals" table
CREATE MATERIALIZED VIEW public.deal
AS
SELECT *
FROM cargo.deals;

-- Indexes for "deals" mat view
CREATE UNIQUE INDEX deal_unique_deal_id
ON public.deal (deal_id);
CREATE INDEX deal_aggregate_cid
ON public.deal (aggregate_cid);

-- Create materialized view from cargo "aggregates" table
CREATE MATERIALIZED VIEW public.aggregate
AS
SELECT *
FROM cargo.aggregates;

-- Indexes for "aggregate" mat view
CREATE UNIQUE INDEX aggregate_unique_aggregate_cid
ON public.aggregate (aggregate_cid);
33 changes: 33 additions & 0 deletions packages/db/postgres/cargo.testing.sql
@@ -0,0 +1,33 @@
CREATE SCHEMA IF NOT EXISTS cargo;

CREATE TABLE IF NOT EXISTS cargo.aggregate_entries (
aggregate_cid TEXT NOT NULL,
cid_v1 TEXT NOT NULL,
datamodel_selector TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS cargo.aggregates (
aggregate_cid TEXT NOT NULL UNIQUE,
piece_cid TEXT UNIQUE NOT NULL,
sha256hex TEXT NOT NULL,
export_size BIGINT NOT NULL,
metadata JSONB,
entry_created TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);

CREATE TABLE IF NOT EXISTS cargo.deals (
deal_id BIGINT UNIQUE NOT NULL,
aggregate_cid TEXT NOT NULL,
client TEXT NOT NULL,
provider TEXT NOT NULL,
status TEXT NOT NULL,
status_meta TEXT,
start_epoch INTEGER NOT NULL,
start_time TIMESTAMP WITH TIME ZONE NOT NULL,
end_epoch INTEGER NOT NULL,
end_time TIMESTAMP WITH TIME ZONE NOT NULL,
sector_start_epoch INTEGER,
sector_start_time TIMESTAMP WITH TIME ZONE,
entry_created TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
entry_last_updated TIMESTAMP WITH TIME ZONE NOT NULL
);
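
Because the testing schema stands in for the FDW-backed tables, e2e fixtures can seed it directly with the pg client the db and cron packages already use. Below is a minimal sketch of such a fixture, assuming the PG_CONNECTION string from the README; every CID, deal id, and other inserted value is an illustrative placeholder, not data from this PR.

```js
import pg from 'pg'

// Minimal sketch: seed the cargo.* testing tables before an e2e run.
// All inserted values are illustrative placeholders.
const client = new pg.Client({ connectionString: process.env.PG_CONNECTION })
await client.connect()

await client.query(
  `INSERT INTO cargo.aggregates (aggregate_cid, piece_cid, sha256hex, export_size)
   VALUES ($1, $2, $3, $4)`,
  ['bafyAggregatePlaceholder', 'bagaPiecePlaceholder', 'deadbeef', 1024]
)

await client.query(
  `INSERT INTO cargo.aggregate_entries (aggregate_cid, cid_v1, datamodel_selector)
   VALUES ($1, $2, $3)`,
  ['bafyAggregatePlaceholder', 'bafyContentCidPlaceholder', 'Links/0/Hash']
)

await client.query(
  `INSERT INTO cargo.deals (deal_id, aggregate_cid, client, provider, status,
     start_epoch, start_time, end_epoch, end_time, entry_last_updated)
   VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, NOW())`,
  [12345, 'bafyAggregatePlaceholder', 'f0placeholder', 'f0provider', 'active',
    0, new Date(), 1, new Date()]
)

await client.end()
```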
2 changes: 1 addition & 1 deletion packages/db/postgres/fdw.sql
@@ -14,7 +14,7 @@ SERVER dag_cargo_server
);

CREATE
USER MAPPING FOR current_user
USER MAPPING FOR :WEB3_STORAGE_USER
SERVER dag_cargo_server
OPTIONS (
user :'DAG_CARGO_USER',
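
The mapping used to be created for whichever role ran the script (current_user); parameterizing it as :WEB3_STORAGE_USER lets it be created for the role that will actually query the foreign tables, falling back to CURRENT_USER when the variable is unset. A hedged illustration of the substitution that db-sql.js performs, using a trimmed placeholder version of the statement and made-up env values:

```js
// Illustration only: a trimmed, placeholder version of the user-mapping
// statement and made-up env values, showing the substitutions db-sql.js applies.
let fdwSql = `CREATE USER MAPPING FOR :WEB3_STORAGE_USER
SERVER dag_cargo_server
OPTIONS (user :'DAG_CARGO_USER', password :'DAG_CARGO_PASSWORD');`

const env = {
  WEB3_STORAGE_USER: 'web3_storage',
  DAG_CARGO_USER: 'cargo_reader',
  DAG_CARGO_PASSWORD: 'example-password'
}

fdwSql = fdwSql.replaceAll(":'DAG_CARGO_USER'", `'${env.DAG_CARGO_USER}'`)
fdwSql = fdwSql.replaceAll(":'DAG_CARGO_PASSWORD'", `'${env.DAG_CARGO_PASSWORD}'`)
fdwSql = fdwSql.replaceAll(':WEB3_STORAGE_USER', env.WEB3_STORAGE_USER || 'CURRENT_USER')

console.log(fdwSql)
// CREATE USER MAPPING FOR web3_storage
// SERVER dag_cargo_server
// OPTIONS (user 'cargo_reader', password 'example-password');
```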
6 changes: 3 additions & 3 deletions packages/db/postgres/functions.sql
@@ -289,9 +289,9 @@ SELECT COALESCE(de.status, 'queued') as status,
a.piece_cid as pieceCid,
ae.aggregate_cid as batchRootCid,
ae.cid_v1 as dataCid
FROM public.aggregate_entry ae
join public.aggregate a using (aggregate_cid)
LEFT JOIN public.deal de USING (aggregate_cid)
FROM cargo.aggregate_entries ae
join cargo.aggregates a using (aggregate_cid)
LEFT JOIN cargo.deals de USING (aggregate_cid)
WHERE ae.cid_v1 = ANY (cids)
ORDER BY de.entry_last_updated
$$;
61 changes: 17 additions & 44 deletions packages/db/scripts/cmds/db-sql.js
@@ -19,32 +19,14 @@ const { Client } = pg
*/
export async function dbSqlCmd ({ reset, cargo, testing } = {}) {
Member:

We should keep the ability to set up the non-testing version for running locally.

Contributor Author:

So, use the testing flag instead of cargo.

// read all the SQL files
const configSql = fs.readFileSync(path.join(__dirname, '../../postgres/config.sql'), {
encoding: 'utf-8'
})
const configSql = fs.readFileSync(path.join(__dirname, '../../postgres/config.sql'), 'utf-8')

const tablesSql = fs.readFileSync(path.join(__dirname, '../../postgres/tables.sql'), {
encoding: 'utf-8'
})
const functionsSql = fs.readFileSync(path.join(__dirname, '../../postgres/functions.sql'), {
encoding: 'utf-8'
})
const resetSql = fs.readFileSync(path.join(__dirname, '../../postgres/reset.sql'), {
encoding: 'utf-8'
})
let cargoSql = fs.readFileSync(path.join(__dirname, '../../postgres/cargo.sql'), {
encoding: 'utf-8'
})

let fdwSql = fs.readFileSync(path.join(__dirname, '../../postgres/fdw.sql'), {
encoding: 'utf-8'
})

// Replace secrets in the FDW sql file
fdwSql = fdwSql.replace(":'DAG_CARGO_HOST'", `'${process.env.DAG_CARGO_HOST}'`)
fdwSql = fdwSql.replace(":'DAG_CARGO_DATABASE'", `'${process.env.DAG_CARGO_DATABASE}'`)
fdwSql = fdwSql.replace(":'DAG_CARGO_USER'", `'${process.env.DAG_CARGO_USER}'`)
fdwSql = fdwSql.replace(":'DAG_CARGO_PASSWORD'", `'${process.env.DAG_CARGO_PASSWORD}'`)
const tablesSql = fs.readFileSync(path.join(__dirname, '../../postgres/tables.sql'), 'utf-8')
const functionsSql = fs.readFileSync(path.join(__dirname, '../../postgres/functions.sql'), 'utf-8')
const resetSql = fs.readFileSync(path.join(__dirname, '../../postgres/reset.sql'), 'utf-8')
const cargoSql = fs.readFileSync(path.join(__dirname, '../../postgres/cargo.sql'), 'utf-8')
const cargoTesting = fs.readFileSync(path.join(__dirname, '../../postgres/cargo.testing.sql'), 'utf-8')
let fdwSql = fs.readFileSync(path.join(__dirname, '../../postgres/fdw.sql'), 'utf-8')

// Setup postgres client
const connectionString = process.env.PG_CONNECTION
@@ -64,25 +46,16 @@ export async function dbSqlCmd ({ reset, cargo, testing } = {}) {
await client.query(configSql)
await client.query(tablesSql)

if (cargo) {
if (testing) {
cargoSql = cargoSql.replace(
`
-- Create materialized view from cargo "aggregate_entries" table
CREATE MATERIALIZED VIEW public.aggregate_entry
AS
SELECT *
FROM cargo.aggregate_entries;`,
`
CREATE MATERIALIZED VIEW public.aggregate_entry
AS
SELECT *
FROM cargo.aggregate_entries
WHERE cid_v1 in ('bafybeiaj5yqocsg5cxsuhtvclnh4ulmrgsmnfbhbrfxrc3u2kkh35mts4e');
`
)
}

// if testing, or if the cargo fdw flag is not set, you just get the schema and no fdw connection to dagcargo
if (testing && cargo) {
await client.query(cargoTesting)
} else if (!testing && cargo) {
// Replace secrets in the FDW sql file
fdwSql = fdwSql.replace(":'DAG_CARGO_HOST'", `'${process.env.DAG_CARGO_HOST}'`)
fdwSql = fdwSql.replace(":'DAG_CARGO_DATABASE'", `'${process.env.DAG_CARGO_DATABASE}'`)
fdwSql = fdwSql.replaceAll(":'DAG_CARGO_USER'", `'${process.env.DAG_CARGO_USER}'`)
fdwSql = fdwSql.replaceAll(":'DAG_CARGO_PASSWORD'", `'${process.env.DAG_CARGO_PASSWORD}'`)
fdwSql = fdwSql.replaceAll(':WEB3_STORAGE_USER', `${process.env.WEB3_STORAGE_USER || 'CURRENT_USER'}`)
await client.query(fdwSql)
await client.query(cargoSql)
}
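
With the materialized-view branch gone, dbSqlCmd has three useful shapes: schema only, schema plus the local cargo testing tables, or schema plus the real dagcargo foreign data wrapper. A sketch of how each might be invoked is below; the import path is an assumption for illustration and the env vars must already be exported.

```js
// Sketch of the three setups the reworked command supports.
// The import path is an assumption for illustration.
import { dbSqlCmd } from './packages/db/scripts/cmds/db-sql.js'

// 1. Schema only: public tables and functions, no cargo data at all.
await dbSqlCmd({ reset: true })

// 2. Local testing: creates the cargo.* tables from cargo.testing.sql,
//    so tests run with no FDW credentials at all.
await dbSqlCmd({ reset: true, cargo: true, testing: true })

// 3. Non-testing local setup: needs DAG_CARGO_* (and optionally
//    WEB3_STORAGE_USER) so fdw.sql can wire the foreign tables up
//    against the dagcargo replica.
await dbSqlCmd({ reset: true, cargo: true })
```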
6 changes: 5 additions & 1 deletion packages/infra/heroku/grant-postgrest.sql
@@ -10,4 +10,8 @@ GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO web3_storage;
-- allow access to new tables/sequences/functions that are created in the public schema in the future
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT, INSERT, UPDATE ON TABLES TO web3_storage;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO web3_storage;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT EXECUTE ON FUNCTIONS TO web3_storage;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT EXECUTE ON FUNCTIONS TO web3_storage;
-- allow access to schema cargo for web3_storage
GRANT USAGE ON SCHEMA cargo TO web3_storage;
GRANT SELECT ON ALL TABLES IN SCHEMA cargo TO web3_storage;
ALTER DEFAULT PRIVILEGES IN SCHEMA cargo GRANT SELECT ON TABLES TO web3_storage;
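
Since the PostgREST web3_storage role now reads the cargo schema directly instead of the dropped public materialized views, it can be worth sanity-checking the new grants after running this script. A small sketch using standard Postgres privilege functions (not something this PR adds; the connection string is assumed):

```js
import pg from 'pg'

// Sanity check (illustrative, not part of this PR): confirm web3_storage
// can use the cargo schema and read its tables after the grants above.
const client = new pg.Client({ connectionString: process.env.PG_CONNECTION })
await client.connect()

const { rows } = await client.query(`
  SELECT has_schema_privilege('web3_storage', 'cargo', 'USAGE')       AS cargo_usage,
         has_table_privilege('web3_storage', 'cargo.deals', 'SELECT') AS deals_select
`)
console.log(rows[0]) // expect { cargo_usage: true, deals_select: true }

await client.end()
```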