feat: postgres setup (#508)
* local setup for postgres database using docker compose
* DB Client revamped to be used for the API
* DB tests

Co-authored-by: Oli Evans <[email protected]>
vasco-santos and olizilla committed Oct 22, 2021
1 parent 1ee989c commit cd04716
Showing 34 changed files with 8,274 additions and 4,155 deletions.
2 changes: 0 additions & 2 deletions .github/workflows/api.yml
@@ -7,8 +7,6 @@ on:
      - 'packages/api/**'
      - '.github/workflows/api.yml'
  pull_request:
    branches:
      - main
    paths:
      - 'packages/api/**'
      - '.github/workflows/api.yml'
22 changes: 20 additions & 2 deletions .github/workflows/db.yml
@@ -10,12 +10,30 @@ on:
  # Nothing to do on PR yet, but having the check appear on the PR serves as a reminder
  # that we don't have proper tests for db changes yet, and that merging it will deploy.
  pull_request:
    branches:
      - main
    paths:
      - 'packages/db/**'
      - '.github/workflows/db.yml'
jobs:
  test:
    runs-on: ubuntu-latest
    name: Test
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v2
        with:
          node-version: 16
      - uses: bahmutov/npm-install@v1
      - run: npx playwright install-deps
      - run: npm test --workspace packages/db
        env:
          PG_REST_URL: ${{secrets.PG_REST_URL}}
          PG_REST_JWT: ${{secrets.PG_REST_JWT}}
          PG_CONNECTION: ${{secrets.PG_CONNECTION}}
          DAG_CARGO_HOST: ${{secrets.DAG_CARGO_HOST}}
          DAG_CARGO_DATABASE: ${{secrets.DAG_CARGO_DATABASE}}
          DAG_CARGO_USER: ${{secrets.DAG_CARGO_USER}}
          DAG_CARGO_PASSWORD: ${{secrets.DAG_CARGO_PASSWORD}}

  deploy-staging:
    name: Deploy Staging DB
    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
8,395 changes: 4,252 additions & 4,143 deletions package-lock.json

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions packages/api/README.md
@@ -50,6 +50,7 @@ One time set up of your cloudflare worker subdomain for dev:
wrangler secret put S3_ACCESS_KEY_ID --env $(whoami) # Get from Amazon S3 (not required for dev)
wrangler secret put S3_SECRET_ACCESS_KEY_ID --env $(whoami) # Get from Amazon S3 (not required for dev)
wrangler secret put S3_BUCKET_NAME --env $(whoami) # e.g web3.storage-staging-us-east-2 (not required for dev)
wrangler secret put PG_REST_JWT --env USER # Get from database postgrest
```
- `npm run publish` - Publish the worker under your env. An alias for `wrangler publish --env $(whoami)`
1 change: 0 additions & 1 deletion packages/api/src/auth.js
@@ -26,7 +26,6 @@ export function withMagicToken (handler) {

    const magicUser = await tryMagicToken(token, env)
    if (magicUser) {
      console.log('magicUser', magicUser)
      request.auth = { user: magicUser }
      env.sentry && env.sentry.setUser(magicUser)
      return handler(request, env, ctx)
4 changes: 2 additions & 2 deletions packages/api/wrangler.toml
@@ -26,14 +26,14 @@ format = "service-worker"
account_id = "fffa4b4363a7e5250af8357087263b3a" # Protocol Labs CF account
zone_id = "7eee3323c1b35b6650568604c65f441e" # web3.storage zone
route = "https://api.web3.storage/*"
vars = { CLUSTER_API_URL = "https://web3.storage.ipfscluster.io/api/", ENV = "production" }
vars = { CLUSTER_API_URL = "https://web3.storage.ipfscluster.io/api/", ENV = "production", DATABASE_URL = "https://db.web3.storage" }

[env.staging]
# name = "web3-storage-staging"
account_id = "fffa4b4363a7e5250af8357087263b3a" # Protocol Labs CF account
zone_id = "7eee3323c1b35b6650568604c65f441e" # web3.storage zone
route = "https://api-staging.web3.storage/*"
vars = { CLUSTER_API_URL = "https://web3.storage.ipfscluster.io/api/", ENV = "staging" }
vars = { CLUSTER_API_URL = "https://web3.storage.ipfscluster.io/api/", ENV = "staging", DATABASE_URL = "https://db-staging.web3.storage" }

[env.alan]
workers_dev = true
2 changes: 2 additions & 0 deletions packages/db/README.md
@@ -1,5 +1,7 @@
# DB

web3.storage currently uses FaunaDB by default; its setup is described below. However, we are migrating to Postgres, and its setup is documented [here](./postgres/README.md).

## Getting Started

1. Sign up at https://fauna.com.
229 changes: 229 additions & 0 deletions packages/db/db-client-types.ts
@@ -0,0 +1,229 @@
import { definitions } from './postgres/pg-rest-api-types'

// User
export type UpsertUserInput = {
id: definitions['user']['id'],
name: definitions['user']['name'],
picture?: definitions['user']['picture'],
email: definitions['user']['email'],
issuer: definitions['user']['issuer'],
github?: definitions['user']['github'],
publicAddress: definitions['user']['public_address']
}

export type UpsertUserOutput = {
issuer: string
}

export type User = definitions['user']

export type UserOutput = {
_id: definitions['user']['id'],
name: definitions['user']['name'],
email: definitions['user']['email'],
issuer: definitions['user']['issuer'],
publicAddress: definitions['user']['public_address']
created: definitions['user']['inserted_at'],
updated: definitions['user']['updated_at']
}

// Auth key
export interface CreateAuthKeyInput {
name: definitions['auth_key']['name']
secret: definitions['auth_key']['secret']
user: definitions['auth_key']['user_id']
}

export type CreateAuthKeyOutput = {
_id: definitions['auth_key']['id']
}

export type AuthKey = {
_id: definitions['auth_key']['id'],
name: definitions['auth_key']['name'],
user: {
_id: definitions['user']['id'],
issuer: definitions['user']['issuer']
}
}

export type AuthKeyItem = definitions['auth_key'] & {
uploads: Array<
Pick<definitions['upload'], 'id'>
>
}

export type AuthKeyItemOutput = {
_id: definitions['auth_key']['id']
name: definitions['auth_key']['name']
secret: definitions['auth_key']['secret']
created: definitions['auth_key']['inserted_at']
hasUploads: boolean
}

// Pin
export type PinItem = {
_id: definitions['pin']['id']
status: definitions['pin']['status']
created: definitions['pin']['inserted_at']
updated: definitions['pin']['updated_at']
location: {
_id: definitions['pin_location']['id']
peerId: definitions['pin_location']['peer_id']
peerName: definitions['pin_location']['peer_name']
region: definitions['pin_location']['region']
}
}

export type PinItemOutput = {
_id?: definitions['pin']['id']
status: definitions['pin']['status']
created?: definitions['pin']['inserted_at']
updated: definitions['pin']['updated_at']
peerId: definitions['pin_location']['peer_id']
peerName: definitions['pin_location']['peer_name']
region: definitions['pin_location']['region']
}

// Backup
export type BackupOutput = {
_id: definitions['backup']['id']
created: definitions['backup']['inserted_at']
url: definitions['backup']['url']
uploadId: definitions['backup']['upload_id']
}

// Deal
export type Deal = {
dealId: definitions['deal']['deal_id']
storageProvider: definitions['deal']['provider']
status: definitions['deal']['status']
pieceCid: definitions['aggregate']['piece_cid']
dataCid: definitions['aggregate_entry']['cid_v1']
dataModelSelector: definitions['aggregate_entry']['datamodel_selector']
dealActivation: definitions['deal']['start_time']
dealExpiration: definitions['deal']['end_time']
created: definitions['deal']['entry_created']
updated: definitions['deal']['entry_last_updated']
}

// Content
export type ContentItem = {
cid: definitions['content']['cid']
dagSize: definitions['content']['dag_size']
created?: definitions['upload']['inserted_at']
pins: Array<{
status: definitions['pin']['status']
updated: definitions['pin']['updated_at']
location: {
_id: definitions['pin_location']['id']
peerId: definitions['pin_location']['peer_id']
peerName: definitions['pin_location']['peer_name']
region: definitions['pin_location']['region']
}
}>
}

export type ContentItemOutput = {
created: definitions['content']['inserted_at']
cid: definitions['content']['cid']
dagSize?: definitions['content']['dag_size']
pins: Array<PinItemOutput>,
deals: Array<Deal>
}


// Upload
export interface CreateUploadInput {
user: definitions['upload']['user_id']
authKey: definitions['upload']['auth_key_id']
contentCid: definitions['upload']['content_cid']
sourceCid: definitions['upload']['source_cid']
type: definitions['upload']['type']
name?: definitions['upload']['name']
dagSize?: definitions['content']['dag_size']
created?: definitions['upload']['inserted_at']
updated?: definitions['upload']['updated_at']
pins: Array<{
status: definitions['pin']['status']
location: {
peerId: definitions['pin_location']['peer_id']
peerName: definitions['pin_location']['peer_name']
region: definitions['pin_location']['region']
}
}>,
backupUrls: Array<definitions['backup']['url']>
}

export type CreateUploadOutput = {
_id: definitions['upload']['id'],
cid: definitions['content']['cid']
}

export type UploadItem = {
id: definitions['upload']['id']
type: definitions['upload']['type']
name?: definitions['upload']['name']
created?: definitions['upload']['inserted_at']
updated?: definitions['upload']['updated_at']
content: {
cid: definitions['content']['cid']
dagSize: definitions['content']['dag_size']
pins: Array<{
status: definitions['pin']['status']
updated: definitions['pin']['updated_at']
location: {
id: definitions['pin_location']['id']
peerId: definitions['pin_location']['peer_id']
peerName: definitions['pin_location']['peer_name']
region: definitions['pin_location']['region']
}
}>
}
}

export type UploadItemOutput = {
_id: definitions['upload']['id']
type: definitions['upload']['type']
name?: definitions['upload']['name']
created: definitions['upload']['inserted_at']
updated: definitions['upload']['updated_at']
cid: definitions['content']['cid']
dagSize?: definitions['content']['dag_size']
pins: Array<PinItemOutput>,
deals: Array<Deal>
}

export type UploadOutput = definitions['upload'] & {
user: Pick<definitions['user'], 'id' | 'issuer'>
key: Pick<definitions['auth_key'], 'name'>
content: Pick<definitions['content'], 'dag_size'> & {
pin: Pick<definitions['pin'], 'id'> & {
location: Pick<definitions['pin_location'], 'peer_id' | 'peer_name' | 'region'>
}[]
},
deals: Deal[]
}

export type ListUploadsOptions = {
/**
* Uploads created before a given timestamp.
*/
before?: string
/**
* Uploads created after a given timestamp.
*/
after?: string
/**
* Max records (default: 10).
*/
size?: number
/**
* Sort by given property.
*/
sortBy?: 'Date' | 'Name'
/**
* Sort order.
*/
sortOrder?: 'Asc' | 'Desc'
}
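
These `*Output` types describe how the nested PostgREST rows get flattened into the shapes the API returns. As a rough illustration only (not part of this commit; `toPinItemOutput` is a hypothetical name), a `PinItem` with its nested `pin_location` record could be mapped to a `PinItemOutput` like so:

```ts
import type { PinItem, PinItemOutput } from './db-client-types'

// Hypothetical helper, not part of this commit: flattens the nested
// pin_location record into the flat shape the API responds with.
function toPinItemOutput (pin: PinItem): PinItemOutput {
  return {
    _id: pin._id,
    status: pin.status,
    created: pin.created,
    updated: pin.updated,
    peerId: pin.location.peerId,
    peerName: pin.location.peerName,
    region: pin.location.region
  }
}
```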
17 changes: 17 additions & 0 deletions packages/db/errors.js
@@ -0,0 +1,17 @@
export class DBError extends Error {
  /**
   * @param {{
   * message: string
   * details: string
   * hint: string
   * code: string
   * }} cause
   */
  constructor ({ message, details, hint, code }) {
    super(`${message}, details: ${details}, hint: ${hint}, code: ${code}`)
    this.name = 'DBError'
    this.code = DBError.CODE
  }
}

DBError.CODE = 'ERROR_DB'
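
For illustration only (the payload values below are made up), a caller might wrap the `message`, `details`, `hint` and `code` fields of a failed PostgREST response in a `DBError` like this:

```ts
import { DBError } from './errors.js'

// Made-up error payload in the shape a failed PostgREST request reports.
const cause = {
  message: 'duplicate key value violates unique constraint',
  details: 'Key (source_cid)=(bafy...) already exists.',
  hint: 'Perform an upsert instead of an insert.',
  code: '23505'
}

try {
  throw new DBError(cause)
} catch (err) {
  if (err instanceof DBError) {
    console.log(err.code)    // 'ERROR_DB'
    console.log(err.message) // message, details, hint and code folded into one string
  }
}
```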
36 changes: 36 additions & 0 deletions packages/db/index.d.ts
@@ -1,9 +1,45 @@
import { gql } from 'graphql-request'
import { RequestDocument } from 'graphql-request/dist/types'

import type {
  UpsertUserInput,
  UpsertUserOutput,
  UserOutput,
  CreateUploadInput,
  ListUploadsOptions,
  CreateUploadOutput,
  UploadItemOutput,
  ContentItemOutput,
  Deal,
  CreateAuthKeyInput,
  CreateAuthKeyOutput,
  AuthKey,
  AuthKeyItemOutput,
  PinItemOutput,
  BackupOutput
} from './db-client-types'

export { gql }

export class DBClient {
  constructor(config: { endpoint?: string; token: string })
  upsertUser (user: UpsertUserInput): Promise<UpsertUserOutput>
  getUser (issuer: string): Promise<UserOutput>
  getUsedStorage (userId: number): Promise<number>
  createUpload (data: CreateUploadInput): Promise<CreateUploadOutput>
  getUpload (cid: string, userId: number): Promise<UploadItemOutput>
  listUploads (userId: number, opts?: ListUploadsOptions): Promise<UploadItemOutput[]>
  renameUpload (cid: string, userId: number, name: string): Promise<{ name: string }>
  deleteUpload(cid: string, userId: number): Promise<{ _id: number }>
  getStatus (cid: string): Promise<ContentItemOutput>
  getBackups(uploadId: number): Promise<Array<BackupOutput>>
  upsertPin (cid: string, pin: PinItemOutput): Promise<number>
  getPins (cid: string): Promise<Array<PinItemOutput>>
  getDeals (cid: string): Promise<Deal[]>
  getDealsForCids (cids: string[]): Promise<Record<string, Deal[]>>
  createKey (key: CreateAuthKeyInput): Promise<CreateAuthKeyOutput>
  getKey (issuer: string, secret: string): Promise<AuthKey>
  listKeys (userId: number): Promise<Array<AuthKeyItemOutput>>
  deleteKey (id: number): Promise<void>
  query<T, V>(document: RequestDocument, variables: V): Promise<T>
}
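
As a minimal usage sketch, assuming the package is imported as `@web3-storage/db` and configured with the `PG_REST_URL`/`PG_REST_JWT` values referenced in the db.yml workflow above (the issuer DID below is a placeholder), the revamped client could be exercised like this:

```ts
import { DBClient } from '@web3-storage/db'

// Assumed configuration: env var names match the secrets wired up in .github/workflows/db.yml.
const db = new DBClient({
  endpoint: process.env.PG_REST_URL,   // PostgREST endpoint (likely what the new DATABASE_URL vars point at)
  token: process.env.PG_REST_JWT ?? '' // JWT used to authenticate against PostgREST
})

async function example () {
  // Look up a user by issuer DID (placeholder value), then page through their uploads.
  const user = await db.getUser('did:ethr:0x0000000000000000000000000000000000000000')
  const uploads = await db.listUploads(user._id, { size: 10, sortBy: 'Date', sortOrder: 'Desc' })
  console.log(uploads.map(u => u.cid))
}

example().catch(console.error)
```

The constructor options mirror the `config: { endpoint?: string; token: string }` signature declared above.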