diff --git a/.circleci/config.yml b/.circleci/config.yml index 1ee94f809d..b672554095 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,7 +16,7 @@ workflows: - main - hotfix* - - test-server: + - test-server: &test-server-job-definition context: - speckle-server-licensing - stripe-integration @@ -32,6 +32,8 @@ workflows: requires: - docker-publish-postgres-container + - test-server-multiregion: *test-server-job-definition + - test-frontend-2: filters: *filters-allow-all @@ -190,6 +192,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-server-multiregion - test-preview-service - docker-publish-frontend: @@ -205,6 +208,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-server-multiregion - test-preview-service - docker-publish-frontend-2: @@ -220,6 +224,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-server-multiregion - test-preview-service - docker-publish-webhooks: @@ -235,6 +240,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-server-multiregion - test-preview-service - docker-publish-file-imports: @@ -250,6 +256,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-server-multiregion - test-preview-service - docker-publish-previews: @@ -265,6 +272,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-server-multiregion - test-preview-service - docker-publish-test-container: @@ -280,6 +288,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-server-multiregion - test-preview-service - docker-publish-postgres-container: @@ -301,6 +310,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-server-multiregion - test-preview-service - docker-publish-docker-compose-ingress: @@ -316,6 +326,7 @@ workflows: - test-objectsender - test-server - test-server-no-ff + - test-server-multiregion - test-preview-service - publish-helm-chart: @@ -356,6 +367,7 @@ workflows: - get-version - test-server - test-server-no-ff + - test-server-multiregion - test-ui-components - test-frontend-2 - test-viewer @@ -573,6 +585,48 @@ jobs: FF_GATEKEEPER_MODULE_ENABLED: 'false' FF_BILLING_INTEGRATION_ENABLED: 'false' + test-server-multiregion: + <<: *test-server-job + docker: + - image: cimg/node:18.19.0 + - image: cimg/redis:7.2.4 + - image: 'speckle/speckle-postgres' + environment: + POSTGRES_DB: speckle2_test + POSTGRES_PASSWORD: speckle + POSTGRES_USER: speckle + command: -c 'max_connections=1000' + - image: 'speckle/speckle-postgres' + environment: + POSTGRES_DB: speckle2_test + POSTGRES_PASSWORD: speckle + POSTGRES_USER: speckle + command: -c 'max_connections=1000' -c 'port=5433' + - image: 'minio/minio' + command: server /data --console-address ":9001" + environment: + # Same as test-server: + NODE_ENV: test + DATABASE_URL: 'postgres://speckle:speckle@127.0.0.1:5432/speckle2_test' + PGDATABASE: speckle2_test + POSTGRES_MAX_CONNECTIONS_SERVER: 20 + PGUSER: speckle + SESSION_SECRET: 'keyboard cat' + STRATEGY_LOCAL: 'true' + CANONICAL_URL: 'http://127.0.0.1:3000' + S3_ENDPOINT: 'http://127.0.0.1:9000' + S3_ACCESS_KEY: 'minioadmin' + S3_SECRET_KEY: 'minioadmin' + S3_BUCKET: 'speckle-server' + S3_CREATE_BUCKET: 'true' + REDIS_URL: 'redis://127.0.0.1:6379' + S3_REGION: '' # optional, defaults to 'us-east-1' + AUTOMATE_ENCRYPTION_KEYS_PATH: 'test/assets/automate/encryptionKeys.json' + FF_BILLING_INTEGRATION_ENABLED: 'true' + # These are the only 2 different env keys: + 
MULTI_REGION_CONFIG_PATH: '../../.circleci/multiregion.test-ci.json' + RUN_TESTS_IN_MULTIREGION_MODE: true + test-frontend-2: docker: &docker-node-browsers-image - image: cimg/node:18.19.0-browsers diff --git a/.circleci/multiregion.test-ci.json b/.circleci/multiregion.test-ci.json new file mode 100644 index 0000000000..bbf5d3142b --- /dev/null +++ b/.circleci/multiregion.test-ci.json @@ -0,0 +1,14 @@ +{ + "main": { + "postgres": { + "connectionUri": "postgresql://speckle:speckle@127.0.0.1:5432/speckle2_test" + } + }, + "regions": { + "region1": { + "postgres": { + "connectionUri": "postgresql://speckle:speckle@127.0.0.1:5433/speckle2_test" + } + } + } +} diff --git a/.gitignore b/.gitignore index c090a9b77a..a1dfce7e9a 100644 --- a/.gitignore +++ b/.gitignore @@ -74,4 +74,5 @@ redis-data/ .tshy-build # Server -multiregion.json \ No newline at end of file +multiregion.json +multiregion.test.json \ No newline at end of file diff --git a/package.json b/package.json index f70ce271db..911f0a3344 100644 --- a/package.json +++ b/package.json @@ -21,6 +21,7 @@ "dev:docker": "docker compose -f ./docker-compose-deps.yml", "dev:docker:up": "docker compose -f ./docker-compose-deps.yml up -d", "dev:docker:down": "docker compose -f ./docker-compose-deps.yml down", + "dev:docker:restart": "yarn dev:docker:down && yarn dev:docker:up", "dev:kind:up": "ctlptl apply --filename ./.circleci/deployment/cluster-config.yaml", "dev:kind:down": "ctlptl delete -f ./.circleci/deployment/cluster-config.yaml", "dev:kind:helm:up": "yarn dev:kind:up && tilt up --file ./.circleci/deployment/Tiltfile.helm --context kind-speckle-server", diff --git a/packages/frontend-2/components/projects/AddDialog.vue b/packages/frontend-2/components/projects/AddDialog.vue index fa3aa9490c..5b1758cd94 100644 --- a/packages/frontend-2/components/projects/AddDialog.vue +++ b/packages/frontend-2/components/projects/AddDialog.vue @@ -118,7 +118,8 @@ const emit = defineEmits<{ const isWorkspacesEnabled = useIsWorkspacesEnabled() const createProject = useCreateProject() const router = useRouter() -const { handleSubmit, meta } = useForm() +const logger = useLogger() +const { handleSubmit, meta, isSubmitting } = useForm() const { result: workspaceResult } = useQuery(projectWorkspaceSelectQuery, null, () => ({ enabled: isWorkspacesEnabled.value })) @@ -127,45 +128,62 @@ const visibility = ref(ProjectVisibility.Unlisted) const selectedWorkspace = ref() const showConfirmDialog = ref(false) const confirmActionType = ref<'navigate' | 'close' | null>(null) +const isClosing = ref(false) const open = defineModel('open', { required: true }) const mp = useMixpanel() const onSubmit = handleSubmit(async (values) => { - const workspaceId = props.workspaceId || selectedWorkspace.value?.id - - await createProject({ - name: values.name, - description: values.description, - visibility: visibility.value, - ...(workspaceId ? { workspaceId } : {}) - }) - emit('created') - mp.track('Stream Action', { - type: 'action', - name: 'create', - // eslint-disable-next-line camelcase - workspace_id: props.workspaceId - }) - open.value = false + if (isClosing.value) return // Prevent submission while closing + + try { + isClosing.value = true + const workspaceId = props.workspaceId || selectedWorkspace.value?.id + + await createProject({ + name: values.name, + description: values.description, + visibility: visibility.value, + ...(workspaceId ? 
{ workspaceId } : {}) + }) + emit('created') + mp.track('Stream Action', { + type: 'action', + name: 'create', + // eslint-disable-next-line camelcase + workspace_id: props.workspaceId + }) + open.value = false + } catch (error) { + isClosing.value = false + logger.error('Failed to create project:', error) + } }) const workspaces = computed( () => workspaceResult.value?.activeUser?.workspaces.items ?? [] ) const hasWorkspaces = computed(() => workspaces.value.length > 0) + const dialogButtons = computed((): LayoutDialogButton[] => { + const isDisabled = isSubmitting.value || isClosing.value + return [ { text: 'Cancel', - props: { color: 'outline' }, + props: { + color: 'outline', + disabled: isDisabled + }, onClick: confirmCancel }, { text: 'Create', props: { - submit: true + submit: true, + loading: isDisabled, + disabled: isDisabled }, onClick: onSubmit } @@ -206,6 +224,7 @@ const handleConfirmAction = () => { watch(open, (newVal, oldVal) => { if (newVal && !oldVal) { selectedWorkspace.value = undefined + isClosing.value = false } }) diff --git a/packages/server/.env-example b/packages/server/.env-example index f6104277aa..2982b08b9e 100644 --- a/packages/server/.env-example +++ b/packages/server/.env-example @@ -147,3 +147,8 @@ OIDC_CLIENT_SECRET="gLb9IEutYQ0npyvA8iHxPsObY3duGB0w" # OTEL_TRACE_URL="" # OTEL_TRACE_KEY="" # OTEL_TRACE_VALUE="" + +############################################################ +# Multi region settings +############################################################ +MULTI_REGION_CONFIG_PATH="multiregion.json" \ No newline at end of file diff --git a/packages/server/.env.test-example b/packages/server/.env.test-example index 0662ea0b7f..47dd87c134 100644 --- a/packages/server/.env.test-example +++ b/packages/server/.env.test-example @@ -4,4 +4,6 @@ PORT=0 POSTGRES_URL=postgres://speckle:speckle@127.0.0.1/speckle2_test -POSTGRES_USER='' \ No newline at end of file +POSTGRES_USER='' +MULTI_REGION_CONFIG_PATH="multiregion.test.json" +#RUN_TESTS_IN_MULTIREGION_MODE=true \ No newline at end of file diff --git a/packages/server/.mocharc.js b/packages/server/.mocharc.js index 9ca96c989c..5ce9d9f539 100644 --- a/packages/server/.mocharc.js +++ b/packages/server/.mocharc.js @@ -14,7 +14,7 @@ const ignore = [ /** @type {import("mocha").MochaOptions} */ const config = { spec: ['modules/**/*.spec.js', 'modules/**/*.spec.ts', 'logging/**/*.spec.js'], - require: ['ts-node/register', 'test/hooks.js'], + require: ['ts-node/register', 'test/hooks.ts'], ...(ignore.length ? { ignore } : {}), slow: 0, timeout: '150000', diff --git a/packages/server/assets/workspacesCore/typedefs/workspaces.graphql b/packages/server/assets/workspacesCore/typedefs/workspaces.graphql index 1ebc421c29..294c0ccc47 100644 --- a/packages/server/assets/workspacesCore/typedefs/workspaces.graphql +++ b/packages/server/assets/workspacesCore/typedefs/workspaces.graphql @@ -147,6 +147,7 @@ type WorkspaceMutations { # We are, for the moment, doing the check in the resolver deleteDomain(input: WorkspaceDomainDeleteInput!): Workspace! @hasScope(scope: "workspace:update") + deleteSsoProvider(workspaceId: String!): Boolean! invites: WorkspaceInviteMutations! projects: WorkspaceProjectMutations! 
@hasServerRole(role: SERVER_USER) } diff --git a/packages/server/modules/cli/commands/db/migrate/latest.ts b/packages/server/modules/cli/commands/db/migrate/latest.ts index 5b73ea855d..2686fd53ce 100644 --- a/packages/server/modules/cli/commands/db/migrate/latest.ts +++ b/packages/server/modules/cli/commands/db/migrate/latest.ts @@ -1,5 +1,8 @@ import knex from '@/db/knex' import { logger } from '@/logging/logging' +import { getRegisteredRegionClients } from '@/modules/multiregion/dbSelector' +import { isTestEnv } from '@/modules/shared/helpers/envHelper' +import { mochaHooks } from '@/test/hooks' import { CommandModule } from 'yargs' const command: CommandModule = { @@ -7,7 +10,19 @@ const command: CommandModule = { describe: 'Run all migrations that have not yet been run', async handler() { logger.info('Running latest migration...') - await knex.migrate.latest() + + // In tests we want different logic - just run beforeAll + if (isTestEnv()) { + // Run before hooks, to properly initialize everything + await (mochaHooks.beforeAll as () => Promise)() + } else { + const regionDbs = await getRegisteredRegionClients() + const dbs = [knex, ...Object.values(regionDbs)] + for (const db of dbs) { + await db.migrate.latest() + } + } + logger.info('Completed running migration') } } diff --git a/packages/server/modules/cli/commands/db/migrate/rollback.ts b/packages/server/modules/cli/commands/db/migrate/rollback.ts index 9b5563bf55..4d86c0ddcd 100644 --- a/packages/server/modules/cli/commands/db/migrate/rollback.ts +++ b/packages/server/modules/cli/commands/db/migrate/rollback.ts @@ -1,5 +1,8 @@ import knex from '@/db/knex' import { logger } from '@/logging/logging' +import { getRegisteredRegionClients } from '@/modules/multiregion/dbSelector' +import { isTestEnv } from '@/modules/shared/helpers/envHelper' +import { mochaHooks, resetPubSubFactory } from '@/test/hooks' import { CommandModule } from 'yargs' const command: CommandModule = { @@ -7,7 +10,21 @@ const command: CommandModule = { describe: 'Roll back all migrations', async handler() { logger.info('Rolling back migrations...') - await knex.migrate.rollback(undefined, true) + + if (isTestEnv()) { + // Run before hooks, to properly initialize everything first + await (mochaHooks.beforeAll as () => Promise)() + } + + const regionDbs = await getRegisteredRegionClients() + const dbs = [knex, ...Object.values(regionDbs)] + + for (const db of dbs) { + const resetPubSub = resetPubSubFactory({ db }) + await resetPubSub() + await db.migrate.rollback(undefined, true) + } + logger.info('Completed rolling back migrations') } } diff --git a/packages/server/modules/comments/tests/comments.spec.js b/packages/server/modules/comments/tests/comments.spec.js index 3ee1460bac..149d35f238 100644 --- a/packages/server/modules/comments/tests/comments.spec.js +++ b/packages/server/modules/comments/tests/comments.spec.js @@ -1290,7 +1290,7 @@ describe('Comments @comments', () => { before(async () => { // Truncate comments - truncateTables([Comments.name]) + await truncateTables([Comments.name]) // Create a single comment with a blob const createCommentResult = await createComment({ diff --git a/packages/server/modules/core/graph/generated/graphql.ts b/packages/server/modules/core/graph/generated/graphql.ts index dcbcf6980c..38c28efc36 100644 --- a/packages/server/modules/core/graph/generated/graphql.ts +++ b/packages/server/modules/core/graph/generated/graphql.ts @@ -4239,6 +4239,7 @@ export type WorkspaceMutations = { create: Workspace; delete: 
Scalars['Boolean']['output']; deleteDomain: Workspace; + deleteSsoProvider: Scalars['Boolean']['output']; invites: WorkspaceInviteMutations; join: Workspace; leave: Scalars['Boolean']['output']; @@ -4270,6 +4271,11 @@ export type WorkspaceMutationsDeleteDomainArgs = { }; +export type WorkspaceMutationsDeleteSsoProviderArgs = { + workspaceId: Scalars['String']['input']; +}; + + export type WorkspaceMutationsJoinArgs = { input: JoinWorkspaceInput; }; @@ -6532,6 +6538,7 @@ export type WorkspaceMutationsResolvers>; delete?: Resolver>; deleteDomain?: Resolver>; + deleteSsoProvider?: Resolver>; invites?: Resolver; join?: Resolver>; leave?: Resolver>; diff --git a/packages/server/modules/core/graph/resolvers/branches.ts b/packages/server/modules/core/graph/resolvers/branches.ts index 0df76770c9..b6b9aa001e 100644 --- a/packages/server/modules/core/graph/resolvers/branches.ts +++ b/packages/server/modules/core/graph/resolvers/branches.ts @@ -37,13 +37,12 @@ export = { Query: {}, Stream: { async branches(parent, args) { - const projectDb = await getProjectDbClient({ projectId: parent.id }) - + const projectDB = await getProjectDbClient({ projectId: parent.id }) const getPaginatedStreamBranches = getPaginatedStreamBranchesFactory({ getPaginatedStreamBranchesPage: getPaginatedStreamBranchesPageFactory({ - db: projectDb + db: projectDB }), - getStreamBranchCount: getStreamBranchCountFactory({ db: projectDb }) + getStreamBranchCount: getStreamBranchCountFactory({ db: projectDB }) }) return await getPaginatedStreamBranches(parent.id, args) }, @@ -55,13 +54,12 @@ export = { // When getting a branch by name, if not found, we try to do a 'hail mary' attempt // and get it by id as well (this would be coming from a FE2 url). - const projectDb = await getProjectDbClient({ projectId: parent.id }) - - const getStreamBranchByName = getStreamBranchByNameFactory({ db: projectDb }) + const projectDB = await getProjectDbClient({ projectId: parent.id }) + const getStreamBranchByName = getStreamBranchByNameFactory({ db: projectDB }) const branchByName = await getStreamBranchByName(parent.id, args.name) if (branchByName) return branchByName - const getBranchById = getBranchByIdFactory({ db: projectDb }) + const getBranchById = getBranchByIdFactory({ db: projectDB }) const branchByIdRes = await getBranchById(args.name) if (!branchByIdRes) return null @@ -86,16 +84,16 @@ export = { context.resourceAccessRules ) - const projectDb = await getProjectDbClient({ projectId: args.branch.streamId }) + const projectDB = await getProjectDbClient({ projectId: args.branch.streamId }) + const getStreamBranchByName = getStreamBranchByNameFactory({ db: projectDB }) const createBranchAndNotify = createBranchAndNotifyFactory({ - getStreamBranchByName: getStreamBranchByNameFactory({ db: projectDb }), - createBranch: createBranchFactory({ db: projectDb }), + getStreamBranchByName, + createBranch: createBranchFactory({ db: projectDB }), addBranchCreatedActivity: addBranchCreatedActivityFactory({ saveActivity: saveActivityFactory({ db }), publish }) }) - const { id } = await createBranchAndNotify(args.branch, context.userId!) 
return id @@ -109,17 +107,16 @@ export = { context.resourceAccessRules ) - const projectDb = await getProjectDbClient({ projectId: args.branch.streamId }) - + const projectDB = await getProjectDbClient({ projectId: args.branch.streamId }) + const getBranchById = getBranchByIdFactory({ db: projectDB }) const updateBranchAndNotify = updateBranchAndNotifyFactory({ - getBranchById: getBranchByIdFactory({ db: projectDb }), - updateBranch: updateBranchFactory({ db: projectDb }), + getBranchById, + updateBranch: updateBranchFactory({ db: projectDB }), addBranchUpdatedActivity: addBranchUpdatedActivityFactory({ saveActivity: saveActivityFactory({ db }), publish }) }) - const newBranch = await updateBranchAndNotify(args.branch, context.userId!) return !!newBranch }, @@ -132,20 +129,20 @@ export = { context.resourceAccessRules ) - const projectDb = await getProjectDbClient({ projectId: args.branch.streamId }) - + const projectDB = await getProjectDbClient({ projectId: args.branch.streamId }) + const markBranchStreamUpdated = markBranchStreamUpdatedFactory({ db: projectDB }) + const getStream = getStreamFactory({ db: projectDB }) const deleteBranchAndNotify = deleteBranchAndNotifyFactory({ - getStream: getStreamFactory({ db: projectDb }), - getBranchById: getBranchByIdFactory({ db: projectDb }), + getStream, + getBranchById: getBranchByIdFactory({ db: projectDB }), modelsEventsEmitter: ModelsEmitter.emit, - markBranchStreamUpdated: markBranchStreamUpdatedFactory({ db: projectDb }), + markBranchStreamUpdated, addBranchDeletedActivity: addBranchDeletedActivityFactory({ saveActivity: saveActivityFactory({ db }), publish }), - deleteBranchById: deleteBranchByIdFactory({ db: projectDb }) + deleteBranchById: deleteBranchByIdFactory({ db: projectDB }) }) - const deleted = await deleteBranchAndNotify(args.branch, context.userId!) 
return deleted } diff --git a/packages/server/modules/cross-server-sync/graph/generated/graphql.ts b/packages/server/modules/cross-server-sync/graph/generated/graphql.ts index d198006cf4..6420348085 100644 --- a/packages/server/modules/cross-server-sync/graph/generated/graphql.ts +++ b/packages/server/modules/cross-server-sync/graph/generated/graphql.ts @@ -4220,6 +4220,7 @@ export type WorkspaceMutations = { create: Workspace; delete: Scalars['Boolean']['output']; deleteDomain: Workspace; + deleteSsoProvider: Scalars['Boolean']['output']; invites: WorkspaceInviteMutations; join: Workspace; leave: Scalars['Boolean']['output']; @@ -4251,6 +4252,11 @@ export type WorkspaceMutationsDeleteDomainArgs = { }; +export type WorkspaceMutationsDeleteSsoProviderArgs = { + workspaceId: Scalars['String']['input']; +}; + + export type WorkspaceMutationsJoinArgs = { input: JoinWorkspaceInput; }; diff --git a/packages/server/modules/fileuploads/tests/fileuploads.integration.spec.ts b/packages/server/modules/fileuploads/tests/fileuploads.integration.spec.ts index bb219862dc..2256439d0a 100644 --- a/packages/server/modules/fileuploads/tests/fileuploads.integration.spec.ts +++ b/packages/server/modules/fileuploads/tests/fileuploads.integration.spec.ts @@ -142,7 +142,7 @@ describe('FileUploads @fileuploads', () => { let existingCanonicalUrl: string let existingPort: string // eslint-disable-next-line @typescript-eslint/no-explicit-any - let sendRequest: (token: string, query: unknown) => Promise + let sendRequest: (token: string, query: string | object) => Promise let serverAddress: string let serverPort: string diff --git a/packages/server/modules/multiregion/dbSelector.ts b/packages/server/modules/multiregion/dbSelector.ts index 905dd87368..1bad4eebe6 100644 --- a/packages/server/modules/multiregion/dbSelector.ts +++ b/packages/server/modules/multiregion/dbSelector.ts @@ -23,9 +23,16 @@ import { getMainRegionConfig } from '@/modules/multiregion/regionConfig' import { MaybeNullOrUndefined } from '@speckle/shared' +import { isTestEnv } from '@/modules/shared/helpers/envHelper' let getter: GetProjectDb | undefined = undefined +/** + * All dbs share the list of pubs/subs, so we need to make sure the test db uses their own. + * As long as there's only 1 test db per instance, it should be fine + */ +const createPubSubName = (name: string): string => (isTestEnv() ? 
`test_${name}` : name) + export const getRegionDb: GetRegionDb = async ({ regionKey }) => { const getRegion = getRegionFactory({ db }) const regionClients = await getRegisteredRegionClients() @@ -86,11 +93,16 @@ export const getProjectDbClient: GetProjectDb = async ({ projectId }) => { type RegionClients = Record let registeredRegionClients: RegionClients | undefined = undefined -const initializeRegisteredRegionClients = async (): Promise => { +/** + * Idempotently initialize registered region (in db) Knex clients + */ +export const initializeRegisteredRegionClients = async (): Promise => { const configuredRegions = await getRegionsFactory({ db })() - const regionConfigs = await getAvailableRegionConfig() + if (!configuredRegions.length) return {} - return Object.fromEntries( + // init knex clients + const regionConfigs = await getAvailableRegionConfig() + const ret = Object.fromEntries( configuredRegions.map((region) => { if (!(region.key in regionConfigs)) throw new MisconfiguredEnvironmentError( @@ -99,6 +111,17 @@ const initializeRegisteredRegionClients = async (): Promise => { return [region.key, configureClient(regionConfigs[region.key]).public] }) ) + + // run migrations + await Promise.all(Object.values(ret).map((db) => db.migrate.latest())) + + // (re-)set up pub-sub, if needed + await Promise.all( + Object.keys(ret).map((regionKey) => initializeRegion({ regionKey })) + ) + + registeredRegionClients = ret + return ret } export const getRegisteredRegionClients = async (): Promise => { @@ -110,11 +133,10 @@ export const getRegisteredRegionClients = async (): Promise => { export const getRegisteredDbClients = async (): Promise => Object.values(await getRegisteredRegionClients()) +/** + * Idempotently initialize region + */ export const initializeRegion: InitializeRegion = async ({ regionKey }) => { - const knownClients = await getRegisteredRegionClients() - if (regionKey in knownClients) - throw new Error(`Region ${regionKey} is already initialized`) - const regionConfigs = await getAvailableRegionConfig() if (!(regionKey in regionConfigs)) throw new Error(`RegionKey ${regionKey} not available in config`) @@ -122,7 +144,6 @@ export const initializeRegion: InitializeRegion = async ({ regionKey }) => { const newRegionConfig = regionConfigs[regionKey] const regionDb = configureClient(newRegionConfig) await regionDb.public.migrate.latest() - // TODO, set up pub-sub shit const mainDbConfig = await getMainRegionConfig() const mainDb = configureClient(mainDbConfig) @@ -142,8 +163,12 @@ export const initializeRegion: InitializeRegion = async ({ regionKey }) => { regionName: regionKey, sslmode }) - // pushing to the singleton object here - knownClients[regionKey] = regionDb.public + + // pushing to the singleton object here, its only not available + // if this is being triggered from init, and in that case its gonna be set after anyway + if (registeredRegionClients) { + registeredRegionClients[regionKey] = regionDb.public + } } interface ReplicationArgs { @@ -159,9 +184,11 @@ const setUpUserReplication = async ({ sslmode, regionName }: ReplicationArgs): Promise => { - // TODO: ensure its created... 
+ const subName = createPubSubName(`userssub_${regionName}`) + const pubName = createPubSubName('userspub') + try { - await from.public.raw('CREATE PUBLICATION userspub FOR TABLE users;') + await from.public.raw(`CREATE PUBLICATION ${pubName} FOR TABLE users;`) } catch (err) { if (!(err instanceof Error)) throw err if (!err.message.includes('already exists')) throw err @@ -174,11 +201,10 @@ const setUpUserReplication = async ({ ) const port = fromUrl.port ? fromUrl.port : '5432' const fromDbName = fromUrl.pathname.replace('/', '') - const subName = `userssub_${regionName}` const rawSqeel = `SELECT * FROM aiven_extras.pg_create_subscription( '${subName}', 'dbname=${fromDbName} host=${fromUrl.hostname} port=${port} sslmode=${sslmode} user=${fromUrl.username} password=${fromUrl.password}', - 'userspub', + '${pubName}', '${subName}', TRUE, TRUE @@ -198,9 +224,11 @@ const setUpProjectReplication = async ({ regionName, sslmode }: ReplicationArgs): Promise => { - // TODO: ensure its created... + const subName = createPubSubName(`projectsub_${regionName}`) + const pubName = createPubSubName('projectpub') + try { - await from.public.raw('CREATE PUBLICATION projectpub FOR TABLE streams;') + await from.public.raw(`CREATE PUBLICATION ${pubName} FOR TABLE streams;`) } catch (err) { if (!(err instanceof Error)) throw err if (!err.message.includes('already exists')) throw err @@ -213,11 +241,10 @@ const setUpProjectReplication = async ({ ) const port = fromUrl.port ? fromUrl.port : '5432' const fromDbName = fromUrl.pathname.replace('/', '') - const subName = `projectsub_${regionName}` const rawSqeel = `SELECT * FROM aiven_extras.pg_create_subscription( '${subName}', 'dbname=${fromDbName} host=${fromUrl.hostname} port=${port} sslmode=${sslmode} user=${fromUrl.username} password=${fromUrl.password}', - 'projectpub', + '${pubName}', '${subName}', TRUE, TRUE diff --git a/packages/server/modules/multiregion/index.ts b/packages/server/modules/multiregion/index.ts index f5957245d2..ca3d2c7ff5 100644 --- a/packages/server/modules/multiregion/index.ts +++ b/packages/server/modules/multiregion/index.ts @@ -1,5 +1,5 @@ import { moduleLogger } from '@/logging/logging' -import { getRegisteredRegionClients } from '@/modules/multiregion/dbSelector' +import { initializeRegisteredRegionClients } from '@/modules/multiregion/dbSelector' import { isMultiRegionEnabled } from '@/modules/multiregion/helpers' import { SpeckleModule } from '@/modules/shared/helpers/typeHelper' @@ -11,12 +11,9 @@ const multiRegion: SpeckleModule = { } moduleLogger.info('🌍 Init multiRegion module') - // this should have all the builtin checks to make sure all regions are working - // and no regions are missing - const regionClients = await getRegisteredRegionClients() - moduleLogger.info('Migrating region databases') - await Promise.all(Object.values(regionClients).map((db) => db.migrate.latest())) - moduleLogger.info('Migrations done') + + // Init registered region clients + await initializeRegisteredRegionClients() } } diff --git a/packages/server/modules/multiregion/regionConfig.ts b/packages/server/modules/multiregion/regionConfig.ts index 3b98a43002..db13cce516 100644 --- a/packages/server/modules/multiregion/regionConfig.ts +++ b/packages/server/modules/multiregion/regionConfig.ts @@ -17,9 +17,11 @@ import { let multiRegionConfig: Optional = undefined const getMultiRegionConfig = async (): Promise => { - if (isDevOrTestEnv() && !isMultiRegionEnabled()) + if (isDevOrTestEnv() && !isMultiRegionEnabled()) { // this should throw somehow return { 
main: { postgres: { connectionUri: '' } }, regions: {} } + } + if (!multiRegionConfig) { const relativePath = getMultiRegionConfigPath() diff --git a/packages/server/modules/multiregion/tests/e2e/serverAdmin.graph.spec.ts b/packages/server/modules/multiregion/tests/e2e/serverAdmin.graph.spec.ts index 4064b9412f..535b587b2c 100644 --- a/packages/server/modules/multiregion/tests/e2e/serverAdmin.graph.spec.ts +++ b/packages/server/modules/multiregion/tests/e2e/serverAdmin.graph.spec.ts @@ -15,11 +15,11 @@ import { TestApolloServer } from '@/test/graphqlHelper' import { beforeEachContext, truncateTables } from '@/test/hooks' -import { MultiRegionConfigServiceMock } from '@/test/mocks/global' +import { MultiRegionConfigMock, MultiRegionDbSelectorMock } from '@/test/mocks/global' import { Roles } from '@speckle/shared' import { expect } from 'chai' -describe.skip('Multi Region Server Settings', () => { +describe('Multi Region Server Settings', () => { let testAdminUser: BasicTestUser let testBasicUser: BasicTestUser let apollo: TestApolloServer @@ -41,14 +41,12 @@ describe.skip('Multi Region Server Settings', () => { } before(async () => { - // Have to mock both - // MultiRegionConfigServiceMock.mockFunction( - // 'getAvailableRegionConfigsFactory', - // () => async () => fakeRegionConfig - // ) - MultiRegionConfigServiceMock.mockFunction( - 'getAvailableRegionKeysFactory', - () => async () => Object.keys(fakeRegionConfig) + MultiRegionConfigMock.mockFunction( + 'getAvailableRegionConfig', + async () => fakeRegionConfig + ) + MultiRegionDbSelectorMock.mockFunction('initializeRegion', async () => + Promise.resolve() ) await beforeEachContext() @@ -58,7 +56,8 @@ describe.skip('Multi Region Server Settings', () => { }) after(() => { - MultiRegionConfigServiceMock.resetMockedFunctions() + MultiRegionConfigMock.resetMockedFunctions() + MultiRegionDbSelectorMock.resetMockedFunctions() }) describe('server config', () => { @@ -121,11 +120,11 @@ describe.skip('Multi Region Server Settings', () => { } const res = await createRegion(input) + expect(res).to.not.haveGraphQLErrors() expect(res.data?.serverInfoMutations.multiRegion.create).to.deep.equal({ ...input, id: input.key }) - expect(res).to.not.haveGraphQLErrors() }) it("doesn't work with already used up key", async () => { diff --git a/packages/server/modules/multiregion/tests/intergration/repositories/projectRegion.spec.ts b/packages/server/modules/multiregion/tests/intergration/repositories/projectRegion.spec.ts index 7db90fe899..56ad593723 100644 --- a/packages/server/modules/multiregion/tests/intergration/repositories/projectRegion.spec.ts +++ b/packages/server/modules/multiregion/tests/intergration/repositories/projectRegion.spec.ts @@ -10,8 +10,13 @@ import { createInmemoryRedisClient } from '@/test/redisHelper' import { createStreamFactory } from '@/modules/core/repositories/streams' import { db } from '@/db/knex' import { storeRegionFactory } from '@/modules/multiregion/repositories' +import { truncateRegionsSafely } from '@/test/speckle-helpers/regions' describe('projectRegion repositories @multiregion', () => { + after(async () => { + await truncateRegionsSafely() + }) + describe('inMemoryKeyStoreFactory creates an object, which', () => { const { getRegionKey, writeRegion } = inMemoryRegionKeyStoreFactory() it('returns undefined if projectId is not in the cache', () => { diff --git a/packages/server/modules/shared/helpers/envHelper.ts b/packages/server/modules/shared/helpers/envHelper.ts index b08683e024..fc220a8135 100644 --- 
a/packages/server/modules/shared/helpers/envHelper.ts +++ b/packages/server/modules/shared/helpers/envHelper.ts @@ -418,3 +418,6 @@ export function getOtelHeaderValue() { export function getMultiRegionConfigPath() { return getStringFromEnv('MULTI_REGION_CONFIG_PATH') } + +export const shouldRunTestsInMultiregionMode = () => + getBooleanFromEnv('RUN_TESTS_IN_MULTIREGION_MODE') diff --git a/packages/server/modules/workspaces/domain/operations.ts b/packages/server/modules/workspaces/domain/operations.ts index 7ab953fdee..230b80ce17 100644 --- a/packages/server/modules/workspaces/domain/operations.ts +++ b/packages/server/modules/workspaces/domain/operations.ts @@ -12,6 +12,7 @@ import { EventBusPayloads } from '@/modules/shared/services/eventBus' import { MaybeNullOrUndefined, Nullable, + NullableKeysToOptional, Optional, PartialNullable, StreamRoles, @@ -22,11 +23,22 @@ import { WorkspaceTeam } from '@/modules/workspaces/domain/types' import { Stream } from '@/modules/core/domain/streams/types' import { TokenResourceIdentifier } from '@/modules/core/domain/tokens/types' import { ServerRegion } from '@/modules/multiregion/domain/types' +import { SetOptional } from 'type-fest' /** Workspace */ -type UpsertWorkspaceArgs = { - workspace: Omit +export type UpsertWorkspaceArgs = { + workspace: Omit< + SetOptional< + NullableKeysToOptional, + | 'domainBasedMembershipProtectionEnabled' + | 'discoverabilityEnabled' + | 'defaultLogoIndex' + | 'defaultProjectRole' + | 'slug' + >, + 'domains' + > } export type UpsertWorkspace = (args: UpsertWorkspaceArgs) => Promise diff --git a/packages/server/modules/workspaces/domain/sso/operations.ts b/packages/server/modules/workspaces/domain/sso/operations.ts index 5e7a7b5ec8..79f90c532e 100644 --- a/packages/server/modules/workspaces/domain/sso/operations.ts +++ b/packages/server/modules/workspaces/domain/sso/operations.ts @@ -29,6 +29,8 @@ export type StoreProviderRecord = (args: { providerRecord: SsoProviderRecord }) => Promise +export type DeleteSsoProvider = (args: { workspaceId: string }) => Promise + // User session management /** diff --git a/packages/server/modules/workspaces/graph/resolvers/workspaces.ts b/packages/server/modules/workspaces/graph/resolvers/workspaces.ts index a074c90d6a..de0989d01a 100644 --- a/packages/server/modules/workspaces/graph/resolvers/workspaces.ts +++ b/packages/server/modules/workspaces/graph/resolvers/workspaces.ts @@ -174,6 +174,7 @@ import { listWorkspaceSsoMembershipsByUserEmailFactory } from '@/modules/workspaces/services/sso' import { + deleteSsoProviderFactory, getUserSsoSessionFactory, getWorkspaceSsoProviderFactory, getWorkspaceSsoProviderRecordFactory, @@ -448,12 +449,14 @@ export = FF_WORKSPACES_MODULE_ENABLED ) // Delete workspace and associated resources (i.e. 
invites) - const getStreams = legacyGetStreamsFactory({ db }) const deleteWorkspace = deleteWorkspaceFactory({ deleteWorkspace: repoDeleteWorkspaceFactory({ db }), deleteProject: deleteStream, deleteAllResourceInvites: deleteAllResourceInvitesFactory({ db }), - queryAllWorkspaceProjects: queryAllWorkspaceProjectsFactory({ getStreams }) + queryAllWorkspaceProjects: queryAllWorkspaceProjectsFactory({ + getStreams: legacyGetStreamsFactory({ db }) + }), + deleteSsoProvider: deleteSsoProviderFactory({ db }) }) await deleteWorkspace({ workspaceId }) @@ -585,6 +588,18 @@ export = FF_WORKSPACES_MODULE_ENABLED userId: context.userId }) }, + deleteSsoProvider: async (_parent, args, context) => { + await authorizeResolver( + context.userId, + args.workspaceId, + Roles.Workspace.Admin, + context.resourceAccessRules + ) + + await deleteSsoProviderFactory({ db })({ workspaceId: args.workspaceId }) + + return true + }, async join(_parent, args, context) { if (!context.userId) throw new WorkspaceJoinNotAllowedError() diff --git a/packages/server/modules/workspaces/repositories/sso.ts b/packages/server/modules/workspaces/repositories/sso.ts index 1988f91dae..431be4dee8 100644 --- a/packages/server/modules/workspaces/repositories/sso.ts +++ b/packages/server/modules/workspaces/repositories/sso.ts @@ -9,7 +9,8 @@ import { ListWorkspaceSsoMemberships, GetWorkspaceSsoProviderRecord, ListUserSsoSessions, - GetUserSsoSession + GetUserSsoSession, + DeleteSsoProvider } from '@/modules/workspaces/domain/sso/operations' import { SsoProviderRecord, @@ -100,6 +101,20 @@ export const storeSsoProviderRecordFactory = await tables.ssoProviders(db).insert(insertModel) } +export const deleteSsoProviderFactory = + ({ db }: { db: Knex }): DeleteSsoProvider => + async ({ workspaceId }) => { + await tables + .ssoProviders(db) + .join( + 'workspace_sso_providers', + 'workspace_sso_providers.providerId', + 'sso_providers.id' + ) + .where({ workspaceId }) + .delete() + } + export const associateSsoProviderWithWorkspaceFactory = ({ db }: { db: Knex }): AssociateSsoProviderWithWorkspace => async ({ providerId, workspaceId }) => { diff --git a/packages/server/modules/workspaces/services/management.ts b/packages/server/modules/workspaces/services/management.ts index 335869668a..6ed9968fc1 100644 --- a/packages/server/modules/workspaces/services/management.ts +++ b/packages/server/modules/workspaces/services/management.ts @@ -64,6 +64,7 @@ import { userEmailsCompliantWithWorkspaceDomains } from '@/modules/workspaces/do import { workspaceRoles as workspaceRoleDefinitions } from '@/modules/workspaces/roles' import { blockedDomains } from '@speckle/shared' import { DeleteStreamRecord } from '@/modules/core/domain/streams/operations' +import { DeleteSsoProvider } from '@/modules/workspaces/domain/sso/operations' type WorkspaceCreateArgs = { userId: string @@ -275,14 +276,19 @@ export const deleteWorkspaceFactory = deleteWorkspace, deleteProject, queryAllWorkspaceProjects, - deleteAllResourceInvites + deleteAllResourceInvites, + deleteSsoProvider }: { deleteWorkspace: DeleteWorkspace deleteProject: DeleteStreamRecord queryAllWorkspaceProjects: QueryAllWorkspaceProjects deleteAllResourceInvites: DeleteAllResourceInvites + deleteSsoProvider: DeleteSsoProvider }) => async ({ workspaceId }: WorkspaceDeleteArgs): Promise => { + // Delete workspace SSO provider, if present + await deleteSsoProvider({ workspaceId }) + // Cache project ids for post-workspace-delete cleanup const projectIds: string[] = [] for await (const projects of 
queryAllWorkspaceProjects({ workspaceId })) { diff --git a/packages/server/modules/workspaces/tests/integration/regions.graph.spec.ts b/packages/server/modules/workspaces/tests/integration/regions.graph.spec.ts index d4132d1f5f..18456ec886 100644 --- a/packages/server/modules/workspaces/tests/integration/regions.graph.spec.ts +++ b/packages/server/modules/workspaces/tests/integration/regions.graph.spec.ts @@ -13,8 +13,9 @@ import { SetWorkspaceDefaultRegionDocument } from '@/test/graphql/generated/graphql' import { testApolloServer, TestApolloServer } from '@/test/graphqlHelper' -import { beforeEachContext } from '@/test/hooks' +import { beforeEachContext, getRegionKeys } from '@/test/hooks' import { MultiRegionDbSelectorMock } from '@/test/mocks/global' +import { truncateRegionsSafely } from '@/test/speckle-helpers/regions' import { Roles } from '@speckle/shared' import { expect } from 'chai' @@ -71,8 +72,9 @@ describe('Workspace regions GQL', () => { apollo = await testApolloServer({ authUserId: me.id }) }) - after(() => { + after(async () => { MultiRegionDbSelectorMock.resetMockedFunctions() + await truncateRegionsSafely() }) describe('when listing', () => { @@ -95,7 +97,7 @@ describe('Workspace regions GQL', () => { expect(res).to.not.haveGraphQLErrors() expect( res.data?.workspace.availableRegions.map((r) => r.key) - ).to.deep.equalInAnyOrder([region1Key, region2Key]) + ).to.deep.equalInAnyOrder([region1Key, region2Key, ...getRegionKeys()]) }) }) diff --git a/packages/server/modules/workspaces/tests/integration/repositories.spec.ts b/packages/server/modules/workspaces/tests/integration/repositories.spec.ts index e83834ff27..4e6b2201b2 100644 --- a/packages/server/modules/workspaces/tests/integration/repositories.spec.ts +++ b/packages/server/modules/workspaces/tests/integration/repositories.spec.ts @@ -161,7 +161,7 @@ describe('Workspace repositories', () => { }) afterEach(async () => { - truncateTables(['workspaces']) + await truncateTables(['workspaces']) }) it('returns all workspace members', async () => { @@ -209,7 +209,7 @@ describe('Workspace repositories', () => { }) afterEach(async () => { - truncateTables(['workspaces']) + await truncateTables(['workspaces']) }) it('limits search results to specified workspace', async () => { diff --git a/packages/server/modules/workspaces/tests/integration/sso.graph.spec.ts b/packages/server/modules/workspaces/tests/integration/sso.graph.spec.ts index c26948d745..28df025dea 100644 --- a/packages/server/modules/workspaces/tests/integration/sso.graph.spec.ts +++ b/packages/server/modules/workspaces/tests/integration/sso.graph.spec.ts @@ -117,7 +117,7 @@ describe('Workspace SSO', () => { }) afterEach(async () => { - truncateTables(['user_sso_sessions']) + await truncateTables(['user_sso_sessions']) }) describe('given a workspace with SSO configured', () => { diff --git a/packages/server/modules/workspaces/tests/integration/sso.spec.ts b/packages/server/modules/workspaces/tests/integration/sso.spec.ts index 8d77da647d..a0a0d7ca7e 100644 --- a/packages/server/modules/workspaces/tests/integration/sso.spec.ts +++ b/packages/server/modules/workspaces/tests/integration/sso.spec.ts @@ -1,11 +1,14 @@ import { + deleteSsoProviderFactory, getUserSsoSessionFactory, getWorkspaceSsoProviderFactory, + getWorkspaceSsoProviderRecordFactory, listUserSsoSessionsFactory, listWorkspaceSsoMembershipsFactory, upsertUserSsoSessionFactory } from '@/modules/workspaces/repositories/sso' import { + assignToWorkspace, BasicTestWorkspace, createTestOidcProvider, 
createTestSsoSession, @@ -22,10 +25,12 @@ import { UserSsoSessionRecord } from '@/modules/workspaces/domain/sso/types' import { truncateTables } from '@/test/hooks' import { isValidSsoSession } from '@/modules/workspaces/domain/sso/logic' +const deleteSsoProvider = deleteSsoProviderFactory({ db }) const listUserSsoSessions = listUserSsoSessionsFactory({ db }) const listWorkspaceSsoMemberships = listWorkspaceSsoMembershipsFactory({ db }) const upsertUserSsoSession = upsertUserSsoSessionFactory({ db }) const getUserSsoSession = getUserSsoSessionFactory({ db }) +const getWorkspaceSsoProviderRecord = getWorkspaceSsoProviderRecordFactory({ db }) describe('Workspace SSO repositories', () => { const serverAdminUser: BasicTestUser = { @@ -251,7 +256,7 @@ describe('Workspace SSO repositories', () => { }) afterEach(async () => { - truncateTables(['user_sso_sessions']) + await truncateTables(['user_sso_sessions']) }) it('returns an empty array if there are no sessions', async () => { @@ -368,4 +373,84 @@ describe('Workspace SSO repositories', () => { expect(session).to.be.null }) }) + + describe('deleteSsoProviderFactory returns a function, that', async () => { + const testWorkspaceAdmin: BasicTestUser = { + id: '', + name: 'John Speckle', + email: `${cryptoRandomString({ length: 9 })}@example.org` + } + + const testWorkspaceMember: BasicTestUser = { + id: '', + name: 'Jane Speckle', + email: `${cryptoRandomString({ length: 9 })}@example.org` + } + + const testWorkspace: BasicTestWorkspace = { + id: '', + ownerId: '', + name: 'Test SSO Workspace', + slug: 'test-delete-sso-workspace' + } + + before(async () => { + await createTestUsers([testWorkspaceAdmin, testWorkspaceMember]) + await createTestWorkspace(testWorkspace, testWorkspaceAdmin) + await assignToWorkspace(testWorkspace, testWorkspaceMember) + }) + + beforeEach(async () => { + await createTestOidcProvider(testWorkspace.id) + await Promise.all([ + createTestSsoSession(testWorkspaceAdmin.id, testWorkspace.id), + createTestSsoSession(testWorkspaceMember.id, testWorkspace.id) + ]) + }) + + afterEach(async () => { + truncateTables(['user_sso_sessions']) + }) + + describe('when deleting an sso provider that exists', async () => { + beforeEach(async () => { + await deleteSsoProvider({ workspaceId: testWorkspace.id }) + }) + + it('deletes SSO encrypted provider data for specified workspace', async () => { + const provider = await getWorkspaceSsoProviderFactory({ + db, + decrypt: getDecryptor() + })({ workspaceId: testWorkspace.id }) + expect(provider).to.be.null + }) + + it('deletes all SSO sessions for provider for specified workspace', async () => { + const adminSession = await getUserSsoSession({ + userId: testWorkspaceAdmin.id, + workspaceId: testWorkspace.id + }) + const memberSession = await getUserSsoSession({ + userId: testWorkspaceMember.id, + workspaceId: testWorkspace.id + }) + + expect(adminSession).to.be.null + expect(memberSession).to.be.null + }) + + it('deletes workspace SSO provider record for specified workspaces', async () => { + const providerRecord = await getWorkspaceSsoProviderRecord({ + workspaceId: testWorkspace.id + }) + expect(providerRecord).to.be.null + }) + }) + + describe('when deleting an sso provider that does not exist', async () => { + it('should noop', async () => { + await deleteSsoProvider({ workspaceId: cryptoRandomString({ length: 9 }) }) + }) + }) + }) }) diff --git a/packages/server/modules/workspaces/tests/unit/services/management.spec.ts 
b/packages/server/modules/workspaces/tests/unit/services/management.spec.ts index 7a4803ed04..5d22a8f3aa 100644 --- a/packages/server/modules/workspaces/tests/unit/services/management.spec.ts +++ b/packages/server/modules/workspaces/tests/unit/services/management.spec.ts @@ -35,12 +35,15 @@ import { } from '@/modules/workspaces/errors/workspace' import { UserEmail } from '@/modules/core/domain/userEmails/types' import { merge, omit } from 'lodash' -import { GetWorkspaceWithDomains } from '@/modules/workspaces/domain/operations' +import { + GetWorkspaceWithDomains, + UpsertWorkspaceArgs +} from '@/modules/workspaces/domain/operations' import { FindVerifiedEmailsByUserId } from '@/modules/core/domain/userEmails/operations' import { EventNames } from '@/modules/shared/services/eventBus' type WorkspaceTestContext = { - storedWorkspaces: Omit[] + storedWorkspaces: UpsertWorkspaceArgs['workspace'][] storedRoles: WorkspaceAcl[] eventData: { isCalled: boolean @@ -63,11 +66,7 @@ const buildCreateWorkspaceWithTestContext = ( } const deps: Parameters[0] = { - upsertWorkspace: async ({ - workspace - }: { - workspace: Omit - }) => { + upsertWorkspace: async ({ workspace }) => { context.storedWorkspaces.push(workspace) }, validateSlug: async () => {}, @@ -1160,7 +1159,7 @@ describe('Workspace role services', () => { } let storedDomains: WorkspaceDomain | undefined = undefined - let storedWorkspace: Omit | undefined = undefined + let storedWorkspace: UpsertWorkspaceArgs['workspace'] | undefined = undefined let omittedEventName: EventNames | undefined = undefined const workspace: Workspace = { diff --git a/packages/server/multiregion.test.example.json b/packages/server/multiregion.test.example.json new file mode 100644 index 0000000000..d3693edd9f --- /dev/null +++ b/packages/server/multiregion.test.example.json @@ -0,0 +1,16 @@ +{ + "main": { + "postgres": { + "connectionUri": "postgresql://speckle:speckle@127.0.0.1:5432/speckle2_test", + "privateConnectionUri": "postgresql://speckle:speckle@postgres:5432/speckle2_test" + } + }, + "regions": { + "region1": { + "postgres": { + "connectionUri": "postgresql://speckle:speckle@127.0.0.1:5401/speckle2_test", + "privateConnectionUri": "postgresql://speckle:speckle@postgres-region1:5432/speckle2_test" + } + } + } +} diff --git a/packages/server/package.json b/packages/server/package.json index 60cf07f84b..cd0c38ffdf 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -23,6 +23,7 @@ "dev:clean": "yarn build:clean && yarn dev", "dev:server:test": "cross-env DISABLE_NOTIFICATIONS_CONSUMPTION=true NODE_ENV=test LOG_LEVEL=silent LOG_PRETTY=true node ./bin/ts-www", "test": "cross-env NODE_ENV=test LOG_LEVEL=silent LOG_PRETTY=true mocha", + "test:multiregion": "cross-env RUN_TESTS_IN_MULTIREGION_MODE=true yarn test", "test:coverage": "cross-env NODE_ENV=test LOG_LEVEL=silent LOG_PRETTY=true nyc --reporter lcov mocha", "test:report": "yarn test:coverage -- --reporter mocha-junit-reporter --reporter-options mochaFile=reports/test-results.xml", "lint": "yarn lint:tsc && yarn lint:eslint", @@ -30,6 +31,7 @@ "lint:tsc": "tsc --noEmit", "lint:eslint": "eslint .", "cli": "cross-env LOG_LEVEL=debug LOG_PRETTY=true NODE_ENV=development ts-node ./modules/cli/index.ts", + "cli:test": "cross-env LOG_LEVEL=debug LOG_PRETTY=true NODE_ENV=test ts-node ./modules/cli/index.ts", "cli:download:commit": "cross-env LOG_PRETTY=true LOG_LEVEL=debug yarn cli download commit", "migrate": "yarn cli db migrate", "migrate:test": "cross-env NODE_ENV=test ts-node 
./modules/cli/index.js db migrate", diff --git a/packages/server/test/graphql/generated/graphql.ts b/packages/server/test/graphql/generated/graphql.ts index 7df2f56629..7c5381bc79 100644 --- a/packages/server/test/graphql/generated/graphql.ts +++ b/packages/server/test/graphql/generated/graphql.ts @@ -4221,6 +4221,7 @@ export type WorkspaceMutations = { create: Workspace; delete: Scalars['Boolean']['output']; deleteDomain: Workspace; + deleteSsoProvider: Scalars['Boolean']['output']; invites: WorkspaceInviteMutations; join: Workspace; leave: Scalars['Boolean']['output']; @@ -4252,6 +4253,11 @@ export type WorkspaceMutationsDeleteDomainArgs = { }; +export type WorkspaceMutationsDeleteSsoProviderArgs = { + workspaceId: Scalars['String']['input']; +}; + + export type WorkspaceMutationsJoinArgs = { input: JoinWorkspaceInput; }; diff --git a/packages/server/test/hooks.js b/packages/server/test/hooks.js deleted file mode 100644 index 9ac7bda20e..0000000000 --- a/packages/server/test/hooks.js +++ /dev/null @@ -1,119 +0,0 @@ -require('../bootstrap') - -// Register global mocks as early as possible -require('@/test/mocks/global') - -const chai = require('chai') -const chaiAsPromised = require('chai-as-promised') -const chaiHttp = require('chai-http') -const deepEqualInAnyOrder = require('deep-equal-in-any-order') -const { knex } = require(`@/db/knex`) -const { init, startHttp, shutdown } = require(`@/app`) -const { default: graphqlChaiPlugin } = require('@/test/plugins/graphql') -const { logger } = require('@/logging/logging') -const { once } = require('events') - -// Register chai plugins -chai.use(chaiAsPromised) -chai.use(chaiHttp) -chai.use(deepEqualInAnyOrder) -chai.use(graphqlChaiPlugin) - -const unlock = async () => { - const exists = await knex.schema.hasTable('knex_migrations_lock') - if (exists) { - await knex('knex_migrations_lock').update('is_locked', '0') - } -} - -exports.truncateTables = async (tableNames) => { - if (!tableNames?.length) { - //why is server config only created once!???? 
- // because its done in a migration, to not override existing configs - const protectedTables = ['server_config'] - // const protectedTables = [ 'server_config', 'user_roles', 'scopes', 'server_acl' ] - tableNames = ( - await knex('pg_tables') - .select('tablename') - .where({ schemaname: 'public' }) - .whereRaw("tablename not like '%knex%'") - .whereNotIn('tablename', protectedTables) - ).map((table) => table.tablename) - if (!tableNames.length) return // Nothing to truncate - - // We're deleting everything, so lets turn off triggers to avoid deadlocks/slowdowns - await knex.transaction(async (trx) => { - await trx.raw(` - -- Disable triggers and foreign key constraints for this session - SET session_replication_role = replica; - - truncate table ${tableNames.join(',')}; - - -- Re-enable triggers and foreign key constraints - SET session_replication_role = DEFAULT; - `) - }) - } else { - await knex.raw(`truncate table ${tableNames.join(',')} cascade`) - } -} - -/** - * @param {import('http').Server} server - * @param {import('express').Express} app - */ -const initializeTestServer = async (server, app) => { - await startHttp(server, app, 0) - - await once(app, 'appStarted') - const port = server.address().port - const serverAddress = `http://127.0.0.1:${port}` - const wsAddress = `ws://127.0.0.1:${port}` - return { - server, - serverAddress, - serverPort: port, - wsAddress, - sendRequest(auth, obj) { - return ( - chai - .request(serverAddress) - .post('/graphql') - // if you set the header to null, the actual header in the req will be - // a string -> 'null' - // this is now treated as an invalid token, and gets forbidden - // switching to an empty string token - .set('Authorization', auth || '') - .send(obj) - ) - } - } -} - -exports.mochaHooks = { - beforeAll: async () => { - logger.info('running before all') - await unlock() - await exports.truncateTables() - await knex.migrate.rollback() - await knex.migrate.latest() - await init() - }, - afterAll: async () => { - logger.info('running after all') - await unlock() - await shutdown() - } -} - -exports.buildApp = async () => { - const { app, graphqlServer, server } = await init() - return { app, graphqlServer, server } -} - -exports.beforeEachContext = async () => { - await exports.truncateTables() - return await exports.buildApp() -} - -exports.initializeTestServer = initializeTestServer diff --git a/packages/server/test/hooks.ts b/packages/server/test/hooks.ts new file mode 100644 index 0000000000..0a0ce1a97c --- /dev/null +++ b/packages/server/test/hooks.ts @@ -0,0 +1,276 @@ +// eslint-disable-next-line no-restricted-imports +import '../bootstrap' + +// Register global mocks as early as possible +import '@/test/mocks/global' + +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' +import chaiHttp from 'chai-http' +import deepEqualInAnyOrder from 'deep-equal-in-any-order' +import { knex as mainDb } from '@/db/knex' +import { init, startHttp, shutdown } from '@/app' +import graphqlChaiPlugin from '@/test/plugins/graphql' +import { logger } from '@/logging/logging' +import { once } from 'events' +import type http from 'http' +import type express from 'express' +import type net from 'net' +import { MaybeAsync, MaybeNullOrUndefined } from '@speckle/shared' +import type mocha from 'mocha' +import { shouldRunTestsInMultiregionMode } from '@/modules/shared/helpers/envHelper' +import { + getAvailableRegionKeysFactory, + getFreeRegionKeysFactory +} from '@/modules/multiregion/services/config' +import { 
getAvailableRegionConfig } from '@/modules/multiregion/regionConfig' +import { createAndValidateNewRegionFactory } from '@/modules/multiregion/services/management' +import { + getRegionFactory, + getRegionsFactory, + Regions, + storeRegionFactory +} from '@/modules/multiregion/repositories' +import { + getRegisteredRegionClients, + initializeRegion +} from '@/modules/multiregion/dbSelector' +import { Knex } from 'knex' + +// why is server config only created once!???? +// because its done in a migration, to not override existing configs +// similarly wiping regions will break multi region setup +const protectedTables = ['server_config', 'regions'] +let regionClients: Record = {} + +// Register chai plugins +chai.use(chaiAsPromised) +chai.use(chaiHttp) +chai.use(deepEqualInAnyOrder) +chai.use(graphqlChaiPlugin) + +const inEachDb = async (fn: (db: Knex) => MaybeAsync) => { + await fn(mainDb) + for (const regionClient of Object.values(regionClients)) { + await fn(regionClient) + } +} + +const ensureAivenExtrasFactory = (deps: { db: Knex }) => async () => { + await deps.db.raw('CREATE EXTENSION IF NOT EXISTS "aiven_extras";') +} + +const setupMultiregionMode = async () => { + const db = mainDb + const getAvailableRegionKeys = getAvailableRegionKeysFactory({ + getAvailableRegionConfig + }) + const regionKeys = await getAvailableRegionKeys() + + // Create DB region entries for each key + const createRegion = createAndValidateNewRegionFactory({ + getFreeRegionKeys: getFreeRegionKeysFactory({ + getAvailableRegionKeys, + getRegions: getRegionsFactory({ db }) + }), + getRegion: getRegionFactory({ db }), + storeRegion: storeRegionFactory({ db }), + initializeRegion + }) + for (const regionKey of regionKeys) { + await createRegion({ + region: { + key: regionKey, + name: regionKey, + description: 'Auto created test region' + } + }) + } + + // Store active region clients + regionClients = await getRegisteredRegionClients() + + // Reset each DB client (re-run all migrations and setup) + for (const [, regionClient] of Object.entries(regionClients)) { + const reset = resetSchemaFactory({ db: regionClient }) + await reset() + } + + // If not in multi region mode, delete region entries + // we only needed them to reset schemas + if (!shouldRunTestsInMultiregionMode()) { + await truncateTables([Regions.name]) + } +} + +const unlockFactory = (deps: { db: Knex }) => async () => { + const exists = await deps.db.schema.hasTable('knex_migrations_lock') + if (exists) { + await deps.db('knex_migrations_lock').update('is_locked', '0') + } +} + +export const getRegionKeys = () => Object.keys(regionClients) + +export const resetPubSubFactory = (deps: { db: Knex }) => async () => { + if (!shouldRunTestsInMultiregionMode()) { + return { drop: async () => {}, reenable: async () => {} } + } + + const ensureAivenExtras = ensureAivenExtrasFactory(deps) + await ensureAivenExtras() + + const subscriptions = (await deps.db.raw( + `SELECT subname, subconninfo, subpublications, subslotname FROM aiven_extras.pg_list_all_subscriptions() WHERE subname ILIKE 'test_%';` + )) as { + rows: Array<{ + subname: string + subconninfo: string + subpublications: string[] + subslotname: string + }> + } + const publications = (await deps.db.raw( + `SELECT pubname FROM pg_publication WHERE pubname ILIKE 'test_%';` + )) as { + rows: Array<{ pubname: string }> + } + + // Drop all subs + for (const sub of subscriptions.rows) { + await deps.db.raw(` + SELECT * FROM aiven_extras.pg_alter_subscription_disable('${sub.subname}'); + SELECT * FROM 
aiven_extras.pg_drop_subscription('${sub.subname}'); + SELECT * FROM aiven_extras.dblink_slot_create_or_drop('${sub.subconninfo}', '${sub.subslotname}', 'drop'); + `) + } + + // Drop all pubs + for (const pub of publications.rows) { + await deps.db.raw(`DROP PUBLICATION ${pub.pubname};`) + } +} + +const truncateTablesFactory = (deps: { db: Knex }) => async (tableNames?: string[]) => { + if (!tableNames?.length) { + tableNames = ( + await deps + .db('pg_tables') + .select('tablename') + .where({ schemaname: 'public' }) + .whereRaw("tablename not like '%knex%'") + .whereNotIn('tablename', protectedTables) + ).map((table: { tablename: string }) => table.tablename) + if (!tableNames.length) return // Nothing to truncate + + // We're deleting everything, so lets turn off triggers to avoid deadlocks/slowdowns + await deps.db.transaction(async (trx) => { + await trx.raw(` + -- Disable triggers and foreign key constraints for this session + SET session_replication_role = replica; + + truncate table ${tableNames?.join(',') || ''}; + + -- Re-enable triggers and foreign key constraints + SET session_replication_role = DEFAULT; + `) + }) + } else { + await deps.db.raw(`truncate table ${tableNames.join(',')} cascade`) + } +} + +const resetSchemaFactory = (deps: { db: Knex }) => async () => { + const resetPubSub = resetPubSubFactory(deps) + + await unlockFactory(deps)() + await resetPubSub() + + // Reset schema + await deps.db.migrate.rollback() + await deps.db.migrate.latest() +} + +export const truncateTables = async (tableNames?: string[]) => { + const dbs = [mainDb, ...Object.values(regionClients)] + + // First reset pubsubs + for (const db of dbs) { + const resetPubSub = resetPubSubFactory({ db }) + await resetPubSub() + } + + // Now truncate + for (const db of dbs) { + const truncate = truncateTablesFactory({ db }) + await truncate(tableNames) + } +} + +export const initializeTestServer = async ( + server: http.Server, + app: express.Express +) => { + await startHttp(server, app, 0) + + await once(app, 'appStarted') + const port = (server.address() as net.AddressInfo).port + '' + const serverAddress = `http://127.0.0.1:${port}` + const wsAddress = `ws://127.0.0.1:${port}` + return { + server, + serverAddress, + serverPort: port, + wsAddress, + sendRequest(auth: MaybeNullOrUndefined, obj: string | object) { + return ( + chai + .request(serverAddress) + .post('/graphql') + // if you set the header to null, the actual header in the req will be + // a string -> 'null' + // this is now treated as an invalid token, and gets forbidden + // switching to an empty string token + .set('Authorization', auth || '') + .send(obj) + ) + } + } +} + +export const mochaHooks: mocha.RootHookObject = { + beforeAll: async () => { + if (shouldRunTestsInMultiregionMode()) { + console.log('Running tests in multi-region mode...') + } + + logger.info('running before all') + + // Init main db + const reset = resetSchemaFactory({ db: mainDb }) + await reset() + + // Init (or cleanup) multi-region mode + await setupMultiregionMode() + + // Init app + await init() + }, + afterAll: async () => { + logger.info('running after all') + await inEachDb(async (db) => { + await unlockFactory({ db })() + }) + await shutdown() + } +} + +export const buildApp = async () => { + const { app, graphqlServer, server } = await init() + return { app, graphqlServer, server } +} + +export const beforeEachContext = async () => { + await truncateTables() + return await buildApp() +} diff --git a/packages/server/test/mocks/global.ts 
b/packages/server/test/mocks/global.ts index 76d136da99..d80a63e08c 100644 --- a/packages/server/test/mocks/global.ts +++ b/packages/server/test/mocks/global.ts @@ -12,10 +12,10 @@ export const CommentsRepositoryMock = mockRequireModule< typeof import('@/modules/comments/repositories/comments') >(['@/modules/comments/repositories/comments']) -export const MultiRegionConfigServiceMock = mockRequireModule< - typeof import('@/modules/multiregion/services/config') ->(['@/modules/multiregion/services/config']) - export const MultiRegionDbSelectorMock = mockRequireModule< typeof import('@/modules/multiregion/dbSelector') >(['@/modules/multiregion/dbSelector']) + +export const MultiRegionConfigMock = mockRequireModule< + typeof import('@/modules/multiregion/regionConfig') +>(['@/modules/multiregion/regionConfig']) diff --git a/packages/server/test/speckle-helpers/regions.ts b/packages/server/test/speckle-helpers/regions.ts new file mode 100644 index 0000000000..33efbecc9e --- /dev/null +++ b/packages/server/test/speckle-helpers/regions.ts @@ -0,0 +1,11 @@ +import { db } from '@/db/knex' +import { Regions } from '@/modules/multiregion/repositories' +import { getRegionKeys } from '@/test/hooks' + +/** + * Delete all regions entries that are not part of the main multi region mode + */ +export const truncateRegionsSafely = async () => { + const regionKeys = getRegionKeys() + await db(Regions.name).whereNotIn(Regions.col.key, regionKeys).delete() +}
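
A minimal sketch of the local workflow these changes imply (not part of the patch itself; assumptions: a second test Postgres is reachable at the connection URIs listed in multiregion.test.example.json, and MULTI_REGION_CONFIG_PATH points at multiregion.test.json as in .env.test-example):

    # from packages/server — schemas are reset and migrated by the mocha beforeAll hook in test/hooks.ts
    cp multiregion.test.example.json multiregion.test.json   # gitignored; adjust connectionUri values for your local databases
    yarn test:multiregion                                     # i.e. `yarn test` with RUN_TESTS_IN_MULTIREGION_MODE=true

In CI the same suite runs as the new test-server-multiregion job, which reuses the test-server job definition with .circleci/multiregion.test-ci.json and a second Postgres container listening on port 5433.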