diff --git a/.run/Template Jest.run.xml b/.run/Template Jest.run.xml
new file mode 100644
index 00000000..a9f73fe3
--- /dev/null
+++ b/.run/Template Jest.run.xml
@@ -0,0 +1,10 @@
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/package.json b/package.json
index d2428680..e13d96b6 100644
--- a/package.json
+++ b/package.json
@@ -21,7 +21,8 @@
"supertest": "^6.3.3",
"ts-jest": "^29.0.5",
"ts-standard": "^12.0.0",
- "typescript": "4.9.5"
+ "typescript": "4.9.5",
+ "wait-for-expect": "^3.0.2"
},
"dependencies": {
"@babel/runtime": "^7.17.2",
diff --git a/src/__tests__/areas.ts b/src/__tests__/areas.ts
index 57d75f0a..602c1779 100644
--- a/src/__tests__/areas.ts
+++ b/src/__tests__/areas.ts
@@ -1,12 +1,14 @@
-import { ApolloServer } from 'apollo-server-express'
+import {ApolloServer} from 'apollo-server-express'
import muuid from 'uuid-mongodb'
-import { jest } from '@jest/globals'
+import {jest} from '@jest/globals'
import MutableAreaDataSource from '../model/MutableAreaDataSource.js'
import MutableOrganizationDataSource from '../model/MutableOrganizationDataSource.js'
-import { AreaType } from '../db/AreaTypes.js'
-import { OrgType, OrganizationType, OrganizationEditableFieldsType } from '../db/OrganizationTypes.js'
-import { queryAPI, setUpServer } from '../utils/testUtils.js'
-import { muuidToString } from '../utils/helpers.js'
+import {AreaType} from '../db/AreaTypes.js'
+import {OrganizationEditableFieldsType, OrganizationType, OrgType} from '../db/OrganizationTypes.js'
+import {queryAPI, setUpServer} from '../utils/testUtils.js'
+import {muuidToString} from '../utils/helpers.js'
+import {InMemoryDB} from "../utils/inMemoryDB.js";
+import express from "express";
jest.setTimeout(60000)
@@ -14,7 +16,8 @@ describe('areas API', () => {
let server: ApolloServer
let user: muuid.MUUID
let userUuid: string
- let inMemoryDB
+ let app: express.Application
+ let inMemoryDB: InMemoryDB
// Mongoose models for mocking pre-existing state.
let areas: MutableAreaDataSource
@@ -24,7 +27,7 @@ describe('areas API', () => {
let wa: AreaType
beforeAll(async () => {
- ({ server, inMemoryDB } = await setUpServer())
+ ({server, inMemoryDB, app} = await setUpServer())
// Auth0 serializes uuids in "relaxed" mode, resulting in this hex string format
// "59f1d95a-627d-4b8c-91b9-389c7424cb54" instead of base64 "WfHZWmJ9S4yRuTicdCTLVA==".
user = muuid.mode('relaxed').v4()
@@ -66,18 +69,19 @@ describe('areas API', () => {
excludedAreaIds: [ca.metadata.area_id]
}
alphaOrg = await organizations.addOrganization(user, OrgType.localClimbingOrganization, alphaFields)
- .then((res: OrganizationType | null) => {
- if (res === null) throw new Error('Failure mocking organization.')
- return res
- })
+ .then((res: OrganizationType | null) => {
+ if (res === null) throw new Error('Failure mocking organization.')
+ return res
+ })
})
it('retrieves an area and lists associated organizations', async () => {
const response = await queryAPI({
query: areaQuery,
operationName: 'area',
- variables: { input: wa.metadata.area_id },
- userUuid
+ variables: {input: wa.metadata.area_id},
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
@@ -91,8 +95,9 @@ describe('areas API', () => {
const response = await queryAPI({
query: areaQuery,
operationName: 'area',
- variables: { input: ca.metadata.area_id },
- userUuid
+ variables: {input: ca.metadata.area_id},
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const areaResult = response.body.data.area
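The tests above now pass the express app into queryAPI instead of relying on a server started inside testUtils. The updated helper is not part of this diff; a hedged sketch of what it might look like, assuming it drives the injected app with supertest (the GraphQL mount path and the auth header wiring are assumptions; the real helper signs a JWT from userUuid/roles):

import request from 'supertest'
import type express from 'express'

interface QueryAPIProps {
  query: string
  operationName?: string
  variables?: Record<string, unknown>
  userUuid?: string
  roles?: string[]
  app: express.Application // new: the app returned by setUpServer()
}

export const queryAPI = async ({ query, operationName, variables, userUuid = '', roles = [], app }: QueryAPIProps) =>
  await request(app)
    .post('/') // assumed mount path
    .send({ query, operationName, variables })
    // Stand-in for the real JWT so the sketch stays self-contained.
    .set('Authorization', `Bearer ${Buffer.from(JSON.stringify({ userUuid, roles })).toString('base64')}`)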
diff --git a/src/__tests__/history.ts b/src/__tests__/history.ts
index 7600f5a5..dd6d3813 100644
--- a/src/__tests__/history.ts
+++ b/src/__tests__/history.ts
@@ -1,13 +1,15 @@
-import { ApolloServer } from 'apollo-server-express'
+import {ApolloServer} from 'apollo-server-express'
import muuid from 'uuid-mongodb'
-import { jest } from '@jest/globals'
+import {jest} from '@jest/globals'
import MutableAreaDataSource from '../model/MutableAreaDataSource.js'
import MutableOrganizationDataSource from '../model/MutableOrganizationDataSource.js'
import MutableClimbDataSource from '../model/MutableClimbDataSource.js'
-import { AreaType } from '../db/AreaTypes.js'
-import { OrgType, OrganizationType } from '../db/OrganizationTypes.js'
-import { muuidToString } from '../utils/helpers.js'
-import { queryAPI, setUpServer } from '../utils/testUtils.js'
+import {AreaType} from '../db/AreaTypes.js'
+import {OrganizationType, OrgType} from '../db/OrganizationTypes.js'
+import {muuidToString} from '../utils/helpers.js'
+import {queryAPI, setUpServer} from '../utils/testUtils.js'
+import {InMemoryDB} from "../utils/inMemoryDB.js";
+import express from "express";
jest.setTimeout(60000)
@@ -15,7 +17,8 @@ describe('history API', () => {
let server: ApolloServer
let user: muuid.MUUID
let userUuid: string
- let inMemoryDB
+ let app: express.Application
+ let inMemoryDB: InMemoryDB
// Mongoose models for mocking pre-existing state.
let areas: MutableAreaDataSource
@@ -23,7 +26,7 @@ describe('history API', () => {
let climbs: MutableClimbDataSource
beforeAll(async () => {
- ({ server, inMemoryDB } = await setUpServer())
+ ({server, inMemoryDB, app} = await setUpServer())
// Auth0 serializes uuids in "relaxed" mode, resulting in this hex string format
// "59f1d95a-627d-4b8c-91b9-389c7424cb54" instead of base64 "WfHZWmJ9S4yRuTicdCTLVA==".
user = muuid.mode('relaxed').v4()
@@ -101,13 +104,14 @@ describe('history API', () => {
email: 'admin@alphaopenbeta.com'
}
alphaOrg = await organizations.addOrganization(user, OrgType.localClimbingOrganization, alphaFields)
- climbIds = await climbs.addOrUpdateClimbs(user, ca.metadata.area_id, [{ name: 'Alpha Climb' }])
+ climbIds = await climbs.addOrUpdateClimbs(user, ca.metadata.area_id, [{name: 'Alpha Climb'}])
// Query for changes and ensure they are tracked.
const resp = await queryAPI({
query: QUERY_RECENT_CHANGE_HISTORY,
- variables: { filter: {} },
- userUuid
+ variables: {filter: {}},
+ userUuid,
+ app
})
expect(resp.statusCode).toBe(200)
const histories = resp.body.data.getChangeHistory
diff --git a/src/__tests__/organizations.ts b/src/__tests__/organizations.ts
index 3489f139..7e4d0383 100644
--- a/src/__tests__/organizations.ts
+++ b/src/__tests__/organizations.ts
@@ -1,22 +1,22 @@
-import { ApolloServer } from 'apollo-server-express'
+import {ApolloServer} from 'apollo-server-express'
import muuid from 'uuid-mongodb'
-import { jest } from '@jest/globals'
import MutableAreaDataSource from '../model/MutableAreaDataSource.js'
import MutableOrganizationDataSource from '../model/MutableOrganizationDataSource.js'
-import { AreaType } from '../db/AreaTypes.js'
-import { OrgType, OrganizationType, OperationType, OrganizationEditableFieldsType } from '../db/OrganizationTypes.js'
-import { changelogDataSource } from '../model/ChangeLogDataSource.js'
-import { queryAPI, setUpServer } from '../utils/testUtils.js'
-import { muuidToString } from '../utils/helpers.js'
-import { validate as validateMuuid } from 'uuid'
-
-jest.setTimeout(110000)
+import {AreaType} from '../db/AreaTypes.js'
+import {OperationType, OrganizationEditableFieldsType, OrganizationType, OrgType} from '../db/OrganizationTypes.js'
+import {changelogDataSource} from '../model/ChangeLogDataSource.js'
+import {queryAPI, setUpServer} from '../utils/testUtils.js'
+import {muuidToString} from '../utils/helpers.js'
+import {validate as validateMuuid} from 'uuid'
+import {InMemoryDB} from "../utils/inMemoryDB.js";
+import express from "express";
describe('organizations API', () => {
let server: ApolloServer
let user: muuid.MUUID
let userUuid: string
- let inMemoryDB
+ let app: express.Application
+ let inMemoryDB: InMemoryDB
// Mongoose models for mocking pre-existing state.
let areas: MutableAreaDataSource
@@ -26,7 +26,7 @@ describe('organizations API', () => {
let wa: AreaType
beforeAll(async () => {
- ({ server, inMemoryDB } = await setUpServer())
+ ({server, inMemoryDB, app} = await setUpServer())
// Auth0 serializes uuids in "relaxed" mode, resulting in this hex string format
// "59f1d95a-627d-4b8c-91b9-389c7424cb54" instead of base64 "WfHZWmJ9S4yRuTicdCTLVA==".
user = muuid.mode('relaxed').v4()
@@ -43,8 +43,8 @@ describe('organizations API', () => {
})
afterAll(async () => {
- await server.stop()
- await inMemoryDB.close()
+ await server?.stop()
+ await inMemoryDB?.close()
})
describe('mutations', () => {
@@ -85,9 +85,10 @@ describe('organizations API', () => {
const createResponse = await queryAPI({
query: createQuery,
operationName: 'addOrganization',
- variables: { input: { displayName: 'Friends of Openbeta', orgType: 'LOCAL_CLIMBING_ORGANIZATION' } },
+ variables: {input: {displayName: 'Friends of Openbeta', orgType: 'LOCAL_CLIMBING_ORGANIZATION'}},
userUuid,
- roles: ['user_admin']
+ roles: ['user_admin'],
+ app
})
expect(createResponse.statusCode).toBe(200)
@@ -120,7 +121,8 @@ describe('organizations API', () => {
}
},
userUuid,
- roles: ['user_admin']
+ roles: ['user_admin'],
+ app
})
expect(updateResponse.statusCode).toBe(200)
expect(updateResponse.body.errors).toBeUndefined()
@@ -163,9 +165,10 @@ describe('organizations API', () => {
const response = await queryAPI({
query: createQuery,
operationName: 'addOrganization',
- variables: { input: { displayName: 'Friends of Openbeta', orgType: 'LOCAL_CLIMBING_ORGANIZATION' } },
+ variables: {input: {displayName: 'Friends of Openbeta', orgType: 'LOCAL_CLIMBING_ORGANIZATION'}},
userUuid,
- roles: ['editor']
+ roles: ['editor'],
+ app
})
expect(response.statusCode).toBe(200)
expect(response.body.data.organization).toBeNull()
@@ -219,20 +222,20 @@ describe('organizations API', () => {
hardwareReportLink: 'https://alphaopenbeta.com/reporthardware'
}
alphaOrg = await organizations.addOrganization(user, OrgType.localClimbingOrganization, alphaFields)
- .then((res: OrganizationType | null) => {
- if (res === null) throw new Error('Failure mocking organization.')
- return res
- })
+ .then((res: OrganizationType | null) => {
+ if (res === null) throw new Error('Failure mocking organization.')
+ return res
+ })
deltaFields = {
displayName: 'Delta OpenBeta Club',
email: 'admin@deltaopenbeta.com'
}
deltaOrg = await organizations.addOrganization(user, OrgType.localClimbingOrganization, deltaFields)
- .then((res: OrganizationType | null) => {
- if (res === null) throw new Error('Failure mocking organization.')
- return res
- })
+ .then((res: OrganizationType | null) => {
+ if (res === null) throw new Error('Failure mocking organization.')
+ return res
+ })
gammaFields = {
displayName: 'Delta Gamma OpenBeta Club',
@@ -240,18 +243,19 @@ describe('organizations API', () => {
excludedAreaIds: [wa.metadata.area_id]
}
gammaOrg = await organizations.addOrganization(user, OrgType.localClimbingOrganization, gammaFields)
- .then((res: OrganizationType | null) => {
- if (res === null) throw new Error('Failure mocking organization.')
- return res
- })
+ .then((res: OrganizationType | null) => {
+ if (res === null) throw new Error('Failure mocking organization.')
+ return res
+ })
})
it('retrieves an organization with an MUUID', async () => {
const response = await queryAPI({
query: organizationQuery,
operationName: 'organization',
- variables: { input: muuidToString(alphaOrg.orgId) },
- userUuid
+ variables: {input: muuidToString(alphaOrg.orgId)},
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const orgResult = response.body.data.organization
@@ -268,8 +272,9 @@ describe('organizations API', () => {
const response = await queryAPI({
query: organizationsQuery,
operationName: 'organizations',
- variables: { filter: { displayName: { match: 'Delta OpenBeta Club', exactMatch: true } } },
- userUuid
+ variables: {filter: {displayName: {match: 'Delta OpenBeta Club', exactMatch: true}}},
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
@@ -282,8 +287,9 @@ describe('organizations API', () => {
const response = await queryAPI({
query: organizationsQuery,
operationName: 'organizations',
- variables: { filter: { displayName: { match: 'delta', exactMatch: false } } },
- userUuid
+ variables: {filter: {displayName: {match: 'delta', exactMatch: false}}},
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const dataResult = response.body.data.organizations
@@ -298,7 +304,8 @@ describe('organizations API', () => {
variables: {
limit: 1
},
- userUuid
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const dataResult = response.body.data.organizations
@@ -309,8 +316,9 @@ describe('organizations API', () => {
const response = await queryAPI({
query: organizationsQuery,
operationName: 'organizations',
- variables: { filter: { associatedAreaIds: { includes: [muuidToString(ca.metadata.area_id)] } } },
- userUuid
+ variables: {filter: {associatedAreaIds: {includes: [muuidToString(ca.metadata.area_id)]}}},
+ userUuid,
+ app
})
// Graphql should convert `includes` from a string[] to MUUID[]
expect(response.statusCode).toBe(200)
@@ -323,8 +331,9 @@ describe('organizations API', () => {
const response = await queryAPI({
query: organizationsQuery,
operationName: 'organizations',
- variables: { filter: { excludedAreaIds: { excludes: [muuidToString(wa.metadata.area_id)] } } },
- userUuid
+ variables: {filter: {excludedAreaIds: {excludes: [muuidToString(wa.metadata.area_id)]}}},
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const dataResult = response.body.data.organizations
diff --git a/src/__tests__/ticks.ts b/src/__tests__/ticks.ts
index e19e90f8..d18ae48a 100644
--- a/src/__tests__/ticks.ts
+++ b/src/__tests__/ticks.ts
@@ -1,12 +1,14 @@
-import { ApolloServer } from 'apollo-server-express'
+import {ApolloServer} from 'apollo-server-express'
import muuid from 'uuid-mongodb'
-import { jest } from '@jest/globals'
-import { queryAPI, setUpServer } from '../utils/testUtils.js'
-import { muuidToString } from '../utils/helpers.js'
-import { TickInput } from '../db/TickTypes.js'
+import {jest} from '@jest/globals'
+import {queryAPI, setUpServer} from '../utils/testUtils.js'
+import {muuidToString} from '../utils/helpers.js'
+import {TickInput} from '../db/TickTypes.js'
import TickDataSource from '../model/TickDataSource.js'
import UserDataSource from '../model/UserDataSource.js'
-import { UpdateProfileGQLInput } from '../db/UserTypes.js'
+import {UpdateProfileGQLInput} from '../db/UserTypes.js'
+import {InMemoryDB} from "../utils/inMemoryDB.js";
+import express from "express";
jest.setTimeout(110000)
@@ -14,7 +16,8 @@ describe('ticks API', () => {
let server: ApolloServer
let user: muuid.MUUID
let userUuid: string
- let inMemoryDB
+ let app: express.Application
+ let inMemoryDB: InMemoryDB
// Mongoose models for mocking pre-existing state.
let ticks: TickDataSource
@@ -22,7 +25,7 @@ describe('ticks API', () => {
let tickOne: TickInput
beforeAll(async () => {
- ({ server, inMemoryDB } = await setUpServer())
+ ({server, inMemoryDB, app} = await setUpServer())
user = muuid.v4()
userUuid = muuidToString(user)
@@ -92,8 +95,9 @@ describe('ticks API', () => {
await ticks.addTick(tickOne)
const response = await queryAPI({
query: userQuery,
- variables: { userId: userUuid },
- userUuid
+ variables: {userId: userUuid},
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const res = response.body.data.userTicks
@@ -111,8 +115,9 @@ describe('ticks API', () => {
await ticks.addTick(tickOne)
const response = await queryAPI({
query: userQuery,
- variables: { username: 'cat.dog' },
- userUuid
+ variables: {username: 'cat.dog'},
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const res = response.body.data.userTicks
@@ -124,8 +129,9 @@ describe('ticks API', () => {
await ticks.addTick(tickOne)
const response = await queryAPI({
query: userTickByClimbQuery,
- variables: { userId: userUuid, climbId: tickOne.climbId },
- userUuid
+ variables: {userId: userUuid, climbId: tickOne.climbId},
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const res = response.body.data.userTicksByClimbId
@@ -170,9 +176,10 @@ describe('ticks API', () => {
it('creates and updates a tick', async () => {
const createResponse = await queryAPI({
query: createQuery,
- variables: { input: tickOne },
+ variables: {input: tickOne},
userUuid,
- roles: ['user_admin']
+ roles: ['user_admin'],
+ app
})
expect(createResponse.statusCode).toBe(200)
@@ -204,7 +211,8 @@ describe('ticks API', () => {
}
},
userUuid,
- roles: ['user_admin']
+ roles: ['user_admin'],
+ app
})
expect(updateResponse.statusCode).toBe(200)
diff --git a/src/auth/middleware.ts b/src/auth/middleware.ts
index 6c554217..153419f3 100644
--- a/src/auth/middleware.ts
+++ b/src/auth/middleware.ts
@@ -1,12 +1,12 @@
import muid from 'uuid-mongodb'
-import { AuthUserType } from '../types.js'
-import { verifyJWT } from './util.js'
-import { logger } from '../logger.js'
+import {AuthUserType} from '../types.js'
+import {verifyJWT} from './util.js'
+import {logger} from '../logger.js'
/**
* Create a middleware context for Apollo server
*/
-export const createContext = async ({ req }): Promise => {
+export const createContext = async ({req}): Promise => {
const user: AuthUserType = {
roles: [],
uuid: undefined,
@@ -20,12 +20,12 @@ export const createContext = async ({ req }): Promise => {
throw new Error('An unexpected error has occurred. Please notify us at support@openbeta.io.')
}
- return { user }
+ return {user}
}
export const authMiddleware = async (req, res, next): Promise<void> => {
try {
- const { user, token } = await validateTokenAndExtractUser(req)
+ const {user, token} = await validateTokenAndExtractUser(req)
req.user = user
req.userId = user.uuid
req.token = token
@@ -36,9 +36,9 @@ export const authMiddleware = async (req, res, next): Promise => {
}
}
-async function validateTokenAndExtractUser (req: Request): Promise<{ user: AuthUserType, token: string }> {
- const { headers } = req
- const authHeader = String(headers?.authorization ?? '')
+async function validateTokenAndExtractUser(req: Request): Promise<{ user: AuthUserType, token: string }> {
+ const {headers} = req
+ const authHeader = String(headers?.['authorization'] ?? '')
if (!authHeader.startsWith('Bearer ')) {
throw new Error('Unauthorized. Please provide a valid JWT token in the Authorization header.')
}
diff --git a/src/db/edit/streamListener.ts b/src/db/edit/streamListener.ts
index aff9b11f..a7b577a9 100644
--- a/src/db/edit/streamListener.ts
+++ b/src/db/edit/streamListener.ts
@@ -1,25 +1,46 @@
import mongoose from 'mongoose'
-import { ChangeStream, ChangeStreamDocument, ChangeStreamUpdateDocument } from 'mongodb'
+import {ChangeStream, ChangeStreamDocument, ChangeStreamUpdateDocument} from 'mongodb'
import dot from 'dot-object'
-import { changelogDataSource } from '../../model/ChangeLogDataSource.js'
-import { logger } from '../../logger.js'
-import { BaseChangeRecordType, ResumeToken, UpdateDescription, DBOperation, SupportedCollectionTypes, DocumentKind } from '../ChangeLogType.js'
-import { checkVar } from '../index.js'
-import { updateAreaIndex, updateClimbIndex } from '../export/Typesense/Client.js'
-import { AreaType } from '../AreaTypes.js'
-import { exhaustiveCheck } from '../../utils/helpers.js'
-import { ClimbType } from '../ClimbTypes.js'
+import {changelogDataSource} from '../../model/ChangeLogDataSource.js'
+import {logger} from '../../logger.js'
+import {
+ BaseChangeRecordType,
+ DBOperation,
+ DocumentKind,
+ ResumeToken,
+ SupportedCollectionTypes,
+ UpdateDescription
+} from '../ChangeLogType.js'
+import {checkVar} from '../index.js'
+import {updateAreaIndex, updateClimbIndex} from '../export/Typesense/Client.js'
+import {AreaType} from '../AreaTypes.js'
+import {exhaustiveCheck} from '../../utils/helpers.js'
+import {ClimbType} from '../ClimbTypes.js'
/**
* Start a new stream listener to track changes
*/
-export default async function streamListener (): Promise<ChangeStream> {
+export default async function streamListener(): Promise<ChangeStream> {
+ return (await createChangeStream()).on('change', onChange)
+}
+
+/**
+ * The test stream listener awaits all change events
+ */
+export async function testStreamListener(callback?: (change: ChangeStreamDocument) => void): Promise<ChangeStream> {
+ return (await createChangeStream()).on('change', async (change: ChangeStreamDocument) => {
+ await onChange(change)
+ callback && callback(change)
+ })
+}
+
+async function createChangeStream(): Promise<ChangeStream> {
const resumeId = await mostRecentResumeId()
- logger.info({ resumeId }, 'Starting stream listener')
+ logger.info({resumeId}, 'Starting stream listener')
const opts: any = {
- fullDocument: 'updateLookup'
+ fullDocument: 'updateLookup',
}
if (resumeId != null) {
opts.resumeId = resumeId
@@ -40,32 +61,40 @@ export default async function streamListener (): Promise {
}
}]
- const changeStream = mongoose.connection.watch(pipeline, opts)
- return changeStream.on('change', onChange)
+ return mongoose.connection.watch(pipeline, opts)
}
-const onChange = (change: ChangeStreamDocument): void => {
- const { operationType } = change
+const onChange = async (change: ChangeStreamDocument): Promise<void> => {
+ const {operationType} = change
switch (operationType) {
case 'replace':
case 'update': {
let dbOp: DBOperation = 'update'
const source = DocumentKind[change.ns.coll]
- const { fullDocument, _id, updateDescription } = change as ChangeStreamUpdateDocument
+ const {fullDocument, _id, updateDescription} = change as ChangeStreamUpdateDocument
if (fullDocument?._deleting != null) {
dbOp = 'delete'
}
- void recordChange({ _id: _id as ResumeToken, source, fullDocument: fullDocument as SupportedCollectionTypes, updateDescription, dbOp })
- break
+ return recordChange({
+ _id: _id as ResumeToken,
+ source,
+ fullDocument: fullDocument as SupportedCollectionTypes,
+ updateDescription,
+ dbOp
+ })
}
case 'insert': {
const dbOp = 'insert'
const source = DocumentKind[change.ns.coll]
- const { fullDocument, _id } = change
- void recordChange({ _id: _id as ResumeToken, source, fullDocument: fullDocument as SupportedCollectionTypes, dbOp })
- break
+ const {fullDocument, _id} = change
+ return recordChange({
+ _id: _id as ResumeToken,
+ source,
+ fullDocument: fullDocument as SupportedCollectionTypes,
+ dbOp
+ })
}
}
}
@@ -78,7 +107,7 @@ interface ChangeRecordType {
dbOp: DBOperation
}
-const recordChange = async ({ source, dbOp, fullDocument, updateDescription, _id }: ChangeRecordType): Promise<void> => {
+const recordChange = async ({source, dbOp, fullDocument, updateDescription, _id}: ChangeRecordType): Promise<void> => {
fullDocument.kind = source
switch (source) {
case DocumentKind.climbs: {
@@ -89,9 +118,7 @@ const recordChange = async ({ source, dbOp, fullDocument, updateDescription, _id
updateDescription: dotifyUpdateDescription(updateDescription),
kind: DocumentKind.climbs
}
- void changelogDataSource.record(newDocument)
- void updateClimbIndex(fullDocument as ClimbType, dbOp)
- break
+ return changelogDataSource.record(newDocument).then(() => updateClimbIndex(fullDocument as ClimbType, dbOp))
}
case DocumentKind.areas: {
const newDocument: BaseChangeRecordType = {
@@ -101,9 +128,7 @@ const recordChange = async ({ source, dbOp, fullDocument, updateDescription, _id
updateDescription: dotifyUpdateDescription(updateDescription),
kind: DocumentKind.areas
}
- void changelogDataSource.record(newDocument)
- void updateAreaIndex(fullDocument as AreaType, dbOp)
- break
+ return changelogDataSource.record(newDocument).then(() => updateAreaIndex(fullDocument as AreaType, dbOp))
}
case DocumentKind.organizations: {
const newDocument: BaseChangeRecordType = {
@@ -113,8 +138,7 @@ const recordChange = async ({ source, dbOp, fullDocument, updateDescription, _id
updateDescription: dotifyUpdateDescription(updateDescription),
kind: DocumentKind.organizations
}
- void changelogDataSource.record(newDocument)
- break
+ return changelogDataSource.record(newDocument).then()
}
default:
exhaustiveCheck(source)
@@ -174,7 +198,7 @@ const dotifyUpdateDescription = (updateDescription: UpdateDescriptionType): Upda
}
}
- const { updatedFields, removedFields, truncatedArrays } = updateDescription
+ const {updatedFields, removedFields, truncatedArrays} = updateDescription
cleanupObj(updatedFields)
return {
updatedFields: updatedFields != null ? Object.keys(dot.dot(updatedFields)) : [],
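testStreamListener awaits each change handler and then invokes an optional callback, which is what makes the new wait-for-expect devDependency useful: a test can block until the stream has actually processed its events before asserting on history. A hedged usage sketch, not part of this patch; the edit performed inside the test body is left as a placeholder:

import waitForExpect from 'wait-for-expect'
import { ChangeStreamDocument } from 'mongodb'
import { testStreamListener } from '../db/edit/streamListener.js'

it('records history for edits', async () => {
  const seen: ChangeStreamDocument[] = []
  const stream = await testStreamListener(change => { seen.push(change) })

  // ...perform an area/climb edit here...

  // Poll until the listener has fully handled at least one change event.
  await waitForExpect(() => expect(seen.length).toBeGreaterThan(0))
  await stream.close()
})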
diff --git a/src/db/import/json/__tests__/import-json.test.ts b/src/db/import/json/__tests__/import-json.test.ts
index 561d8e58..f4903b7c 100644
--- a/src/db/import/json/__tests__/import-json.test.ts
+++ b/src/db/import/json/__tests__/import-json.test.ts
@@ -1,18 +1,13 @@
-import { ChangeStream } from 'mongodb';
-import mongoose from 'mongoose';
+import {ChangeStream} from 'mongodb';
import muuid from 'uuid-mongodb';
-import {
- connectDB,
- getAreaModel,
- getClimbModel,
-} from '../../../../db/index.js';
-import { changelogDataSource } from '../../../../model/ChangeLogDataSource.js';
+import {changelogDataSource} from '../../../../model/ChangeLogDataSource.js';
import MutableAreaDataSource from '../../../../model/MutableAreaDataSource.js';
import MutableClimbDataSource from '../../../../model/MutableClimbDataSource.js';
-import { AreaType } from '../../../AreaTypes.js';
-import { ClimbType } from '../../../ClimbTypes.js';
+import {AreaType} from '../../../AreaTypes.js';
+import {ClimbType} from '../../../ClimbTypes.js';
import streamListener from '../../../edit/streamListener.js';
-import { AreaJson, BulkImportResult, bulkImportJson } from '../import-json.js';
+import {AreaJson, bulkImportJson, BulkImportResult} from '../import-json.js';
+import inMemoryDB from "../../../../utils/inMemoryDB.js";
type TestResult = BulkImportResult & {
  addedAreas: Partial<AreaType>[];
@@ -38,12 +33,12 @@ describe('bulk import e2e', () => {
const isRejected = (
    p: PromiseSettledResult<unknown>
): p is PromiseRejectedResult => p.status === 'rejected';
- const comittedAreas = await Promise.allSettled(
+ const committedAreas = await Promise.allSettled(
result.addedAreas.map((area) =>
areas.findOneAreaByUUID(area.metadata.area_id)
)
);
- const comittedClimbs = await Promise.allSettled(
+ const committedClimbs = await Promise.allSettled(
result.climbIds.map((id) => climbs.findOneClimbByMUUID(muuid.from(id)))
);
@@ -51,25 +46,25 @@ describe('bulk import e2e', () => {
...result,
errors: [
...result.errors,
- ...comittedAreas.filter(isRejected).map((p) => p.reason),
- ...comittedClimbs.filter(isRejected).map((p) => p.reason),
+ ...committedAreas.filter(isRejected).map((p) => p.reason),
+ ...committedClimbs.filter(isRejected).map((p) => p.reason),
],
- addedAreas: comittedAreas.filter(isFulfilled).map((p) => p.value),
- addedClimbs: comittedClimbs
- .filter(isFulfilled)
-      .map((p) => p.value as Partial<ClimbType>),
+ addedAreas: committedAreas.filter(isFulfilled).map((p) => p.value),
+ addedClimbs: committedClimbs
+ .filter(isFulfilled)
+      .map((p) => p.value as Partial<ClimbType>),
};
};
beforeAll(async () => {
- await connectDB();
+ await inMemoryDB.connect()
stream = await streamListener();
});
afterAll(async () => {
try {
await stream.close();
- await mongoose.disconnect();
+ await inMemoryDB.close()
} catch (e) {
console.log('error closing mongoose', e);
}
@@ -84,12 +79,7 @@ describe('bulk import e2e', () => {
afterEach(async () => {
await changelogDataSource._testRemoveAll();
- try {
- await getAreaModel().collection.drop();
- } catch {}
- try {
- await getClimbModel().collection.drop();
- } catch {}
+ await inMemoryDB.clear()
});
describe('adding new areas and climbs', () => {
@@ -145,8 +135,8 @@ describe('bulk import e2e', () => {
).resolves.toMatchObject({
errors: [],
addedAreas: [
- { area_name: 'Parent Area', gradeContext: 'US' },
- { area_name: 'Child Area 2', gradeContext: 'US' },
+ {area_name: 'Parent Area', gradeContext: 'US'},
+ {area_name: 'Child Area 2', gradeContext: 'US'},
      ] as Partial<AreaType>[],
});
});
@@ -204,7 +194,7 @@ describe('bulk import e2e', () => {
{
name: 'Test Climb',
grade: '5.10a',
- disciplines: { sport: true },
+ disciplines: {sport: true},
},
],
})
@@ -251,7 +241,7 @@ describe('bulk import e2e', () => {
})
).resolves.toMatchObject({
errors: [],
- addedAreas: [{ area_name: 'New Name' }],
+ addedAreas: [{area_name: 'New Name'}],
});
});
});
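The shared inMemoryDB utility replaces connectDB/mongoose.disconnect and the per-model collection drops. Its implementation is not included in these hunks; based on how it is used above (connect, clear, close, plus the InMemoryDB type), src/utils/inMemoryDB.ts might look roughly like this, assuming mongodb-memory-server under the hood with a single-node replica set, since transactions and change streams need one:

import mongoose from 'mongoose'
import { MongoMemoryReplSet } from 'mongodb-memory-server'

export interface InMemoryDB {
  connect: () => Promise<void>
  close: () => Promise<void>
  clear: () => Promise<void>
}

let replSet: MongoMemoryReplSet

const connect = async (): Promise<void> => {
  replSet = await MongoMemoryReplSet.create({ replSet: { count: 1 } })
  await mongoose.connect(replSet.getUri())
}

const clear = async (): Promise<void> => {
  // Empty every collection between tests instead of dropping models one by one.
  const collections = await mongoose.connection.db.collections()
  await Promise.all(collections.map(async c => await c.deleteMany({})))
}

const close = async (): Promise<void> => {
  await mongoose.disconnect()
  await replSet.stop()
}

const inMemoryDB: InMemoryDB = { connect, close, clear }
export default inMemoryDB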
diff --git a/src/db/index.ts b/src/db/index.ts
index 25656e8a..a0a7f800 100644
--- a/src/db/index.ts
+++ b/src/db/index.ts
@@ -1,18 +1,18 @@
import mongoose from 'mongoose'
-import { ChangeStream } from 'mongodb'
-import { config } from 'dotenv'
-import { enableAllPlugins } from 'immer'
+import {ChangeStream} from 'mongodb'
+import {config} from 'dotenv'
+import {enableAllPlugins} from 'immer'
-import { getAreaModel } from './AreaSchema.js'
-import { getClimbModel } from './ClimbSchema.js'
-import { getMediaObjectModel } from './MediaObjectSchema.js'
-import { getOrganizationModel } from './OrganizationSchema.js'
-import { getTickModel } from './TickSchema.js'
-import { getXMediaModel } from './XMediaSchema.js'
-import { getPostModel } from './PostSchema.js'
-import { getChangeLogModel } from './ChangeLogSchema.js'
-import { getExperimentalUserModel, getUserModel } from './UserSchema.js'
-import { logger } from '../logger.js'
+import {getAreaModel} from './AreaSchema.js'
+import {getClimbModel} from './ClimbSchema.js'
+import {getMediaObjectModel} from './MediaObjectSchema.js'
+import {getOrganizationModel} from './OrganizationSchema.js'
+import {getTickModel} from './TickSchema.js'
+import {getXMediaModel} from './XMediaSchema.js'
+import {getPostModel} from './PostSchema.js'
+import {getChangeLogModel} from './ChangeLogSchema.js'
+import {getExperimentalUserModel, getUserModel} from './UserSchema.js'
+import {logger} from '../logger.js'
import streamListener from './edit/streamListener.js'
config()
@@ -55,7 +55,7 @@ export const connectDB = async (onConnected: () => any = defaultFn): Promise => {
})
}
-export const defaultPostConnect = async (): Promise<ChangeStream> => {
+export const defaultPostConnect = async (changeStreamListener = streamListener): Promise<ChangeStream> => {
console.log('Kudos!')
await createIndexes()
- return await streamListener()
+ return await changeStreamListener()
}
// eslint-disable-next-line
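Making the listener injectable lets tests swap in the awaited variant while production keeps the fire-and-forget default. Assumed usage on both sides (the test wiring is hypothetical):

import { connectDB, defaultPostConnect } from './db/index.js'
import { testStreamListener } from './db/edit/streamListener.js'

// Production (unchanged behaviour):
await connectDB(defaultPostConnect)

// Tests: inject the listener that awaits every change event before resolving.
await connectDB(async () => await defaultPostConnect(testStreamListener))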
diff --git a/src/main.ts b/src/main.ts
index aaf9583c..0ef9c20b 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -1,5 +1,5 @@
-import { connectDB, defaultPostConnect } from './db/index.js'
-import { startServer } from './server.js'
+import {connectDB, defaultPostConnect} from './db/index.js'
+import {createServer, startServer} from './server.js'
await connectDB(defaultPostConnect)
-await startServer()
+await startServer(await createServer())
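main.ts now builds the server first and then starts it, which implies server.ts is split into createServer and startServer (that file's hunk is not shown here). A declaration-only sketch of the assumed shapes, inferred from how the tests consume setUpServer and app:

import type express from 'express'
import type { ApolloServer } from 'apollo-server-express'

// Builds the express app and ApolloServer and applies the Apollo middleware,
// but does not listen, so tests can run supertest directly against `app`.
export declare function createServer (): Promise<{ app: express.Application, server: ApolloServer }>

// Accepts the bundle from createServer and starts listening.
export declare function startServer (bundle: { app: express.Application, server: ApolloServer }): Promise<void>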
diff --git a/src/model/ChangeLogDataSource.ts b/src/model/ChangeLogDataSource.ts
index 267a6215..a88ad9bd 100644
--- a/src/model/ChangeLogDataSource.ts
+++ b/src/model/ChangeLogDataSource.ts
@@ -1,12 +1,19 @@
-import mongoose, { ClientSession } from 'mongoose'
-import { MongoDataSource } from 'apollo-datasource-mongodb'
-import { MUUID } from 'uuid-mongodb'
+import mongoose, {ClientSession} from 'mongoose'
+import {MongoDataSource} from 'apollo-datasource-mongodb'
+import {MUUID} from 'uuid-mongodb'
-import { getChangeLogModel } from '../db/index.js'
-import { ChangeLogType, OpType, BaseChangeRecordType, AreaChangeLogType, ClimbChangeLogType, OrganizationChangeLogType } from '../db/ChangeLogType'
-import { logger } from '../logger.js'
-import { areaHistoryDataSource } from './AreaHistoryDatasource.js'
-import { organizationHistoryDataSource } from './OrganizationHistoryDatasource.js'
+import {getChangeLogModel} from '../db/index.js'
+import {
+ AreaChangeLogType,
+ BaseChangeRecordType,
+ ChangeLogType,
+ ClimbChangeLogType,
+ OpType,
+ OrganizationChangeLogType
+} from '../db/ChangeLogType'
+import {logger} from '../logger.js'
+import {areaHistoryDataSource} from './AreaHistoryDatasource.js'
+import {organizationHistoryDataSource} from './OrganizationHistoryDatasource.js'
export default class ChangeLogDataSource extends MongoDataSource<ChangeLogType> {
changeLogModel = getChangeLogModel()
@@ -17,14 +24,14 @@ export default class ChangeLogDataSource extends MongoDataSource
* @param operation
* @returns
*/
-  async create (session: ClientSession, uuid: MUUID, operation: OpType): Promise<ChangeLogType> {
+  async create(session: ClientSession, uuid: MUUID, operation: OpType): Promise<ChangeLogType> {
const newChangeDoc: ChangeLogType = {
_id: new mongoose.Types.ObjectId(),
editedBy: uuid,
operation,
changes: []
}
- const rs = await this.changeLogModel.insertMany(newChangeDoc, { session })
+ const rs = await this.changeLogModel.insertMany(newChangeDoc, {session})
if (rs?.length !== 1) throw new Error('Error inserting new change')
return rs[0]
}
@@ -33,7 +40,7 @@ export default class ChangeLogDataSource extends MongoDataSource
* Record a new change in the changeset
* @param changeRecord
*/
- async record (changeRecord: BaseChangeRecordType): Promise {
+ async record(changeRecord: BaseChangeRecordType): Promise {
const filter = {
_id: changeRecord.fullDocument._change?.historyId
}
@@ -43,7 +50,7 @@ export default class ChangeLogDataSource extends MongoDataSource
$push: {
changes: {
$each: [changeRecord],
- $sort: { 'fullDocument._change.seq': -1 }
+ $sort: {'fullDocument._change.seq': -1}
}
}
}, {
@@ -56,11 +63,11 @@ export default class ChangeLogDataSource extends MongoDataSource
return this
}
-  async getAreaChangeSets (areaUuid?: MUUID): Promise<AreaChangeLogType[]> {
+  async getAreaChangeSets(areaUuid?: MUUID): Promise<AreaChangeLogType[]> {
return await areaHistoryDataSource.getChangeSetsByUuid(areaUuid)
}
-  async getOrganizationChangeSets (orgId?: MUUID): Promise<OrganizationChangeLogType[]> {
+  async getOrganizationChangeSets(orgId?: MUUID): Promise<OrganizationChangeLogType[]> {
return await organizationHistoryDataSource.getChangeSetsByOrgId(orgId)
}
@@ -69,24 +76,23 @@ export default class ChangeLogDataSource extends MongoDataSource
* @param uuidList optional filter
* @returns change sets
*/
-  async getChangeSets (uuidList: MUUID[]): Promise<Array<ChangeLogType>> {
- const rs = await this.changeLogModel.aggregate([
+  async getChangeSets(uuidList: MUUID[]): Promise<Array<ChangeLogType>> {
+ return this.changeLogModel.aggregate([
{
$sort: {
createdAt: -1
}
}
- ]).limit(500)
- return rs
+ ]).limit(500);
}
-  async _testRemoveAll (): Promise<void> {
+  async _testRemoveAll(): Promise<void> {
await this.changeLogModel.deleteMany()
}
static instance: ChangeLogDataSource
- static getInstance (): ChangeLogDataSource {
+ static getInstance(): ChangeLogDataSource {
if (ChangeLogDataSource.instance == null) {
/**
* Why suppress TS error? See: https://github.com/GraphQLGuide/apollo-datasource-mongodb/issues/88
diff --git a/src/model/MutableAreaDataSource.ts b/src/model/MutableAreaDataSource.ts
index 6d952cea..faefe171 100644
--- a/src/model/MutableAreaDataSource.ts
+++ b/src/model/MutableAreaDataSource.ts
@@ -1,27 +1,34 @@
import bbox2Polygon from '@turf/bbox-polygon'
-import { Point, geometry } from '@turf/helpers'
-import { UserInputError } from 'apollo-server-express'
+import {geometry, Point} from '@turf/helpers'
+import {UserInputError} from 'apollo-server-express'
import isoCountries from 'i18n-iso-countries'
-import enJson from 'i18n-iso-countries/langs/en.json' assert { type: 'json' }
-import { produce } from 'immer'
-import mongoose, { ClientSession } from 'mongoose'
-import { NIL, v5 as uuidv5 } from 'uuid'
-import muuid, { MUUID } from 'uuid-mongodb'
-
-import { GradeContexts } from '../GradeUtils.js'
-import CountriesLngLat from '../data/countries-with-lnglat.json' assert { type: 'json' }
-import { AreaDocumnent, AreaEditableFieldsType, AreaType, OperationType, UpdateSortingOrderType } from '../db/AreaTypes.js'
-import { ChangeRecordMetadataType } from '../db/ChangeLogType.js'
-import { ExperimentalAuthorType } from '../db/UserTypes.js'
-import { makeDBArea } from '../db/import/usa/AreaTransformer.js'
-import { createRootNode } from '../db/import/usa/AreaTree.js'
-import { StatsSummary, leafReducer, nodesReducer } from '../db/utils/jobs/TreeUpdaters/updateAllAreas.js'
-import { bboxFrom } from '../geo-utils.js'
-import { logger } from '../logger.js'
-import { createInstance as createExperimentalUserDataSource } from '../model/ExperimentalUserDataSource.js'
-import { sanitizeStrict } from '../utils/sanitize.js'
+import enJson from 'i18n-iso-countries/langs/en.json' assert {type: 'json'}
+import {produce} from 'immer'
+import mongoose, {ClientSession} from 'mongoose'
+import {NIL, v5 as uuidv5} from 'uuid'
+import muuid, {MUUID} from 'uuid-mongodb'
+
+import {GradeContexts} from '../GradeUtils.js'
+import CountriesLngLat from '../data/countries-with-lnglat.json' assert {type: 'json'}
+import {
+ AreaDocumnent,
+ AreaEditableFieldsType,
+ AreaType,
+ OperationType,
+ UpdateSortingOrderType
+} from '../db/AreaTypes.js'
+import {ChangeRecordMetadataType} from '../db/ChangeLogType.js'
+import {ExperimentalAuthorType} from '../db/UserTypes.js'
+import {makeDBArea} from '../db/import/usa/AreaTransformer.js'
+import {createRootNode} from '../db/import/usa/AreaTree.js'
+import {leafReducer, nodesReducer, StatsSummary} from '../db/utils/jobs/TreeUpdaters/updateAllAreas.js'
+import {bboxFrom} from '../geo-utils.js'
+import {logger} from '../logger.js'
+import {createInstance as createExperimentalUserDataSource} from '../model/ExperimentalUserDataSource.js'
+import {sanitizeStrict} from '../utils/sanitize.js'
import AreaDataSource from './AreaDataSource.js'
-import { changelogDataSource } from './ChangeLogDataSource.js'
+import {changelogDataSource} from './ChangeLogDataSource.js'
+import {withTransaction} from "../utils/helpers";
isoCountries.registerLocale(enJson)
@@ -46,7 +53,7 @@ export interface UpdateAreaOptions {
export default class MutableAreaDataSource extends AreaDataSource {
experimentalUserDataSource = createExperimentalUserDataSource()
-  async setDestinationFlag (user: MUUID, uuid: MUUID, flag: boolean): Promise<AreaType | null> {
+  async setDestinationFlag(user: MUUID, uuid: MUUID, flag: boolean): Promise<AreaType | null> {
const session = await this.areaModel.startSession()
let ret: AreaType | null = null
@@ -60,10 +67,10 @@ export default class MutableAreaDataSource extends AreaDataSource {
return ret
}
- async _setDestinationFlag (session, user: MUUID, uuid: MUUID, flag: boolean): Promise {
+ async _setDestinationFlag(session, user: MUUID, uuid: MUUID, flag: boolean): Promise {
const change = await changelogDataSource.create(session, uuid, OperationType.updateDestination)
- const filter = { 'metadata.area_id': uuid }
+ const filter = {'metadata.area_id': uuid}
const update: Pick }> = [{
$set: {
'metadata.isDestination': flag,
@@ -76,16 +83,16 @@ export default class MutableAreaDataSource extends AreaDataSource {
}
}
}]
- const opts = { new: true, session, timestamps: false } // return newly updated doc
+ const opts = {new: true, session, timestamps: false} // return newly updated doc
return await this.areaModel
- .updateOne(filter, update, opts).orFail().lean()
+ .updateOne(filter, update, opts).orFail().lean()
}
/**
* Add a country
* @param _countryCode alpha2 or 3 ISO code
*/
-  async addCountry (_countryCode: string): Promise<AreaType> {
+  async addCountry(_countryCode: string): Promise<AreaType> {
const countryCode = _countryCode.toLocaleUpperCase('en-US')
if (!isoCountries.isValid(countryCode)) {
throw new Error('Invalid ISO code: ' + countryCode)
@@ -119,7 +126,16 @@ export default class MutableAreaDataSource extends AreaDataSource {
throw new Error('Error inserting ' + countryCode)
}
-  async addAreaWith ({ user, areaName, parentUuid = null, countryCode, experimentalAuthor, isLeaf, isBoulder, session }: AddAreaOptions): Promise<AreaType> {
+ async addAreaWith({
+ user,
+ areaName,
+ parentUuid = null,
+ countryCode,
+ experimentalAuthor,
+ isLeaf,
+ isBoulder,
+ session
+  }: AddAreaOptions): Promise<AreaType> {
return await this.addArea(user, areaName, parentUuid, countryCode, experimentalAuthor, isLeaf, isBoulder, session)
}
@@ -130,14 +146,14 @@ export default class MutableAreaDataSource extends AreaDataSource {
* @param parentUuid
* @param countryCode
*/
- async addArea (user: MUUID,
- areaName: string,
- parentUuid: MUUID | null,
- countryCode?: string,
- experimentalAuthor?: ExperimentalAuthorType,
- isLeaf?: boolean,
- isBoulder?: boolean,
-    sessionCtx?: ClientSession): Promise<AreaType> {
+ async addArea(user: MUUID,
+ areaName: string,
+ parentUuid: MUUID | null,
+ countryCode?: string,
+ experimentalAuthor?: ExperimentalAuthorType,
+ isLeaf?: boolean,
+ isBoulder?: boolean,
+                sessionCtx?: ClientSession): Promise<AreaType> {
if (parentUuid == null && countryCode == null) {
throw new Error(`Adding area "${areaName}" failed. Must provide parent Id or country code`)
}
@@ -152,26 +168,15 @@ export default class MutableAreaDataSource extends AreaDataSource {
}
const session = sessionCtx ?? await this.areaModel.startSession()
-
- let ret: AreaType
-
if (session.inTransaction()) {
- ret = await this._addArea(session, user, areaName, uuid, experimentalAuthor, isLeaf, isBoulder)
+ return this._addArea(session, user, areaName, uuid, experimentalAuthor, isLeaf, isBoulder)
} else {
- // withTransaction() doesn't return the callback result
- // see https://jira.mongodb.org/browse/NODE-2014
- await session.withTransaction(
- async (session) => {
- ret = await this._addArea(session, user, areaName, uuid, experimentalAuthor, isLeaf, isBoulder)
- return ret
- })
+ return withTransaction(session, () => this._addArea(session, user, areaName, uuid, experimentalAuthor, isLeaf, isBoulder))
}
- // @ts-expect-error
- return ret
}
-  async _addArea (session, user: MUUID, areaName: string, parentUuid: MUUID, experimentalAuthor?: ExperimentalAuthorType, isLeaf?: boolean, isBoulder?: boolean): Promise<AreaType> {
- const parentFilter = { 'metadata.area_id': parentUuid }
+  async _addArea(session, user: MUUID, areaName: string, parentUuid: MUUID, experimentalAuthor?: ExperimentalAuthorType, isLeaf?: boolean, isBoulder?: boolean): Promise<AreaType> {
+ const parentFilter = {'metadata.area_id': parentUuid}
const parent = await this.areaModel.findOne(parentFilter).session(session).orFail(new UserInputError(`[${areaName}]: Expecting country or area parent, found none with id ${parentUuid.toString()}`))
if (parent.metadata.leaf || (parent.metadata?.isBoulder ?? false)) {
@@ -223,16 +228,16 @@ export default class MutableAreaDataSource extends AreaDataSource {
newArea._change = produce(newChangeMeta, draft => {
draft.seq = 1
})
- const rs1 = await this.areaModel.insertMany(newArea, { session })
+ const rs1 = await this.areaModel.insertMany(newArea, {session})
// Make sure parent knows about this new area
parent.children.push(newArea._id)
parent.updatedBy = experimentaAuthorId ?? user
- await parent.save({ timestamps: false })
+ await parent.save({timestamps: false})
return rs1[0].toObject()
}
-  async deleteArea (user: MUUID, uuid: MUUID): Promise<AreaType | null> {
+  async deleteArea(user: MUUID, uuid: MUUID): Promise<AreaType | null> {
const session = await this.areaModel.startSession()
let ret: AreaType | null = null
@@ -246,10 +251,10 @@ export default class MutableAreaDataSource extends AreaDataSource {
return ret
}
-  async _deleteArea (session: ClientSession, user: MUUID, uuid: MUUID): Promise<AreaType | null> {
+  async _deleteArea(session: ClientSession, user: MUUID, uuid: MUUID): Promise<AreaType | null> {
const filter = {
'metadata.area_id': uuid,
- deleting: { $ne: null }
+ deleting: {$ne: null}
}
const area = await this.areaModel.findOne(filter).session(session).orFail()
@@ -285,7 +290,7 @@ export default class MutableAreaDataSource extends AreaDataSource {
$filter: {
input: '$children',
as: 'child',
- cond: { $ne: ['$$child', area._id] }
+ cond: {$ne: ['$$child', area._id]}
}
},
updatedBy: user,
@@ -306,7 +311,7 @@ export default class MutableAreaDataSource extends AreaDataSource {
// See https://www.mongodb.com/community/forums/t/change-stream-fulldocument-on-delete/15963
// Mongo TTL indexes: https://www.mongodb.com/docs/manual/core/index-ttl/
return await this.areaModel.findOneAndUpdate(
- { 'metadata.area_id': uuid },
+ {'metadata.area_id': uuid},
[{
$set: {
updatedBy: user,
@@ -322,7 +327,7 @@ export default class MutableAreaDataSource extends AreaDataSource {
}).session(session).lean()
}
-  async updateAreaWith ({ user, areaUuid, document, session }: UpdateAreaOptions): Promise<AreaType | null> {
+  async updateAreaWith({user, areaUuid, document, session}: UpdateAreaOptions): Promise<AreaType | null> {
return await this.updateArea(user, areaUuid, document, session)
}
@@ -335,11 +340,11 @@ export default class MutableAreaDataSource extends AreaDataSource {
* @param document New fields
* @returns Newly updated area
*/
-  async updateArea (user: MUUID, areaUuid: MUUID, document: AreaEditableFieldsType, sessionCtx?: ClientSession): Promise<AreaType | null> {
+  async updateArea(user: MUUID, areaUuid: MUUID, document: AreaEditableFieldsType, sessionCtx?: ClientSession): Promise<AreaType | null> {
    const _updateArea = async (session: ClientSession, user: MUUID, areaUuid: MUUID, document: AreaEditableFieldsType): Promise<AreaType> => {
const filter = {
'metadata.area_id': areaUuid,
- deleting: { $ne: null }
+ deleting: {$ne: null}
}
const area = await this.areaModel.findOne(filter).session(session)
@@ -347,7 +352,17 @@ export default class MutableAreaDataSource extends AreaDataSource {
throw new Error('Area update error. Reason: Area not found.')
}
- const { areaName, description, shortCode, isDestination, isLeaf, isBoulder, lat, lng, experimentalAuthor } = document
+ const {
+ areaName,
+ description,
+ shortCode,
+ isDestination,
+ isLeaf,
+ isBoulder,
+ lat,
+ lng,
+ experimentalAuthor
+ } = document
// See https://github.com/OpenBeta/openbeta-graphql/issues/244
let experimentaAuthorId: MUUID | null = null
@@ -365,7 +380,7 @@ export default class MutableAreaDataSource extends AreaDataSource {
operation: opType,
seq: 0
}
- area.set({ _change })
+ area.set({_change})
area.updatedBy = experimentaAuthorId ?? user
if (area.pathTokens.length === 1) {
@@ -378,25 +393,25 @@ export default class MutableAreaDataSource extends AreaDataSource {
if (areaName != null) {
const sanitizedName = sanitizeStrict(areaName)
- area.set({ area_name: sanitizedName })
+ area.set({area_name: sanitizedName})
// change our pathTokens
await this.updatePathTokens(session, _change, area, sanitizedName)
}
- if (shortCode != null) area.set({ shortCode: shortCode.toUpperCase() })
- if (isDestination != null) area.set({ 'metadata.isDestination': isDestination })
- if (isLeaf != null) area.set({ 'metadata.leaf': isLeaf })
+ if (shortCode != null) area.set({shortCode: shortCode.toUpperCase()})
+ if (isDestination != null) area.set({'metadata.isDestination': isDestination})
+ if (isLeaf != null) area.set({'metadata.leaf': isLeaf})
if (isBoulder != null) {
- area.set({ 'metadata.isBoulder': isBoulder })
+ area.set({'metadata.isBoulder': isBoulder})
if (isBoulder) {
// boulfer == true implies leaf = true
- area.set({ 'metadata.leaf': true })
+ area.set({'metadata.leaf': true})
}
}
if (description != null) {
const sanitized = sanitizeStrict(description)
- area.set({ 'content.description': sanitized })
+ area.set({'content.description': sanitized})
}
const latLngHasChanged = lat != null && lng != null
@@ -420,22 +435,11 @@ export default class MutableAreaDataSource extends AreaDataSource {
}
const session = sessionCtx ?? await this.areaModel.startSession()
- let ret: AreaType | null = null
-
if (session.inTransaction()) {
- return await _updateArea(session, user, areaUuid, document)
+ return _updateArea(session, user, areaUuid, document)
} else {
- // withTransaction() doesn't return the callback result
- // see https://jira.mongodb.org/browse/NODE-2014
- await session.withTransaction(
- async session => {
- ret = await _updateArea(session, user, areaUuid, document)
- return ret
- }
- )
+ return withTransaction(session, () => _updateArea(session, user, areaUuid, document))
}
-
- return ret
}
/**
@@ -446,15 +450,17 @@ export default class MutableAreaDataSource extends AreaDataSource {
* @param newAreaName new area name
* @param depth tree depth
*/
-  async updatePathTokens (session: ClientSession, changeRecord: ChangeRecordMetadataType, area: AreaDocumnent, newAreaName: string, changeIndex: number = -1): Promise<void> {
+  async updatePathTokens(session: ClientSession, changeRecord: ChangeRecordMetadataType, area: AreaDocumnent, newAreaName: string, changeIndex: number = -1): Promise<void> {
if (area.pathTokens.length > 1) {
- if (changeIndex === -1) { changeIndex = area.pathTokens.length - 1 }
+ if (changeIndex === -1) {
+ changeIndex = area.pathTokens.length - 1
+ }
const newPath = [...area.pathTokens]
newPath[changeIndex] = newAreaName
- area.set({ pathTokens: newPath })
- area.set({ _change: changeRecord })
- await area.save({ session })
+ area.set({pathTokens: newPath})
+ area.set({_change: changeRecord})
+ await area.save({session})
// hydrate children_ids array with actual area documents
await area.populate('children')
@@ -474,16 +480,16 @@ export default class MutableAreaDataSource extends AreaDataSource {
* @param input area sorting input array
* @returns
*/
-  async updateSortingOrder (user: MUUID, input: UpdateSortingOrderType[]): Promise<string[]> {
+  async updateSortingOrder(user: MUUID, input: UpdateSortingOrderType[]): Promise<string[]> {
    const doUpdate = async (session: ClientSession, user: MUUID, input: UpdateSortingOrderType[]): Promise<string[]> => {
const opType = OperationType.orderAreas
const change = await changelogDataSource.create(session, user, opType)
const updates: any[] = []
- input.forEach(({ areaId, leftRightIndex }, index) => {
+ input.forEach(({areaId, leftRightIndex}, index) => {
updates.push({
updateOne: {
- filter: { 'metadata.area_id': muuid.from(areaId) },
+ filter: {'metadata.area_id': muuid.from(areaId)},
update: {
$set: {
'metadata.leftRightIndex': leftRightIndex,
@@ -500,7 +506,7 @@ export default class MutableAreaDataSource extends AreaDataSource {
})
})
- const rs = (await this.areaModel.bulkWrite(updates, { session })).toJSON()
+ const rs = (await this.areaModel.bulkWrite(updates, {session})).toJSON()
if (rs.ok === 1 && rs.nMatched === rs.nModified) {
return input.map(item => item.areaId)
@@ -530,7 +536,7 @@ export default class MutableAreaDataSource extends AreaDataSource {
* @param startingArea
* @param excludeStartingArea true to exlude the starting area from the update. Useful when deleting an area.
*/
-  async updateLeafStatsAndGeoData (session: ClientSession, changeRecord: ChangeRecordMetadataType, startingArea: AreaDocumnent, excludeStartingArea: boolean = false): Promise<void> {
+  async updateLeafStatsAndGeoData(session: ClientSession, changeRecord: ChangeRecordMetadataType, startingArea: AreaDocumnent, excludeStartingArea: boolean = false): Promise<void> {
/**
* Update function. For each node, recalculate stats and recursively update its acenstors until we reach the country node.
*/
@@ -543,12 +549,12 @@ export default class MutableAreaDataSource extends AreaDataSource {
const ancestors = area.ancestors.split(',')
const parentUuid = muuid.from(ancestors[ancestors.length - 2])
const parentArea =
- await this.areaModel.findOne({ 'metadata.area_id': parentUuid })
- .batchSize(10)
- .populate<{ children: AreaDocumnent[] }>({ path: 'children', model: this.areaModel })
- .allowDiskUse(true)
- .session(session)
- .orFail()
+ await this.areaModel.findOne({'metadata.area_id': parentUuid})
+ .batchSize(10)
+ .populate<{ children: AreaDocumnent[] }>({path: 'children', model: this.areaModel})
+ .allowDiskUse(true)
+ .session(session)
+ .orFail()
const acc: StatsSummary[] = []
/**
@@ -562,7 +568,7 @@ export default class MutableAreaDataSource extends AreaDataSource {
}
}
- const current = await nodesReducer(acc, parentArea as any as AreaDocumnent, { session, changeRecord })
+ const current = await nodesReducer(acc, parentArea as any as AreaDocumnent, {session, changeRecord})
await updateFn(session, changeRecord, parentArea as any as AreaDocumnent, current)
}
@@ -582,7 +588,7 @@ export default class MutableAreaDataSource extends AreaDataSource {
static instance: MutableAreaDataSource
- static getInstance (): MutableAreaDataSource {
+ static getInstance(): MutableAreaDataSource {
if (MutableAreaDataSource.instance == null) {
MutableAreaDataSource.instance = new MutableAreaDataSource(mongoose.connection.db.collection('areas'))
}
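Both refactored methods above now delegate to a withTransaction helper imported from ../utils/helpers, replacing the inline session.withTransaction workaround. That helper is not in these hunks; a minimal sketch, assuming it simply captures the callback result that session.withTransaction() historically did not return (see https://jira.mongodb.org/browse/NODE-2014):

import type { ClientSession } from 'mongoose'

// Runs `fn` inside the session's transaction and returns its result.
export async function withTransaction<T> (session: ClientSession, fn: () => Promise<T>): Promise<T | undefined> {
  let result: T | undefined
  await session.withTransaction(async () => {
    result = await fn()
    return result
  })
  return result
}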
diff --git a/src/model/MutableClimbDataSource.ts b/src/model/MutableClimbDataSource.ts
index f11d9afa..603a486a 100644
--- a/src/model/MutableClimbDataSource.ts
+++ b/src/model/MutableClimbDataSource.ts
@@ -1,19 +1,20 @@
-import { UserInputError } from 'apollo-server-express'
-import { ClientSession } from 'mongoose'
-import muid, { MUUID } from 'uuid-mongodb'
-
-import { createGradeObject, gradeContextToGradeScales, sanitizeDisciplines } from '../GradeUtils.js'
-import { getAreaModel } from '../db/AreaSchema.js'
-import { AreaDocumnent } from '../db/AreaTypes.js'
-import { ChangeRecordMetadataType } from '../db/ChangeLogType.js'
-import { getClimbModel } from '../db/ClimbSchema.js'
-import { ClimbChangeDocType, ClimbChangeInputType, ClimbEditOperationType, ClimbType, IPitch } from '../db/ClimbTypes.js'
-import { aggregateCragStats } from '../db/utils/Aggregate.js'
-import { sanitize, sanitizeStrict } from '../utils/sanitize.js'
-import { changelogDataSource } from './ChangeLogDataSource.js'
+import {UserInputError} from 'apollo-server-express'
+import {ClientSession} from 'mongoose'
+import muid, {MUUID} from 'uuid-mongodb'
+
+import {createGradeObject, gradeContextToGradeScales, sanitizeDisciplines} from '../GradeUtils.js'
+import {getAreaModel} from '../db/AreaSchema.js'
+import {AreaDocumnent} from '../db/AreaTypes.js'
+import {ChangeRecordMetadataType} from '../db/ChangeLogType.js'
+import {getClimbModel} from '../db/ClimbSchema.js'
+import {ClimbChangeDocType, ClimbChangeInputType, ClimbEditOperationType, ClimbType, IPitch} from '../db/ClimbTypes.js'
+import {aggregateCragStats} from '../db/utils/Aggregate.js'
+import {sanitize, sanitizeStrict} from '../utils/sanitize.js'
+import {changelogDataSource} from './ChangeLogDataSource.js'
import ClimbDataSource from './ClimbDataSource.js'
-import { createInstance as createExperimentalUserDataSource } from './ExperimentalUserDataSource.js'
+import {createInstance as createExperimentalUserDataSource} from './ExperimentalUserDataSource.js'
import MutableAreaDataSource from './MutableAreaDataSource.js'
+import {withTransaction} from "../utils/helpers";
export interface AddOrUpdateClimbsOptions {
userId: MUUID
@@ -25,7 +26,7 @@ export interface AddOrUpdateClimbsOptions {
export default class MutableClimbDataSource extends ClimbDataSource {
experimentalUserDataSource = createExperimentalUserDataSource()
-  async _addOrUpdateClimbs (userId: MUUID, session: ClientSession, parentId: MUUID, userInput: ClimbChangeInputType[]): Promise<string[]> {
+  async _addOrUpdateClimbs(userId: MUUID, session: ClientSession, parentId: MUUID, userInput: ClimbChangeInputType[]): Promise<string[]> {
const newClimbIds = new Array(userInput.length)
for (let i = 0; i < newClimbIds.length; i++) {
// make sure there's some input
@@ -41,18 +42,19 @@ export default class MutableClimbDataSource extends ClimbDataSource {
}
}
- const existingIds = await this.climbModel.find({ _id: { $in: newClimbIds } }).select('_id')
+ const existingIds = await this.climbModel.find({_id: {$in: newClimbIds}}).select('_id')
interface IdMapType {
id: MUUID
existed: boolean
}
+
// A list of ID objects to track whether the ID exists in the DB
const idList = newClimbIds.reduce((acc, curr) => {
if (existingIds.some(item => item._id.toUUID().toString() === curr.toUUID().toString())) {
- acc.push({ id: curr, existed: true })
+ acc.push({id: curr, existed: true})
} else {
- acc.push({ id: curr, existed: false })
+ acc.push({id: curr, existed: false})
}
return acc
}, [])
@@ -60,11 +62,11 @@ export default class MutableClimbDataSource extends ClimbDataSource {
const opType = ClimbEditOperationType.updateClimb
const change = await changelogDataSource.create(session, userId, opType)
- const parentFilter = { 'metadata.area_id': parentId }
+ const parentFilter = {'metadata.area_id': parentId}
const parent = await this.areaModel
- .findOne(parentFilter).session(session)
- .orFail(new UserInputError(`Area with id: ${parentId.toUUID().toString()} not found`))
+ .findOne(parentFilter).session(session)
+ .orFail(new UserInputError(`Area with id: ${parentId.toUUID().toString()} not found`))
const _change: ChangeRecordMetadataType = {
user: userId,
@@ -73,7 +75,7 @@ export default class MutableClimbDataSource extends ClimbDataSource {
operation: ClimbEditOperationType.updateClimb,
seq: 0
}
- parent.set({ _change })
+ parent.set({_change})
// does the parent area have subareas?
if (parent.children.length > 0) {
@@ -101,7 +103,7 @@ export default class MutableClimbDataSource extends ClimbDataSource {
// It's ok to have empty disciplines obj in the input in case
// we just want to update other fields.
// However, if disciplines is non-empty, is there 1 non-boulder problem in the input?
- const hasARouteClimb = userInput.some(({ disciplines }) =>
+ const hasARouteClimb = userInput.some(({disciplines}) =>
disciplines != null && Object.keys(disciplines).length > 0 && !(disciplines?.bouldering ?? false))
if (hasARouteClimb && (parent.metadata?.isBoulder ?? false)) {
@@ -140,7 +142,7 @@ export default class MutableClimbDataSource extends ClimbDataSource {
const newPitchesWithIDs = pitches != null
? pitches.map((pitch): IPitch => {
- const { id, ...partialPitch } = pitch // separate 'id' input and rest of the pitch properties to avoid duplicate id and _id
+ const {id, ...partialPitch} = pitch // separate 'id' input and rest of the pitch properties to avoid duplicate id and _id
if (partialPitch.pitchNumber === undefined) {
throw new UserInputError('Each pitch in a multi-pitch climb must have a pitchNumber representing its sequence in the climb. Please ensure that every pitch is numbered.')
}
@@ -154,14 +156,14 @@ export default class MutableClimbDataSource extends ClimbDataSource {
})
: null
- const { description, location, protection, name, fa, length, boltsCount } = userInput[i]
+ const {description, location, protection, name, fa, length, boltsCount} = userInput[i]
// Make sure we don't update content = {}
// See https://www.mongodb.com/community/forums/t/mongoservererror-invalid-set-caused-by-an-empty-object-is-not-a-valid-value/148344/2
const content = {
- ...description != null && { description: sanitize(description) },
- ...location != null && { location: sanitize(location) },
- ...protection != null && { protection: sanitize(protection) }
+ ...description != null && {description: sanitize(description)},
+ ...location != null && {location: sanitize(location)},
+ ...protection != null && {protection: sanitize(protection)}
}
/**
@@ -174,22 +176,22 @@ export default class MutableClimbDataSource extends ClimbDataSource {
*/
const doc: ClimbChangeDocType = {
_id: newClimbIds[i],
- ...name != null && { name: sanitizeStrict(name) },
- ...newGradeObj != null && { grades: newGradeObj },
- ...typeSafeDisciplines != null && { type: typeSafeDisciplines },
+ ...name != null && {name: sanitizeStrict(name)},
+ ...newGradeObj != null && {grades: newGradeObj},
+ ...typeSafeDisciplines != null && {type: typeSafeDisciplines},
gradeContext: parent.gradeContext,
- ...fa != null && { fa },
- ...length != null && length > 0 && { length },
- ...boltsCount != null && boltsCount >= 0 && { boltsCount }, // Include 'boltsCount' if it's defined and its value is 0 (no bolts) or greater
- ...newPitchesWithIDs != null && { pitches: newPitchesWithIDs },
- ...Object.keys(content).length > 0 && { content },
+ ...fa != null && {fa},
+ ...length != null && length > 0 && {length},
+ ...boltsCount != null && boltsCount >= 0 && {boltsCount}, // Include 'boltsCount' if it's defined and its value is 0 (no bolts) or greater
+ ...newPitchesWithIDs != null && {pitches: newPitchesWithIDs},
+ ...Object.keys(content).length > 0 && {content},
metadata: {
areaRef: parent.metadata.area_id,
lnglat: parent.metadata.lnglat,
- ...userInput[i]?.leftRightIndex != null && { left_right_index: userInput[i].leftRightIndex }
+ ...userInput[i]?.leftRightIndex != null && {left_right_index: userInput[i].leftRightIndex}
},
- ...!idList[i].existed && { createdBy: experimentalUserId ?? userId },
- ...idList[i].existed && { updatedBy: userId },
+ ...!idList[i].existed && {createdBy: experimentalUserId ?? userId},
+ ...idList[i].existed && {updatedBy: userId},
_change: {
user: experimentalUserId ?? userId,
historyId: change._id,
@@ -203,7 +205,7 @@ export default class MutableClimbDataSource extends ClimbDataSource {
const bulk = newDocs.map(doc => ({
updateOne: {
- filter: { _id: doc._id },
+ filter: {_id: doc._id},
update: [{
$set: {
...doc,
@@ -217,18 +219,18 @@ export default class MutableClimbDataSource extends ClimbDataSource {
}
}))
- const rs = await (await this.climbModel.bulkWrite(bulk, { session })).toJSON()
+ const rs = await (await this.climbModel.bulkWrite(bulk, {session})).toJSON()
if (rs.ok === 1) {
const idList: MUUID[] = []
const idStrList: string[] = []
- rs.upserted.forEach(({ _id }) => {
+ rs.upserted.forEach(({_id}) => {
idList.push(_id)
idStrList.push(_id.toUUID().toString())
})
if (idList.length > 0) {
- parent.set({ climbs: parent.climbs.concat(idList) })
+ parent.set({climbs: parent.climbs.concat(idList)})
}
await parent.save()
@@ -244,7 +246,7 @@ export default class MutableClimbDataSource extends ClimbDataSource {
}
}
- async addOrUpdateClimbsWith ({ userId, parentId, changes, session }: AddOrUpdateClimbsOptions): Promise<string[]> {
+ async addOrUpdateClimbsWith({userId, parentId, changes, session}: AddOrUpdateClimbsOptions): Promise<string[]> {
return await this.addOrUpdateClimbs(userId, parentId, changes, session)
}
@@ -254,24 +256,13 @@ export default class MutableClimbDataSource extends ClimbDataSource {
* @param changes
* @returns a list of updated (or newly added) climb IDs
*/
- async addOrUpdateClimbs (userId: MUUID, parentId: MUUID, changes: ClimbChangeInputType[], sessionCtx?: ClientSession): Promise<string[]> {
+ async addOrUpdateClimbs(userId: MUUID, parentId: MUUID, changes: ClimbChangeInputType[], sessionCtx?: ClientSession): Promise<string[]> {
const session = sessionCtx ?? await this.areaModel.startSession()
- let ret: string[]
-
if (session.inTransaction()) {
- return await this._addOrUpdateClimbs(userId, session, parentId, changes)
+ return this._addOrUpdateClimbs(userId, session, parentId, changes)
} else {
- // withTransaction() doesn't return the callback result
- // see https://jira.mongodb.org/browse/NODE-2014
- await session.withTransaction(
- async (session) => {
- ret = await this._addOrUpdateClimbs(userId, session, parentId, changes)
- return ret
- })
+ return await withTransaction(session, () => this._addOrUpdateClimbs(userId, session, parentId, changes)) ?? []
}
-
- // @ts-expect-error
- return ret
}
/**
@@ -281,7 +272,7 @@ export default class MutableClimbDataSource extends ClimbDataSource {
* @param idListStr Array of climb IDs
* @returns number of climbs was deleted
*/
- async deleteClimbs (userId: MUUID, parentId: MUUID, idList: MUUID[]): Promise<number> {
+ async deleteClimbs(userId: MUUID, parentId: MUUID, idList: MUUID[]): Promise<number> {
const session = await this.areaModel.startSession()
let ret = 0
@@ -298,20 +289,20 @@ export default class MutableClimbDataSource extends ClimbDataSource {
}
// Remove climb IDs from parent.climbs[]
await this.areaModel.updateOne(
- { 'metadata.area_id': parentId },
+ {'metadata.area_id': parentId},
{
- $pullAll: { climbs: idList },
+ $pullAll: {climbs: idList},
$set: {
_change,
updatedBy: userId
}
},
- { session })
+ {session})
// Mark climbs delete
const filter = {
- _id: { $in: idList },
- _deleting: { $exists: false }
+ _id: {$in: idList},
+ _deleting: {$exists: false}
}
const rs = await this.climbModel.updateMany(
filter,
@@ -334,7 +325,7 @@ export default class MutableClimbDataSource extends ClimbDataSource {
static instance: MutableClimbDataSource
- static getInstance (): MutableClimbDataSource {
+ static getInstance(): MutableClimbDataSource {
if (MutableClimbDataSource.instance == null) {
// Why suppress TS error? See: https://github.com/GraphQLGuide/apollo-datasource-mongodb/issues/88
// @ts-expect-error
@@ -355,10 +346,10 @@ const updateStats = async (areaIdOrAreaCursor: MUUID | AreaDocumnent, session: C
if ((areaIdOrAreaCursor as AreaDocumnent).totalClimbs != null) {
area = areaIdOrAreaCursor as AreaDocumnent
} else {
- area = await getAreaModel().findOne({ 'metadata.area_id': areaIdOrAreaCursor as MUUID }).session(session).orFail()
+ area = await getAreaModel().findOne({'metadata.area_id': areaIdOrAreaCursor as MUUID}).session(session).orFail()
}
- await area.populate<{ climbs: ClimbType[] }>({ path: 'climbs', model: getClimbModel() })
+ await area.populate<{ climbs: ClimbType[] }>({path: 'climbs', model: getClimbModel()})
area.set({
totalClimbs: area.climbs.length,
aggregate: aggregateCragStats(area.toObject())
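Editor's note (not part of the diff): the partial-update documents built above lean on the `...cond && {key}` spread idiom, so a field only lands in the `$set` payload when its input is present and valid, which is what keeps Mongo from rejecting an empty `content` object. A minimal sketch of the idiom, with illustrative field names rather than the real climb schema:

```ts
interface PartialUpdate {
  name?: string
  length?: number
}

// `cond && {key}` evaluates to `false` when the guard fails, and spreading
// `false` contributes no properties, so unset fields never reach the update.
const buildUpdate = (name?: string, length?: number): PartialUpdate => ({
  ...(name != null && { name }),
  ...(length != null && length > 0 && { length })
})

console.log(buildUpdate('Cool route 1')) // { name: 'Cool route 1' }
console.log(buildUpdate(undefined, 30)) // { length: 30 }
```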
diff --git a/src/model/__tests__/AreaHistoryDataSource.ts b/src/model/__tests__/AreaHistoryDataSource.ts
index 171433b3..57eaa8c4 100644
--- a/src/model/__tests__/AreaHistoryDataSource.ts
+++ b/src/model/__tests__/AreaHistoryDataSource.ts
@@ -1,52 +1,36 @@
-import mongoose from 'mongoose'
-import { ChangeStream } from 'mongodb'
-import { jest } from '@jest/globals'
import muuid from 'uuid-mongodb'
import MutableAreaDataSource from '../MutableAreaDataSource.js'
-import { connectDB, createIndexes, getAreaModel } from '../../db/index.js'
-import streamListener from '../../db/edit/streamListener.js'
-import { logger } from '../../logger.js'
-import { changelogDataSource } from '../ChangeLogDataSource.js'
-import { OperationType } from '../../db/AreaTypes.js'
-
-jest.setTimeout(120000)
+import {changelogDataSource} from '../ChangeLogDataSource.js'
+import {OperationType} from '../../db/AreaTypes.js'
+import inMemoryDB from "../../utils/inMemoryDB.js";
+import waitForExpect from "wait-for-expect";
+import jest from "jest-mock";
describe('Area history', () => {
let areas: MutableAreaDataSource
- let stream: ChangeStream
+ let onChange: jest.Mock
const testUser = muuid.v4()
beforeAll(async () => {
- await connectDB()
-
- stream = await streamListener()
-
- try {
- await getAreaModel().collection.drop()
- await createIndexes()
- } catch (e) {
- logger.info('Expected exception')
- }
-
+ onChange = jest.fn()
+ await inMemoryDB.connect(onChange)
await changelogDataSource._testRemoveAll()
- areas = new MutableAreaDataSource(mongoose.connection.db.collection('areas'))
+ areas = MutableAreaDataSource.getInstance()
})
afterAll(async () => {
try {
- await stream.close()
- await mongoose.disconnect()
+ await inMemoryDB.close()
} catch (e) {
console.log('closing mongoose', e)
}
})
beforeEach(async () => {
- // await changelogDataSource._testRemoveAll()
- // eslint-disable-next-line
- await new Promise(res => setTimeout(res, 3000))
+ await changelogDataSource._testRemoveAll()
+ onChange.mockClear()
})
it('should create history records for new subareas', async () => {
@@ -55,16 +39,12 @@ describe('Area history', () => {
expect(newArea.area_name).toEqual(usa.area_name)
const or = await areas.addArea(testUser, 'oregon', usa.metadata.area_id)
- // eslint-disable-next-line
- await new Promise(res => setTimeout(res, 1000))
const nv = await areas.addArea(testUser, 'nevada', usa.metadata.area_id)
expect(nv?._id).toBeTruthy()
expect(or?._id).toBeTruthy()
- // eslint-disable-next-line
- await new Promise(res => setTimeout(res, 2000))
-
+ await waitForExpect(() => expect(onChange).toHaveBeenCalledTimes(5))
const areaHistory = await changelogDataSource.getAreaChangeSets()
expect(areaHistory).toHaveLength(2)
@@ -125,9 +105,7 @@ describe('Area history', () => {
await areas.setDestinationFlag(testUser, areaUuid, true)
await areas.setDestinationFlag(testUser, areaUuid, false)
- // eslint-disable-next-line
- await new Promise(res => setTimeout(res, 2000))
-
+ await waitForExpect(() => expect(onChange).toHaveBeenCalledTimes(5))
const changset = await changelogDataSource.getAreaChangeSets(areaUuid)
expect(changset).toHaveLength(3)
@@ -149,9 +127,7 @@ describe('Area history', () => {
await areas.deleteArea(testUser, leonidio.metadata.area_id)
- // eslint-disable-next-line
- await new Promise(res => setTimeout(res, 10000))
-
+ await waitForExpect(() => expect(onChange).toHaveBeenCalledTimes(5))
const history = await changelogDataSource.getAreaChangeSets(leonidio.metadata.area_id)
expect(history).toHaveLength(2)
@@ -181,9 +157,7 @@ describe('Area history', () => {
expect(deleted).toBeTruthy()
- // eslint-disable-next-line
- await new Promise(res => setTimeout(res, 3000))
-
+ await waitForExpect(() => expect(onChange).toHaveBeenCalledTimes(5))
const history = await changelogDataSource.getAreaChangeSets(spain.metadata.area_id)
// should only have 2 entries:
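Editor's note (not part of the diff): these history tests replace fixed `setTimeout` sleeps with `wait-for-expect`, which re-runs the assertion until it passes or times out, so the suite only waits as long as the change stream actually needs. A minimal, self-contained sketch of the pattern, where a timer stands in for the mocked change-stream callback:

```ts
import { expect, it, jest } from '@jest/globals'
import waitForExpect from 'wait-for-expect'

it('resolves once the callback has fired twice', async () => {
  const onChange = jest.fn()

  // something asynchronous eventually invokes the callback
  setTimeout(() => { onChange(); onChange() }, 50)

  // poll the assertion instead of sleeping a fixed, worst-case duration
  await waitForExpect(() => expect(onChange).toHaveBeenCalledTimes(2))
})
```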
diff --git a/src/model/__tests__/ChangeLogDS.ts b/src/model/__tests__/ChangeLogDS.ts
index 4edfa5fb..6cf4e36a 100644
--- a/src/model/__tests__/ChangeLogDS.ts
+++ b/src/model/__tests__/ChangeLogDS.ts
@@ -1,20 +1,17 @@
-import mongoose from 'mongoose'
-import { jest } from '@jest/globals'
import muuid from 'uuid-mongodb'
-import { connectDB, getChangeLogModel, getAreaModel } from '../../db/index.js'
+import {getAreaModel, getChangeLogModel} from '../../db/index.js'
import ChangeLogDataSource from '../ChangeLogDataSource.js'
-import { OpType } from '../../db/ChangeLogType.js'
-import { OperationType } from '../../db/AreaTypes.js'
+import {OpType} from '../../db/ChangeLogType.js'
+import {OperationType} from '../../db/AreaTypes.js'
-import { logger } from '../../logger.js'
-
-jest.setTimeout(10000)
+import {logger} from '../../logger.js'
+import inMemoryDB from "../../utils/inMemoryDB.js";
describe('Area history', () => {
let changeLog: ChangeLogDataSource
beforeAll(async () => {
- await connectDB()
+ await inMemoryDB.connect()
try {
await getAreaModel().collection.drop()
@@ -23,14 +20,11 @@ describe('Area history', () => {
logger.info('Expected exception')
}
- changeLog = new ChangeLogDataSource(
- mongoose.connection.db.collection(
- getChangeLogModel().modelName)
- )
+ changeLog = ChangeLogDataSource.getInstance()
})
afterAll(async () => {
- await mongoose.connection.close()
+ await inMemoryDB.close()
})
it('should create a change record', async () => {
diff --git a/src/model/__tests__/MediaDataSource.ts b/src/model/__tests__/MediaDataSource.ts
index de567c3a..05f05fe3 100644
--- a/src/model/__tests__/MediaDataSource.ts
+++ b/src/model/__tests__/MediaDataSource.ts
@@ -1,13 +1,21 @@
import mongoose from 'mongoose'
-import muuid, { MUUID } from 'uuid-mongodb'
+import muuid, {MUUID} from 'uuid-mongodb'
import MutableMediaDataSource from '../MutableMediaDataSource.js'
import AreaDataSource from '../MutableAreaDataSource.js'
import ClimbDataSource from '../MutableClimbDataSource.js'
-import { connectDB, createIndexes } from '../../db/index.js'
-import { AreaType } from '../../db/AreaTypes.js'
-import { EntityTag, MediaObject, MediaObjectGQLInput, AddTagEntityInput, UserMediaQueryInput, UserMedia } from '../../db/MediaObjectTypes.js'
-import { newSportClimb1 } from './MutableClimbDataSource.js'
+import {createIndexes} from '../../db/index.js'
+import {AreaType} from '../../db/AreaTypes.js'
+import {
+ AddTagEntityInput,
+ EntityTag,
+ MediaObject,
+ MediaObjectGQLInput,
+ UserMedia,
+ UserMediaQueryInput
+} from '../../db/MediaObjectTypes.js'
+import {newSportClimb1} from './MutableClimbDataSource.js'
+import inMemoryDB from "../../utils/inMemoryDB.js";
const TEST_MEDIA: MediaObjectGQLInput = {
userUuid: 'a2eb6353-65d1-445f-912c-53c6301404bd',
@@ -34,7 +42,7 @@ describe('MediaDataSource', () => {
let testMediaObject: MediaObject
beforeAll(async () => {
- await connectDB()
+ await inMemoryDB.connect()
areas = AreaDataSource.getInstance()
climbs = ClimbDataSource.getInstance()
@@ -85,7 +93,7 @@ describe('MediaDataSource', () => {
})
afterAll(async () => {
- await mongoose.connection.close()
+ await inMemoryDB.close()
})
it('should not tag a nonexistent area', async () => {
@@ -133,7 +141,7 @@ describe('MediaDataSource', () => {
expect(mediaObjects[0].entityTags).toHaveLength(2)
// remove tag
- const res = await media.removeEntityTag({ mediaId: climbTag.mediaId, tagId: tag._id })
+ const res = await media.removeEntityTag({mediaId: climbTag.mediaId, tagId: tag._id})
expect(res).toBe(true)
// verify the number tags
@@ -194,7 +202,7 @@ describe('MediaDataSource', () => {
const rs = await media.addMediaObjects([{
...TEST_MEDIA,
mediaUrl: 'photo101.jpg',
- entityTag: { entityType: 0, entityId: climbIdForTagging.toUUID().toString() }
+ entityTag: {entityType: 0, entityId: climbIdForTagging.toUUID().toString()}
}
])
@@ -214,7 +222,7 @@ describe('MediaDataSource', () => {
*/
const newMediaListInput: MediaObjectGQLInput[] = []
for (let i = 0; i < 7; i = i + 1) {
- newMediaListInput.push({ ...MEDIA_TEMPLATE, mediaUrl: `/photo${i}.jpg` })
+ newMediaListInput.push({...MEDIA_TEMPLATE, mediaUrl: `/photo${i}.jpg`})
}
const expectedMedia = await media.addMediaObjects(newMediaListInput)
diff --git a/src/model/__tests__/MutableClimbDataSource.ts b/src/model/__tests__/MutableClimbDataSource.ts
index 58c73ff1..ad1bf83f 100644
--- a/src/model/__tests__/MutableClimbDataSource.ts
+++ b/src/model/__tests__/MutableClimbDataSource.ts
@@ -1,16 +1,16 @@
-import mongoose from 'mongoose'
import muid from 'uuid-mongodb'
-import { ChangeStream } from 'mongodb'
+import {ChangeStream} from 'mongodb'
import MutableClimbDataSource from '../MutableClimbDataSource.js'
import MutableAreaDataSource from '../MutableAreaDataSource.js'
-import { connectDB, createIndexes, getAreaModel, getClimbModel } from '../../db/index.js'
-import { logger } from '../../logger.js'
-import { ClimbType, ClimbChangeInputType } from '../../db/ClimbTypes.js'
-import { sanitizeDisciplines } from '../../GradeUtils.js'
+import {createIndexes, getAreaModel, getClimbModel} from '../../db/index.js'
+import {logger} from '../../logger.js'
+import {ClimbChangeInputType, ClimbType} from '../../db/ClimbTypes.js'
+import {sanitizeDisciplines} from '../../GradeUtils.js'
import streamListener from '../../db/edit/streamListener.js'
-import { changelogDataSource } from '../ChangeLogDataSource.js'
+import {changelogDataSource} from '../ChangeLogDataSource.js'
+import inMemoryDB from "../../utils/inMemoryDB.js";
export const newSportClimb1: ClimbChangeInputType = {
name: 'Cool route 1',
@@ -122,16 +122,16 @@ describe('Climb CRUD', () => {
pitches: [
{
pitchNumber: 1,
- grades: { uiaa: '7' },
- disciplines: { sport: true },
+ grades: {uiaa: '7'},
+ disciplines: {sport: true},
length: 30,
boltsCount: 5,
description: 'First pitch description'
},
{
pitchNumber: 2,
- grades: { uiaa: '6+' },
- disciplines: { sport: true },
+ grades: {uiaa: '6+'},
+ disciplines: {sport: true},
length: 40,
boltsCount: 6,
description: 'Second pitch description'
@@ -140,7 +140,7 @@ describe('Climb CRUD', () => {
}
beforeAll(async () => {
- await connectDB()
+ await inMemoryDB.connect()
stream = await streamListener()
try {
@@ -161,7 +161,7 @@ describe('Climb CRUD', () => {
afterAll(async () => {
try {
await stream.close()
- await mongoose.disconnect()
+ await inMemoryDB.close()
} catch (e) {
console.log('closing mongoose', e)
}
@@ -293,13 +293,13 @@ describe('Climb CRUD', () => {
const newIDs = await climbs.addOrUpdateClimbs(
testUser,
newBoulderingArea.metadata.area_id,
- [{ ...newBoulderProblem1, grade: 'V3' }, // good grade
- { ...newBoulderProblem2, grade: '5.9' }]) // invalid grade (YDS grade for a boulder problem)
+ [{...newBoulderProblem1, grade: 'V3'}, // good grade
+ {...newBoulderProblem2, grade: '5.9'}]) // invalid grade (YDS grade for a boulder problem)
expect(newIDs).toHaveLength(2)
const climb1 = await climbs.findOneClimbByMUUID(muid.from(newIDs[0]))
- expect(climb1?.grades).toEqual({ vscale: 'V3' })
+ expect(climb1?.grades).toEqual({vscale: 'V3'})
const climb2 = await climbs.findOneClimbByMUUID(muid.from(newIDs[1]))
expect(climb2?.grades).toEqual(undefined)
@@ -314,11 +314,11 @@ describe('Climb CRUD', () => {
if (newClimbingArea == null) fail('Expect new area to be created')
const newclimbs = [
- { ...newSportClimb1, grade: '17' }, // good sport grade
- { ...newSportClimb2, grade: '29/30', disciplines: { trad: true } }, // good trad and slash grade
- { ...newSportClimb2, grade: '5.9' }, // bad AU context grade
- { ...newIceRoute, grade: 'WI4+' }, // good WI AU context grade
- { ...newAidRoute, grade: 'A0' } // good aid grade
+ {...newSportClimb1, grade: '17'}, // good sport grade
+ {...newSportClimb2, grade: '29/30', disciplines: {trad: true}}, // good trad and slash grade
+ {...newSportClimb2, grade: '5.9'}, // bad AU context grade
+ {...newIceRoute, grade: 'WI4+'}, // good WI AU context grade
+ {...newAidRoute, grade: 'A0'} // good aid grade
]
const newIDs = await climbs.addOrUpdateClimbs(
@@ -329,12 +329,12 @@ describe('Climb CRUD', () => {
expect(newIDs).toHaveLength(newclimbs.length)
const climb1 = await climbs.findOneClimbByMUUID(muid.from(newIDs[0]))
- expect(climb1?.grades).toEqual({ ewbank: '17' })
+ expect(climb1?.grades).toEqual({ewbank: '17'})
expect(climb1?.type.sport).toBe(true)
expect(newSportClimb1?.boltsCount).toEqual(2)
const climb2 = await climbs.findOneClimbByMUUID(muid.from(newIDs[1]))
- expect(climb2?.grades).toEqual({ ewbank: '29/30' })
+ expect(climb2?.grades).toEqual({ewbank: '29/30'})
expect(climb2?.type.sport).toBe(false)
expect(climb2?.type.trad).toBe(true)
@@ -342,14 +342,14 @@ describe('Climb CRUD', () => {
expect(climb3?.grades).toEqual(undefined)
const climb4 = await climbs.findOneClimbByMUUID(muid.from(newIDs[3]))
- expect(climb4?.grades).toEqual({ wi: 'WI4+' })
+ expect(climb4?.grades).toEqual({wi: 'WI4+'})
expect(climb4?.type.sport).toBe(false)
expect(climb4?.type.trad).toBe(false)
expect(climb4?.type.bouldering).toBe(false)
expect(climb4?.type.ice).toBe(true)
const climb5 = await climbs.findOneClimbByMUUID(muid.from(newIDs[4]))
- expect(climb5?.grades).toEqual({ aid: 'A0' })
+ expect(climb5?.grades).toEqual({aid: 'A0'})
expect(climb5?.type.sport).toBe(false)
expect(climb5?.type.trad).toBe(false)
expect(climb5?.type.aid).toBe(true)
@@ -363,14 +363,14 @@ describe('Climb CRUD', () => {
const newIDs = await climbs.addOrUpdateClimbs(
testUser,
newBoulderingArea.metadata.area_id,
- [{ ...newBoulderProblem1, grade: 'V3' }, // good grade
- { ...newBoulderProblem2, grade: '23' }, // bad boulder grade
- { ...newBoulderProblem2, grade: '7B' }]) // invalid grade (font grade for a AU context boulder problem)
+ [{...newBoulderProblem1, grade: 'V3'}, // good grade
+ {...newBoulderProblem2, grade: '23'}, // bad boulder grade
+ {...newBoulderProblem2, grade: '7B'}]) // invalid grade (font grade for a AU context boulder problem)
expect(newIDs).toHaveLength(3)
const climb1 = await climbs.findOneClimbByMUUID(muid.from(newIDs[0]))
- expect(climb1?.grades).toEqual({ vscale: 'V3' })
+ expect(climb1?.grades).toEqual({vscale: 'V3'})
const climb2 = await climbs.findOneClimbByMUUID(muid.from(newIDs[1]))
expect(climb2?.grades).toEqual(undefined)
@@ -389,11 +389,11 @@ describe('Climb CRUD', () => {
if (newClimbingArea == null) fail('Expect new area to be created in Brazil')
const newclimbs = [
- { ...newSportClimb1, grade: 'VIsup' }, // good sport grade
- { ...newSportClimb2, grade: 'VIsup/VIIa', disciplines: { trad: true } }, // good trad and slash grade
- { ...newSportClimb2, grade: '5.9' }, // bad BRZ context grade
- { ...newIceRoute, grade: 'WI4+' }, // good WI BRZ context grade
- { ...newAidRoute, grade: 'A0' } // good aid grade
+ {...newSportClimb1, grade: 'VIsup'}, // good sport grade
+ {...newSportClimb2, grade: 'VIsup/VIIa', disciplines: {trad: true}}, // good trad and slash grade
+ {...newSportClimb2, grade: '5.9'}, // bad BRZ context grade
+ {...newIceRoute, grade: 'WI4+'}, // good WI BRZ context grade
+ {...newAidRoute, grade: 'A0'} // good aid grade
]
const newIDs = await climbs.addOrUpdateClimbs(
@@ -404,12 +404,12 @@ describe('Climb CRUD', () => {
expect(newIDs).toHaveLength(newclimbs.length)
const climb1 = await climbs.findOneClimbByMUUID(muid.from(newIDs[0]))
- expect(climb1?.grades).toEqual({ brazilian_crux: 'VIsup' })
+ expect(climb1?.grades).toEqual({brazilian_crux: 'VIsup'})
expect(climb1?.type.sport).toBe(true)
expect(newSportClimb1?.boltsCount).toEqual(2)
const climb2 = await climbs.findOneClimbByMUUID(muid.from(newIDs[1]))
- expect(climb2?.grades).toEqual({ brazilian_crux: 'VIsup/VIIa' })
+ expect(climb2?.grades).toEqual({brazilian_crux: 'VIsup/VIIa'})
expect(climb2?.type.sport).toBe(false)
expect(climb2?.type.trad).toBe(true)
@@ -417,14 +417,14 @@ describe('Climb CRUD', () => {
expect(climb3?.grades).toEqual(undefined)
const climb4 = await climbs.findOneClimbByMUUID(muid.from(newIDs[3]))
- expect(climb4?.grades).toEqual({ wi: 'WI4+' })
+ expect(climb4?.grades).toEqual({wi: 'WI4+'})
expect(climb4?.type.sport).toBe(false)
expect(climb4?.type.trad).toBe(false)
expect(climb4?.type.bouldering).toBe(false)
expect(climb4?.type.ice).toBe(true)
const climb5 = await climbs.findOneClimbByMUUID(muid.from(newIDs[4]))
- expect(climb5?.grades).toEqual({ aid: 'A0' })
+ expect(climb5?.grades).toEqual({aid: 'A0'})
expect(climb5?.type.sport).toBe(false)
expect(climb5?.type.trad).toBe(false)
expect(climb5?.type.aid).toBe(true)
@@ -438,14 +438,14 @@ describe('Climb CRUD', () => {
const newIDs = await climbs.addOrUpdateClimbs(
testUser,
newBoulderingArea.metadata.area_id,
- [{ ...newBoulderProblem1, grade: 'V3' }, // good grade
- { ...newBoulderProblem2, grade: '23' }, // bad boulder grade
- { ...newBoulderProblem2, grade: '7B' }]) // invalid grade (font grade for a BRZ context boulder problem)
+ [{...newBoulderProblem1, grade: 'V3'}, // good grade
+ {...newBoulderProblem2, grade: '23'}, // bad boulder grade
+ {...newBoulderProblem2, grade: '7B'}]) // invalid grade (font grade for a BRZ context boulder problem)
expect(newIDs).toHaveLength(3)
const climb1 = await climbs.findOneClimbByMUUID(muid.from(newIDs[0]))
- expect(climb1?.grades).toEqual({ vscale: 'V3' })
+ expect(climb1?.grades).toEqual({vscale: 'V3'})
const climb2 = await climbs.findOneClimbByMUUID(muid.from(newIDs[1]))
expect(climb2?.grades).toEqual(undefined)
@@ -465,21 +465,21 @@ describe('Climb CRUD', () => {
const newIDs = await climbs.addOrUpdateClimbs(
testUser,
newClimbingArea.metadata.area_id,
- [{ ...newSportClimb1, grade: '6+' }, // good UIAA grade
- { ...newSportClimb2, grade: '7-' }, // good UIAA grade
- { ...newSportClimb2, grade: '5' }, // good UIAA grade
- { ...newSportClimb1, grade: 'V6' }]) // bad UIAA grade (V-scale used)
+ [{...newSportClimb1, grade: '6+'}, // good UIAA grade
+ {...newSportClimb2, grade: '7-'}, // good UIAA grade
+ {...newSportClimb2, grade: '5'}, // good UIAA grade
+ {...newSportClimb1, grade: 'V6'}]) // bad UIAA grade (V-scale used)
expect(newIDs).toHaveLength(4)
const climb1 = await climbs.findOneClimbByMUUID(muid.from(newIDs[0]))
- expect(climb1?.grades).toEqual({ uiaa: '6+' })
+ expect(climb1?.grades).toEqual({uiaa: '6+'})
const climb2 = await climbs.findOneClimbByMUUID(muid.from(newIDs[1]))
- expect(climb2?.grades).toEqual({ uiaa: '7-' })
+ expect(climb2?.grades).toEqual({uiaa: '7-'})
const climb3 = await climbs.findOneClimbByMUUID(muid.from(newIDs[2]))
- expect(climb3?.grades).toEqual({ uiaa: '5' })
+ expect(climb3?.grades).toEqual({uiaa: '5'})
const climb4 = await climbs.findOneClimbByMUUID(muid.from(newIDs[3]))
expect(climb4?.grades).toEqual(undefined)
@@ -510,7 +510,7 @@ describe('Climb CRUD', () => {
id: newIDs[0],
name: 'new name A100',
grade: '6b',
- disciplines: sanitizeDisciplines({ bouldering: true })
+ disciplines: sanitizeDisciplines({bouldering: true})
},
{
id: newIDs[1],
@@ -652,8 +652,8 @@ describe('Climb CRUD', () => {
id: originalPitch1ID,
parentId: originalPitch1ParentID,
pitchNumber: 1,
- grades: { ewbank: '19' },
- disciplines: { sport: false, alpine: true },
+ grades: {ewbank: '19'},
+ disciplines: {sport: false, alpine: true},
length: 20,
boltsCount: 6,
description: 'Updated first pitch description'
@@ -663,8 +663,8 @@ describe('Climb CRUD', () => {
id: originalPitch2ID,
parentId: originalPitch2ParentID,
pitchNumber: 2,
- grades: { ewbank: '18' },
- disciplines: { sport: false, alpine: true },
+ grades: {ewbank: '18'},
+ disciplines: {sport: false, alpine: true},
length: 25,
boltsCount: 5,
description: 'Updated second pitch description'
diff --git a/src/model/__tests__/MutableOrganizationDataSource.ts b/src/model/__tests__/MutableOrganizationDataSource.ts
index e2f06cca..f951e5d7 100644
--- a/src/model/__tests__/MutableOrganizationDataSource.ts
+++ b/src/model/__tests__/MutableOrganizationDataSource.ts
@@ -1,12 +1,12 @@
-import mongoose from 'mongoose'
import muuid from 'uuid-mongodb'
import MutableOrganizationDataSource from '../MutableOrganizationDataSource.js'
import MutableAreaDataSource from '../MutableAreaDataSource.js'
-import { connectDB, createIndexes, getAreaModel, getOrganizationModel } from '../../db/index.js'
-import { OrganizationEditableFieldsType, OrgType } from '../../db/OrganizationTypes.js'
-import { AreaType } from '../../db/AreaTypes.js'
-import { muuidToString } from '../../utils/helpers.js'
+import {createIndexes, getAreaModel, getOrganizationModel} from '../../db/index.js'
+import {OrganizationEditableFieldsType, OrgType} from '../../db/OrganizationTypes.js'
+import {AreaType} from '../../db/AreaTypes.js'
+import {muuidToString} from '../../utils/helpers.js'
+import inMemoryDB from "../../utils/inMemoryDB.js";
describe('Organization', () => {
let organizations: MutableOrganizationDataSource
@@ -19,7 +19,7 @@ describe('Organization', () => {
const testUser = muuid.v4()
beforeAll(async () => {
- await connectDB()
+ await inMemoryDB.connect()
try { // Use the same fixed areas for testing so no need to drop and re-create on each test.
await getAreaModel().collection.drop()
} catch (e) {
@@ -57,12 +57,12 @@ describe('Organization', () => {
})
afterAll(async () => {
- await mongoose.connection.close()
+ await inMemoryDB.close()
})
it('should successfully create a document when passed valid input', async () => {
const newOrg = await organizations.addOrganization(testUser, OrgType.localClimbingOrganization, fullOrg)
- const document = { ...fullOrg }
+ const document = {...fullOrg}
expect(newOrg.displayName).toBe(document.displayName)
expect(newOrg.content?.website).toBe(document.website)
expect(newOrg.content?.email).toBe(document.email)
@@ -82,7 +82,12 @@ describe('Organization', () => {
it('should retrieve documents based on displayName', async () => {
const newOrg = await organizations.addOrganization(testUser, OrgType.localClimbingOrganization, fullOrg)
// Match should be case-insensitive.
- const displayNameSearchCursor = await organizations.findOrganizationsByFilter({ displayName: { match: 'openbeta', exactMatch: false } })
+ const displayNameSearchCursor = await organizations.findOrganizationsByFilter({
+ displayName: {
+ match: 'openbeta',
+ exactMatch: false
+ }
+ })
const displayNameSearchRes = await displayNameSearchCursor.toArray()
expect(displayNameSearchRes).toHaveLength(1)
expect(displayNameSearchRes[0]._id).toEqual(newOrg._id)
@@ -94,7 +99,7 @@ describe('Organization', () => {
associatedAreaIds: [ca.metadata.area_id, wa.metadata.area_id]
}
await organizations.updateOrganization(testUser, newOrg.orgId, document)
- const areaIdSearchCursor = await organizations.findOrganizationsByFilter({ associatedAreaIds: { includes: [ca.metadata.area_id] } })
+ const areaIdSearchCursor = await organizations.findOrganizationsByFilter({associatedAreaIds: {includes: [ca.metadata.area_id]}})
const areaIdSearchRes = await areaIdSearchCursor.toArray()
expect(areaIdSearchRes).toHaveLength(1)
expect(areaIdSearchRes[0]._id).toEqual(newOrg._id)
@@ -103,15 +108,17 @@ describe('Organization', () => {
describe('update', () => {
it('should succeed on valid input', async () => {
const newOrg = await organizations.addOrganization(testUser, OrgType.localClimbingOrganization, emptyOrg)
- const document = { ...fullOrg }
+ const document = {...fullOrg}
const updatedOrg = await organizations.updateOrganization(testUser, newOrg.orgId, document)
expect(updatedOrg).toBeDefined()
- if (updatedOrg == null) { fail('should not reach here.') }
+ if (updatedOrg == null) {
+ fail('should not reach here.')
+ }
expect(updatedOrg.associatedAreaIds.map(muuidToString).sort())
- .toStrictEqual(document?.associatedAreaIds?.map(muuidToString).sort())
+ .toStrictEqual(document?.associatedAreaIds?.map(muuidToString).sort())
expect(updatedOrg.excludedAreaIds.map(muuidToString).sort())
- .toStrictEqual(document?.excludedAreaIds?.map(muuidToString).sort())
+ .toStrictEqual(document?.excludedAreaIds?.map(muuidToString).sort())
expect(updatedOrg.displayName).toBe(document.displayName)
expect(updatedOrg.content?.website).toBe(document.website)
expect(updatedOrg.content?.email).toBe(document.email)
@@ -132,8 +139,8 @@ describe('Organization', () => {
associatedAreaIds: [muuid.v4()]
}
await expect(organizations.updateOrganization(testUser, newOrg.orgId, document))
- .rejects
- .toThrow(/Organization update error. Reason: Associated areas not found: /)
+ .rejects
+ .toThrow(/Organization update error. Reason: Associated areas not found: /)
})
})
})
diff --git a/src/model/__tests__/UserDataSource.ts b/src/model/__tests__/UserDataSource.ts
index 2921b247..e460ac4b 100644
--- a/src/model/__tests__/UserDataSource.ts
+++ b/src/model/__tests__/UserDataSource.ts
@@ -1,16 +1,17 @@
import mongoose from 'mongoose'
import muuid from 'uuid-mongodb'
-import { jest } from '@jest/globals'
+import {jest} from '@jest/globals'
-import { connectDB, getUserModel } from '../../db/index.js'
+import {getUserModel} from '../../db/index.js'
import UserDataSource from '../UserDataSource.js'
-import { UpdateProfileGQLInput } from '../../db/UserTypes.js'
+import {UpdateProfileGQLInput} from '../../db/UserTypes.js'
+import inMemoryDB from "../../utils/inMemoryDB.js";
describe('UserDataSource', () => {
let users: UserDataSource
beforeAll(async () => {
- await connectDB()
+ await inMemoryDB.connect()
const userModel = getUserModel()
try {
await userModel.collection.drop()
@@ -22,7 +23,7 @@ describe('UserDataSource', () => {
})
afterAll(async () => {
- await mongoose.connection.close()
+ await inMemoryDB.close()
})
afterEach(() => {
@@ -85,7 +86,7 @@ describe('UserDataSource', () => {
expect(u2?._id.toUUID().toString()).toEqual(input.userUuid)
// should allow website as an empty string to clear existing value
- await users.createOrUpdateUserProfile(updater, { userUuid: input.userUuid, website: '' })
+ await users.createOrUpdateUserProfile(updater, {userUuid: input.userUuid, website: ''})
u2 = await users.getUserPublicProfile(username)
@@ -143,8 +144,8 @@ describe('UserDataSource', () => {
await users.createOrUpdateUserProfile(updater, input)
jest
- .spyOn(UserDataSource, 'calculateLastUpdatedInDays')
- .mockImplementation(() => 14)
+ .spyOn(UserDataSource, 'calculateLastUpdatedInDays')
+ .mockImplementation(() => 14)
const newInput: UpdateProfileGQLInput = {
userUuid: input.userUuid,
diff --git a/src/model/__tests__/ticks.ts b/src/model/__tests__/ticks.ts
index 5325f34c..7da08d59 100644
--- a/src/model/__tests__/ticks.ts
+++ b/src/model/__tests__/ticks.ts
@@ -1,11 +1,11 @@
-import mongoose from 'mongoose'
-import { produce } from 'immer'
+import {produce} from 'immer'
import TickDataSource from '../TickDataSource.js'
-import { connectDB, getTickModel, getUserModel } from '../../db/index.js'
-import { TickInput } from '../../db/TickTypes.js'
+import {getTickModel, getUserModel} from '../../db/index.js'
+import {TickInput} from '../../db/TickTypes.js'
import muuid from 'uuid-mongodb'
import UserDataSource from '../UserDataSource.js'
-import { UpdateProfileGQLInput } from '../../db/UserTypes.js'
+import {UpdateProfileGQLInput} from '../../db/UserTypes.js'
+import inMemoryDB from "../../utils/inMemoryDB.js";
const userId = muuid.v4()
@@ -51,7 +51,7 @@ describe('Ticks', () => {
beforeAll(async () => {
console.log('#BeforeAll Ticks')
- await connectDB()
+ await inMemoryDB.connect()
try {
await getTickModel().collection.drop()
@@ -65,7 +65,7 @@ describe('Ticks', () => {
})
afterAll(async () => {
- await mongoose.connection.close()
+ await inMemoryDB.close()
})
afterEach(async () => {
@@ -76,7 +76,7 @@ describe('Ticks', () => {
// test adding tick
it('should create a new tick for the associated climb', async () => {
const tick = await ticks.addTick(toTest)
- const newTick = await tickModel.findOne({ userId: toTest.userId })
+ const newTick = await tickModel.findOne({userId: toTest.userId})
expect(newTick?._id).toEqual(tick._id)
})
@@ -87,7 +87,7 @@ describe('Ticks', () => {
if (tick == null) {
fail('Tick should not be null')
}
- const newTick = await ticks.editTick({ _id: tick._id }, tickUpdate)
+ const newTick = await ticks.editTick({_id: tick._id}, tickUpdate)
if (newTick == null) {
fail('The new tick should not be null')
@@ -106,7 +106,7 @@ describe('Ticks', () => {
}
await ticks.deleteTick(tick._id)
- const newTick = await tickModel.findOne({ _id: tick._id })
+ const newTick = await tickModel.findOne({_id: tick._id})
expect(newTick).toBeNull()
})
@@ -120,13 +120,13 @@ describe('Ticks', () => {
}
expect(newTicks?.length).toEqual(testImport.length)
- const tick1 = await tickModel.findOne({ _id: newTicks[0]._id })
+ const tick1 = await tickModel.findOne({_id: newTicks[0]._id})
expect(tick1?._id).toEqual(newTicks[0]._id)
- const tick2 = await tickModel.findOne({ _id: newTicks[1]._id })
+ const tick2 = await tickModel.findOne({_id: newTicks[1]._id})
expect(tick2?._id).toEqual(newTicks[1]._id)
- const tick3 = await tickModel.findOne({ _id: newTicks[2]._id })
+ const tick3 = await tickModel.findOne({_id: newTicks[2]._id})
expect(tick3?._id).toEqual(newTicks[2]._id)
})
@@ -143,7 +143,7 @@ describe('Ticks', () => {
fail('Should add a new tick')
}
- const newTicks = await ticks.ticksByUser({ userId })
+ const newTicks = await ticks.ticksByUser({userId})
expect(newTicks.length).toEqual(1)
})
@@ -168,7 +168,7 @@ describe('Ticks', () => {
}
await ticks.deleteAllTicks(userId.toUUID().toString())
- const newTick = await tickModel.findOne({ userId })
+ const newTick = await tickModel.findOne({userId})
expect(newTick).toBeNull()
})
@@ -181,7 +181,7 @@ describe('Ticks', () => {
}
await ticks.deleteImportedTicks(userId.toUUID().toString())
- const newTick = await tickModel.findOne({ _id: OBTick._id })
+ const newTick = await tickModel.findOne({_id: OBTick._id})
expect(newTick?._id).toEqual(OBTick._id)
expect(newTick?.notes).toEqual('Not sandbagged')
})
diff --git a/src/model/__tests__/updateAreas.ts b/src/model/__tests__/updateAreas.ts
index 82b7cadb..e5a1d611 100644
--- a/src/model/__tests__/updateAreas.ts
+++ b/src/model/__tests__/updateAreas.ts
@@ -1,11 +1,11 @@
-import mongoose from 'mongoose'
import muuid from 'uuid-mongodb'
-import { geometry } from '@turf/helpers'
+import {geometry} from '@turf/helpers'
import MutableAreaDataSource from '../MutableAreaDataSource.js'
import MutableClimbDataSource from '../MutableClimbDataSource.js'
-import { connectDB, createIndexes, getAreaModel, getClimbModel } from '../../db/index.js'
-import { AreaEditableFieldsType, UpdateSortingOrderType } from '../../db/AreaTypes.js'
+import {createIndexes, getAreaModel, getClimbModel} from '../../db/index.js'
+import {AreaEditableFieldsType, UpdateSortingOrderType} from '../../db/AreaTypes.js'
+import inMemoryDB from "../../utils/inMemoryDB.js";
describe('Areas', () => {
let areas: MutableAreaDataSource
@@ -13,7 +13,7 @@ describe('Areas', () => {
const testUser = muuid.v4()
beforeAll(async () => {
- await connectDB()
+ await inMemoryDB.connect()
try {
await getAreaModel().collection.drop()
@@ -27,7 +27,7 @@ describe('Areas', () => {
})
afterAll(async () => {
- await mongoose.connection.close()
+ await inMemoryDB.close()
})
it('should create a country by Alpha-3 country code', async () => {
@@ -71,17 +71,17 @@ describe('Areas', () => {
// Verify paths and ancestors
if (theBug != null) { // make TS happy
expect(theBug.ancestors)
- .toEqual(`${canada.metadata.area_id.toUUID().toString()},${theBug?.metadata.area_id.toUUID().toString()}`)
+ .toEqual(`${canada.metadata.area_id.toUUID().toString()},${theBug?.metadata.area_id.toUUID().toString()}`)
expect(theBug.pathTokens)
- .toEqual([canada.area_name, theBug.area_name])
+ .toEqual([canada.area_name, theBug.area_name])
}
})
it('should allow adding child areas to empty leaf area', async () => {
let parent = await areas.addArea(testUser, 'My house', null, 'can')
- await areas.updateArea(testUser, parent.metadata.area_id, { isLeaf: true, isBoulder: true })
+ await areas.updateArea(testUser, parent.metadata.area_id, {isLeaf: true, isBoulder: true})
- const newClimb = await climbs.addOrUpdateClimbs(testUser, parent.metadata.area_id, [{ name: 'Big Mac' }])
+ const newClimb = await climbs.addOrUpdateClimbs(testUser, parent.metadata.area_id, [{name: 'Big Mac'}])
// Try to add a new area when there's already a climb
await expect(areas.addArea(testUser, 'Kitchen', parent.metadata.area_id)).rejects.toThrow(/Adding new areas to a leaf or boulder area is not allowed/)
@@ -155,12 +155,12 @@ describe('Areas', () => {
it('should not update country name and code', async () => {
const country = await areas.addCountry('lao')
if (country == null) fail()
- await expect(areas.updateArea(testUser, country.metadata.area_id, { areaName: 'Foo' })).rejects.toThrowError()
+ await expect(areas.updateArea(testUser, country.metadata.area_id, {areaName: 'Foo'})).rejects.toThrowError()
// eslint-disable-next-line
await new Promise(res => setTimeout(res, 2000))
- await expect(areas.updateArea(testUser, country.metadata.area_id, { shortCode: 'Foo' })).rejects.toThrowError()
+ await expect(areas.updateArea(testUser, country.metadata.area_id, {shortCode: 'Foo'})).rejects.toThrowError()
})
it('should delete a subarea', async () => {
@@ -231,23 +231,23 @@ describe('Areas', () => {
const fr = await areas.addCountry('fra')
await areas.addArea(testUser, 'Verdon Gorge', fr.metadata.area_id)
await expect(areas.addArea(testUser, 'Verdon Gorge', fr.metadata.area_id))
- .rejects.toThrowError('E11000 duplicate key error')
+ .rejects.toThrowError('E11000 duplicate key error')
})
it('should fail when adding without a parent country', async () => {
await expect(areas.addArea(testUser, 'Peak District ', null, 'GB'))
- .rejects.toThrowError()
+ .rejects.toThrowError()
})
it('should fail when adding with a non-existent parent id', async () => {
const notInDb = muuid.from('abf6cb8b-8461-45c3-b46b-5997444be867')
await expect(areas.addArea(testUser, 'Land\'s End ', notInDb))
- .rejects.toThrowError()
+ .rejects.toThrowError()
})
it('should fail when adding with null parents', async () => {
await expect(areas.addArea(testUser, 'Land\'s End ', null, '1q1'))
- .rejects.toThrowError()
+ .rejects.toThrowError()
})
it('should update areas sorting order', async () => {
diff --git a/src/server.ts b/src/server.ts
index 05fe35f1..be737729 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -1,29 +1,29 @@
-import { ApolloServer } from 'apollo-server-express'
+import {ApolloServer} from 'apollo-server-express'
import mongoose from 'mongoose'
-import { applyMiddleware } from 'graphql-middleware'
-import { graphqlSchema } from './graphql/resolvers.js'
+import {applyMiddleware} from 'graphql-middleware'
+import {graphqlSchema} from './graphql/resolvers.js'
import MutableAreaDataSource from './model/MutableAreaDataSource.js'
import ChangeLogDataSource from './model/ChangeLogDataSource.js'
import MutableMediaDataSource from './model/MutableMediaDataSource.js'
import MutableClimbDataSource from './model/MutableClimbDataSource.js'
import TickDataSource from './model/TickDataSource.js'
-import { authMiddleware, createContext } from './auth/middleware.js'
+import {authMiddleware, createContext} from './auth/middleware.js'
import permissions from './auth/permissions.js'
-import { localDevBypassAuthContext, localDevBypassAuthMiddleware } from './auth/local-dev/middleware.js'
+import {localDevBypassAuthContext, localDevBypassAuthMiddleware} from './auth/local-dev/middleware.js'
import localDevBypassAuthPermissions from './auth/local-dev/permissions.js'
import XMediaDataSource from './model/XMediaDataSource.js'
import PostDataSource from './model/PostDataSource.js'
import MutableOrgDS from './model/MutableOrganizationDataSource.js'
-import type { Context } from './types.js'
-import type { DataSources } from 'apollo-server-core/dist/graphqlOptions'
+import type {Context} from './types.js'
+import type {DataSources} from 'apollo-server-core/dist/graphqlOptions'
import UserDataSource from './model/UserDataSource.js'
import express from 'express'
import * as http from 'http'
import bodyParser from 'body-parser'
-import { importJsonRequestHandler } from './db/import/json/request-handler.js'
+import {importJsonRequestHandler} from './db/import/json/request-handler.js'
-export async function startServer (port = 4000): Promise<ApolloServer> {
+export async function createServer(): Promise<{ app: express.Application, server: ApolloServer }> {
const schema = applyMiddleware(
graphqlSchema,
(process.env.LOCAL_DEV_BYPASS_AUTH === 'true' ? localDevBypassAuthPermissions : permissions).generate(graphqlSchema)
@@ -45,7 +45,6 @@ export async function startServer (port = 4000): Promise {
})
const app = express()
- const httpServer = http.createServer(app)
const server = new ApolloServer({
introspection: true,
@@ -54,17 +53,31 @@ export async function startServer (port = 4000): Promise {
dataSources,
cache: 'bounded'
})
- app.post('/import/json', [
+ // server must be started before applying middleware
+ await server.start()
+
+ app.post('/import', [
process.env.LOCAL_DEV_BYPASS_AUTH === 'true' ? localDevBypassAuthMiddleware : authMiddleware,
bodyParser.json(),
importJsonRequestHandler
])
+ server.applyMiddleware({app, path: '/'})
- await server.start()
- server.applyMiddleware({ app, path: '/' })
+ return {app, server}
+}
- await new Promise<void>((resolve) => httpServer.listen({ port }, resolve))
- console.log(`🚀 Server ready at http://localhost:4000${server.graphqlPath}`)
+export async function startServer({app, server, port = 4000}: {
+ app: express.Application,
+ server: ApolloServer,
+ port?: number
+}) {
+ const httpServer = http.createServer(app)
- return server
-}
+ httpServer.on('error', (e) => {
+ console.error('Error starting server', e)
+ throw e
+ })
+
+ await new Promise<void>((resolve) => httpServer.listen({port}, resolve))
+ console.log(`🚀 Server ready at http://localhost:${port}${server.graphqlPath}`)
+}
\ No newline at end of file
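Editor's note (not part of the diff): splitting the old `startServer` into `createServer` (build the Express app and ApolloServer, apply middleware) and `startServer` (bind the HTTP listener) lets tests drive the app in-process while a production entry point still listens on a port. A rough sketch of how an entry point might now wire the two together; the actual caller is not part of this diff:

```ts
import { createServer, startServer } from './server.js'

async function main (): Promise<void> {
  // build the app + ApolloServer without opening a socket
  const { app, server } = await createServer()
  // bind the HTTP listener only where we actually want to serve traffic
  await startServer({ app, server, port: 4000 })
}

void main()
```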
diff --git a/src/utils/helpers.ts b/src/utils/helpers.ts
index a6cc09e7..bee0c6ac 100644
--- a/src/utils/helpers.ts
+++ b/src/utils/helpers.ts
@@ -1,5 +1,6 @@
-import { MUUID } from 'uuid-mongodb'
-import { Point } from '@turf/helpers'
+import {MUUID} from 'uuid-mongodb'
+import {Point} from '@turf/helpers'
+import {ClientSession} from "mongoose";
export const muuidToString = (m: MUUID): string => m.toUUID().toString()
@@ -15,7 +16,7 @@ export const muuidToString = (m: MUUID): string => m.toUUID().toString()
* }
* @param _value
*/
-export function exhaustiveCheck (_value: never): never {
+export function exhaustiveCheck(_value: never): never {
throw new Error(`ERROR! Enum not handled for ${JSON.stringify(_value)}`)
}
@@ -24,3 +25,15 @@ export const geojsonPointToLatitude = (point?: Point): number | undefined => poi
export const NON_ALPHANUMERIC_REGEX = /[\W_\s]+/g
export const canonicalizeUsername = (username: string): string => username.replaceAll(NON_ALPHANUMERIC_REGEX, '')
+
+
+// withTransaction() doesn't return the callback result
+// see https://jira.mongodb.org/browse/NODE-2014
+export const withTransaction = async <T>(session: ClientSession, closure: () => Promise<T>): Promise<T | undefined> => {
+ let result: T | undefined;
+ await session.withTransaction(async () => {
+ result = await closure();
+ return result;
+ });
+ return result;
+};
\ No newline at end of file
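Editor's note (not part of the diff): the `withTransaction` helper exists because `session.withTransaction()` resolves with the transaction outcome rather than the callback's return value (NODE-2014), which is why the old `addOrUpdateClimbs` needed the `ret` variable and a `@ts-expect-error`. A usage sketch under that assumption; the caller and its import path are illustrative:

```ts
import mongoose from 'mongoose'
import { withTransaction } from '../utils/helpers.js'

// hypothetical caller: run session-bound work and keep its result
const doWorkInTransaction = async (): Promise<number | undefined> => {
  const session = await mongoose.startSession()
  try {
    // resolves with the closure's return value, unlike session.withTransaction()
    return await withTransaction(session, async () => 42)
  } finally {
    await session.endSession()
  }
}
```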
diff --git a/src/utils/inMemoryDB.ts b/src/utils/inMemoryDB.ts
index 567bdd46..a7790556 100644
--- a/src/utils/inMemoryDB.ts
+++ b/src/utils/inMemoryDB.ts
@@ -1,8 +1,9 @@
-import mongoose, { ConnectOptions } from 'mongoose'
-import { ChangeStream, MongoClient } from 'mongodb'
-import { MongoMemoryReplSet } from 'mongodb-memory-server'
-import { defaultPostConnect, checkVar } from '../db/index.js'
-import { logger } from '../logger.js'
+import mongoose, {ConnectOptions} from 'mongoose'
+import {ChangeStream, ChangeStreamDocument, MongoClient} from 'mongodb'
+import {MongoMemoryReplSet} from 'mongodb-memory-server'
+import {checkVar, defaultPostConnect} from '../db/index.js'
+import {logger} from '../logger.js'
+import {testStreamListener} from "../db/edit/streamListener";
/**
* In-memory Mongo replset used for testing.
@@ -10,15 +11,15 @@ import { logger } from '../logger.js'
* Need a replset to faciliate transactions.
*/
let mongod: MongoMemoryReplSet
-let stream: ChangeStream
+let stream: ChangeStream | undefined
/**
* Connect to the in-memory database.
*/
-const connect = async (): Promise<void> => {
+export const connect = async (onChange?: (change: ChangeStreamDocument) => void): Promise<void> => {
mongod = await MongoMemoryReplSet.create({
// Stream listener listens on DB denoted by 'MONGO_DBNAME' env var.
- replSet: { count: 1, storageEngine: 'wiredTiger', dbName: checkVar('MONGO_DBNAME') }
+ replSet: {count: 1, storageEngine: 'wiredTiger', dbName: checkVar('MONGO_DBNAME')}
})
const uri = await mongod.getUri(checkVar('MONGO_DBNAME'))
logger.info(`Connecting to in-memory database ${uri}`)
@@ -27,14 +28,14 @@ const connect = async (): Promise => {
}
await mongoose.connect(uri, mongooseOpts)
- stream = await defaultPostConnect()
+ stream = await defaultPostConnect(() => testStreamListener(onChange))
}
/**
* Drop database, close the connection and stop mongod.
*/
-const close = async (): Promise<void> => {
- await stream.close()
+export const close = async (): Promise<void> => {
+ await stream?.close()
await mongoose.connection.dropDatabase()
await mongoose.connection.close()
await mongod.stop()
@@ -43,7 +44,7 @@ const close = async (): Promise => {
/**
* Remove all the data for all db collections.
*/
-const clear = async (): Promise<void> => {
+export const clear = async (): Promise<void> => {
const collections = mongoose.connection.collections
for (const key in collections) {
@@ -79,4 +80,4 @@ export interface InMemoryDB {
insertDirectly: (collection: string, documents: any[]) => Promise<void>
}
-export default { connect, close, clear, insertDirectly }
+export default {connect, close, clear, insertDirectly, stream}
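Editor's note (not part of the diff): with `connect`, `clear`, and `close` now exported (and `connect` optionally taking a change-stream callback), the in-memory replset slots straight into Jest lifecycle hooks. Roughly how the suites above wire it up; the relative import path is illustrative:

```ts
import { afterAll, afterEach, beforeAll } from '@jest/globals'
import inMemoryDB from '../utils/inMemoryDB.js'

beforeAll(async () => {
  // spins up a single-node in-memory replica set and connects mongoose to it
  await inMemoryDB.connect()
})

afterEach(async () => {
  // wipe every collection between tests
  await inMemoryDB.clear()
})

afterAll(async () => {
  // drop the database, close the connection and stop mongod
  await inMemoryDB.close()
})
```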
diff --git a/src/utils/testUtils.ts b/src/utils/testUtils.ts
index 00bb170b..582de409 100644
--- a/src/utils/testUtils.ts
+++ b/src/utils/testUtils.ts
@@ -1,10 +1,11 @@
import jwt from 'jsonwebtoken'
-import { jest } from '@jest/globals'
+import {jest} from '@jest/globals'
import request from 'supertest'
+import type {InMemoryDB} from './inMemoryDB.js'
import inMemoryDB from './inMemoryDB.js'
-import type { InMemoryDB } from './inMemoryDB.js'
-import { startServer } from '../server.js'
-import { ApolloServer } from 'apollo-server-express'
+import {createServer} from '../server.js'
+import {ApolloServer} from 'apollo-server-express'
+import express from "express";
const PORT = 4000
@@ -15,41 +16,50 @@ interface QueryAPIProps {
userUuid: string
roles?: string[]
port?: number
+ app?: express.Application
}
/*
* Helper function for querying the locally-served API. It mocks JWT verification
* so we can pretend to have an role we want when calling the API.
*/
-export const queryAPI = async ({ query, operationName, variables, userUuid, roles = [], port = PORT }: QueryAPIProps): Promise<request.Response> => {
+export const queryAPI = async ({
+ query,
+ operationName,
+ variables,
+ userUuid,
+ roles = [],
+ app,
+ port = PORT
+ }: QueryAPIProps): Promise<request.Response> => {
// Avoid needing to pass in actual signed tokens.
const jwtSpy = jest.spyOn(jwt, 'verify')
jwtSpy.mockImplementation(() => {
return {
- // Roles defined at https://manage.auth0.com/dashboard/us/dev-fmjy7n5n/roles
+ // Roles defined at https://manage.auth0.com/dashboard/us/dev-fmjy7n5n/roles
'https://tacos.openbeta.io/roles': roles,
'https://tacos.openbeta.io/uuid': userUuid
}
})
- const queryObj = { query, operationName, variables }
- const response = await request(`http://localhost:${port}`)
- .post('/')
- .send(queryObj)
- .set('Authorization', 'Bearer placeholder-jwt-see-SpyOn')
-
- return response
+ const queryObj = {query, operationName, variables}
+ return request(app ?? `http://localhost:${port}`)
+ .post('/')
+ .send(queryObj)
+ .set('Authorization', 'Bearer placeholder-jwt-see-SpyOn');
}
export interface SetUpServerReturnType {
server: ApolloServer
+ app: express.Application
inMemoryDB: InMemoryDB
}
+
/*
* Starts Apollo server and has Mongo inMemory replset connect to it.
*/
-export const setUpServer = async (port = PORT): Promise<SetUpServerReturnType> => {
+export const setUpServer = async (): Promise<SetUpServerReturnType> => {
await inMemoryDB.connect()
- const server = await startServer(port)
- return { server, inMemoryDB }
+ const {app, server} = await createServer()
+ return {app, server, inMemoryDB}
}
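Editor's note (not part of the diff): the important change in `queryAPI` is the target handed to supertest: an `express.Application` is exercised in-process, whereas the old string URL required a server already listening on the port. A minimal sketch of the difference, assuming the in-memory DB is connected and auth is stubbed or bypassed as in the helper above:

```ts
import request from 'supertest'
import { createServer } from '../server.js'

const { app } = await createServer()

// in-process: supertest wraps the app itself, no listener on :4000 required
const inProcess = await request(app)
  .post('/')
  .send({ query: '{ __typename }' })

// over the network: only works after startServer() has bound the port
const overHttp = await request('http://localhost:4000')
  .post('/')
  .send({ query: '{ __typename }' })

console.log(inProcess.statusCode, overHttp.statusCode)
```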
diff --git a/yarn.lock b/yarn.lock
index 19333e5c..a0cfeaad 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -7667,6 +7667,11 @@ vary@^1, vary@~1.1.2:
resolved "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz"
integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==
+wait-for-expect@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/wait-for-expect/-/wait-for-expect-3.0.2.tgz#d2f14b2f7b778c9b82144109c8fa89ceaadaa463"
+ integrity sha512-cfS1+DZxuav1aBYbaO/kE06EOS8yRw7qOFoD3XtjTkYvCvh3zUvNST8DXK/nPaeqIzIv3P3kL3lRJn8iwOiSag==
+
walker@^1.0.8:
version "1.0.8"
resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f"