diff --git a/src/__tests__/areas.ts b/src/__tests__/areas.ts index 602c177..995b3cc 100644 --- a/src/__tests__/areas.ts +++ b/src/__tests__/areas.ts @@ -1,14 +1,14 @@ -import {ApolloServer} from 'apollo-server-express' +import { ApolloServer } from 'apollo-server-express' import muuid from 'uuid-mongodb' -import {jest} from '@jest/globals' +import { jest } from '@jest/globals' import MutableAreaDataSource from '../model/MutableAreaDataSource.js' import MutableOrganizationDataSource from '../model/MutableOrganizationDataSource.js' -import {AreaType} from '../db/AreaTypes.js' -import {OrganizationEditableFieldsType, OrganizationType, OrgType} from '../db/OrganizationTypes.js' -import {queryAPI, setUpServer} from '../utils/testUtils.js' -import {muuidToString} from '../utils/helpers.js' -import {InMemoryDB} from "../utils/inMemoryDB.js"; -import express from "express"; +import { AreaType } from '../db/AreaTypes.js' +import { OrganizationEditableFieldsType, OrganizationType, OrgType } from '../db/OrganizationTypes.js' +import { queryAPI, setUpServer } from '../utils/testUtils.js' +import { muuidToString } from '../utils/helpers.js' +import { InMemoryDB } from '../utils/inMemoryDB.js' +import express from 'express' jest.setTimeout(60000) @@ -27,7 +27,7 @@ describe('areas API', () => { let wa: AreaType beforeAll(async () => { - ({server, inMemoryDB, app} = await setUpServer()) + ({ server, inMemoryDB, app } = await setUpServer()) // Auth0 serializes uuids in "relaxed" mode, resulting in this hex string format // "59f1d95a-627d-4b8c-91b9-389c7424cb54" instead of base64 "WfHZWmJ9S4yRuTicdCTLVA==". user = muuid.mode('relaxed').v4() @@ -69,17 +69,17 @@ describe('areas API', () => { excludedAreaIds: [ca.metadata.area_id] } alphaOrg = await organizations.addOrganization(user, OrgType.localClimbingOrganization, alphaFields) - .then((res: OrganizationType | null) => { - if (res === null) throw new Error('Failure mocking organization.') - return res - }) + .then((res: OrganizationType | null) => { + if (res === null) throw new Error('Failure mocking organization.') + return res + }) }) it('retrieves an area and lists associated organizations', async () => { const response = await queryAPI({ query: areaQuery, operationName: 'area', - variables: {input: wa.metadata.area_id}, + variables: { input: wa.metadata.area_id }, userUuid, app }) @@ -95,7 +95,7 @@ describe('areas API', () => { const response = await queryAPI({ query: areaQuery, operationName: 'area', - variables: {input: ca.metadata.area_id}, + variables: { input: ca.metadata.area_id }, userUuid, app }) diff --git a/src/__tests__/history.ts b/src/__tests__/history.ts index dd6d381..d784ea3 100644 --- a/src/__tests__/history.ts +++ b/src/__tests__/history.ts @@ -1,15 +1,15 @@ -import {ApolloServer} from 'apollo-server-express' +import { ApolloServer } from 'apollo-server-express' import muuid from 'uuid-mongodb' -import {jest} from '@jest/globals' +import { jest } from '@jest/globals' import MutableAreaDataSource from '../model/MutableAreaDataSource.js' import MutableOrganizationDataSource from '../model/MutableOrganizationDataSource.js' import MutableClimbDataSource from '../model/MutableClimbDataSource.js' -import {AreaType} from '../db/AreaTypes.js' -import {OrganizationType, OrgType} from '../db/OrganizationTypes.js' -import {muuidToString} from '../utils/helpers.js' -import {queryAPI, setUpServer} from '../utils/testUtils.js' -import {InMemoryDB} from "../utils/inMemoryDB.js"; -import express from "express"; +import { AreaType } from 
'../db/AreaTypes.js' +import { OrganizationType, OrgType } from '../db/OrganizationTypes.js' +import { muuidToString } from '../utils/helpers.js' +import { queryAPI, setUpServer } from '../utils/testUtils.js' +import { InMemoryDB } from '../utils/inMemoryDB.js' +import express from 'express' jest.setTimeout(60000) @@ -26,7 +26,7 @@ describe('history API', () => { let climbs: MutableClimbDataSource beforeAll(async () => { - ({server, inMemoryDB, app} = await setUpServer()) + ({ server, inMemoryDB, app } = await setUpServer()) // Auth0 serializes uuids in "relaxed" mode, resulting in this hex string format // "59f1d95a-627d-4b8c-91b9-389c7424cb54" instead of base64 "WfHZWmJ9S4yRuTicdCTLVA==". user = muuid.mode('relaxed').v4() @@ -104,12 +104,12 @@ describe('history API', () => { email: 'admin@alphaopenbeta.com' } alphaOrg = await organizations.addOrganization(user, OrgType.localClimbingOrganization, alphaFields) - climbIds = await climbs.addOrUpdateClimbs(user, ca.metadata.area_id, [{name: 'Alpha Climb'}]) + climbIds = await climbs.addOrUpdateClimbs(user, ca.metadata.area_id, [{ name: 'Alpha Climb' }]) // Query for changes and ensure they are tracked. const resp = await queryAPI({ query: QUERY_RECENT_CHANGE_HISTORY, - variables: {filter: {}}, + variables: { filter: {} }, userUuid, app }) diff --git a/src/__tests__/organizations.ts b/src/__tests__/organizations.ts index 7e4d038..39ac723 100644 --- a/src/__tests__/organizations.ts +++ b/src/__tests__/organizations.ts @@ -1,15 +1,15 @@ -import {ApolloServer} from 'apollo-server-express' +import { ApolloServer } from 'apollo-server-express' import muuid from 'uuid-mongodb' import MutableAreaDataSource from '../model/MutableAreaDataSource.js' import MutableOrganizationDataSource from '../model/MutableOrganizationDataSource.js' -import {AreaType} from '../db/AreaTypes.js' -import {OperationType, OrganizationEditableFieldsType, OrganizationType, OrgType} from '../db/OrganizationTypes.js' -import {changelogDataSource} from '../model/ChangeLogDataSource.js' -import {queryAPI, setUpServer} from '../utils/testUtils.js' -import {muuidToString} from '../utils/helpers.js' -import {validate as validateMuuid} from 'uuid' -import {InMemoryDB} from "../utils/inMemoryDB.js"; -import express from "express"; +import { AreaType } from '../db/AreaTypes.js' +import { OperationType, OrganizationEditableFieldsType, OrganizationType, OrgType } from '../db/OrganizationTypes.js' +import { changelogDataSource } from '../model/ChangeLogDataSource.js' +import { queryAPI, setUpServer } from '../utils/testUtils.js' +import { muuidToString } from '../utils/helpers.js' +import { validate as validateMuuid } from 'uuid' +import { InMemoryDB } from '../utils/inMemoryDB.js' +import express from 'express' describe('organizations API', () => { let server: ApolloServer @@ -26,7 +26,7 @@ describe('organizations API', () => { let wa: AreaType beforeAll(async () => { - ({server, inMemoryDB, app} = await setUpServer()) + ({ server, inMemoryDB, app } = await setUpServer()) // Auth0 serializes uuids in "relaxed" mode, resulting in this hex string format // "59f1d95a-627d-4b8c-91b9-389c7424cb54" instead of base64 "WfHZWmJ9S4yRuTicdCTLVA==". 
user = muuid.mode('relaxed').v4() @@ -85,7 +85,7 @@ describe('organizations API', () => { const createResponse = await queryAPI({ query: createQuery, operationName: 'addOrganization', - variables: {input: {displayName: 'Friends of Openbeta', orgType: 'LOCAL_CLIMBING_ORGANIZATION'}}, + variables: { input: { displayName: 'Friends of Openbeta', orgType: 'LOCAL_CLIMBING_ORGANIZATION' } }, userUuid, roles: ['user_admin'], app @@ -165,7 +165,7 @@ describe('organizations API', () => { const response = await queryAPI({ query: createQuery, operationName: 'addOrganization', - variables: {input: {displayName: 'Friends of Openbeta', orgType: 'LOCAL_CLIMBING_ORGANIZATION'}}, + variables: { input: { displayName: 'Friends of Openbeta', orgType: 'LOCAL_CLIMBING_ORGANIZATION' } }, userUuid, roles: ['editor'], app @@ -222,20 +222,20 @@ describe('organizations API', () => { hardwareReportLink: 'https://alphaopenbeta.com/reporthardware' } alphaOrg = await organizations.addOrganization(user, OrgType.localClimbingOrganization, alphaFields) - .then((res: OrganizationType | null) => { - if (res === null) throw new Error('Failure mocking organization.') - return res - }) + .then((res: OrganizationType | null) => { + if (res === null) throw new Error('Failure mocking organization.') + return res + }) deltaFields = { displayName: 'Delta OpenBeta Club', email: 'admin@deltaopenbeta.com' } deltaOrg = await organizations.addOrganization(user, OrgType.localClimbingOrganization, deltaFields) - .then((res: OrganizationType | null) => { - if (res === null) throw new Error('Failure mocking organization.') - return res - }) + .then((res: OrganizationType | null) => { + if (res === null) throw new Error('Failure mocking organization.') + return res + }) gammaFields = { displayName: 'Delta Gamma OpenBeta Club', @@ -243,17 +243,17 @@ describe('organizations API', () => { excludedAreaIds: [wa.metadata.area_id] } gammaOrg = await organizations.addOrganization(user, OrgType.localClimbingOrganization, gammaFields) - .then((res: OrganizationType | null) => { - if (res === null) throw new Error('Failure mocking organization.') - return res - }) + .then((res: OrganizationType | null) => { + if (res === null) throw new Error('Failure mocking organization.') + return res + }) }) it('retrieves an organization with an MUUID', async () => { const response = await queryAPI({ query: organizationQuery, operationName: 'organization', - variables: {input: muuidToString(alphaOrg.orgId)}, + variables: { input: muuidToString(alphaOrg.orgId) }, userUuid, app }) @@ -272,7 +272,7 @@ describe('organizations API', () => { const response = await queryAPI({ query: organizationsQuery, operationName: 'organizations', - variables: {filter: {displayName: {match: 'Delta OpenBeta Club', exactMatch: true}}}, + variables: { filter: { displayName: { match: 'Delta OpenBeta Club', exactMatch: true } } }, userUuid, app }) @@ -287,7 +287,7 @@ describe('organizations API', () => { const response = await queryAPI({ query: organizationsQuery, operationName: 'organizations', - variables: {filter: {displayName: {match: 'delta', exactMatch: false}}}, + variables: { filter: { displayName: { match: 'delta', exactMatch: false } } }, userUuid, app }) @@ -316,7 +316,7 @@ describe('organizations API', () => { const response = await queryAPI({ query: organizationsQuery, operationName: 'organizations', - variables: {filter: {associatedAreaIds: {includes: [muuidToString(ca.metadata.area_id)]}}}, + variables: { filter: { associatedAreaIds: { includes: 
[muuidToString(ca.metadata.area_id)] } } }, userUuid, app }) @@ -331,7 +331,7 @@ describe('organizations API', () => { const response = await queryAPI({ query: organizationsQuery, operationName: 'organizations', - variables: {filter: {excludedAreaIds: {excludes: [muuidToString(wa.metadata.area_id)]}}}, + variables: { filter: { excludedAreaIds: { excludes: [muuidToString(wa.metadata.area_id)] } } }, userUuid, app }) diff --git a/src/__tests__/ticks.ts b/src/__tests__/ticks.ts index d18ae48..60e8741 100644 --- a/src/__tests__/ticks.ts +++ b/src/__tests__/ticks.ts @@ -1,14 +1,14 @@ -import {ApolloServer} from 'apollo-server-express' +import { ApolloServer } from 'apollo-server-express' import muuid from 'uuid-mongodb' -import {jest} from '@jest/globals' -import {queryAPI, setUpServer} from '../utils/testUtils.js' -import {muuidToString} from '../utils/helpers.js' -import {TickInput} from '../db/TickTypes.js' +import { jest } from '@jest/globals' +import { queryAPI, setUpServer } from '../utils/testUtils.js' +import { muuidToString } from '../utils/helpers.js' +import { TickInput } from '../db/TickTypes.js' import TickDataSource from '../model/TickDataSource.js' import UserDataSource from '../model/UserDataSource.js' -import {UpdateProfileGQLInput} from '../db/UserTypes.js' -import {InMemoryDB} from "../utils/inMemoryDB.js"; -import express from "express"; +import { UpdateProfileGQLInput } from '../db/UserTypes.js' +import { InMemoryDB } from '../utils/inMemoryDB.js' +import express from 'express' jest.setTimeout(110000) @@ -25,7 +25,7 @@ describe('ticks API', () => { let tickOne: TickInput beforeAll(async () => { - ({server, inMemoryDB, app} = await setUpServer()) + ({ server, inMemoryDB, app } = await setUpServer()) user = muuid.v4() userUuid = muuidToString(user) @@ -95,7 +95,7 @@ describe('ticks API', () => { await ticks.addTick(tickOne) const response = await queryAPI({ query: userQuery, - variables: {userId: userUuid}, + variables: { userId: userUuid }, userUuid, app }) @@ -115,7 +115,7 @@ describe('ticks API', () => { await ticks.addTick(tickOne) const response = await queryAPI({ query: userQuery, - variables: {username: 'cat.dog'}, + variables: { username: 'cat.dog' }, userUuid, app }) @@ -129,7 +129,7 @@ describe('ticks API', () => { await ticks.addTick(tickOne) const response = await queryAPI({ query: userTickByClimbQuery, - variables: {userId: userUuid, climbId: tickOne.climbId}, + variables: { userId: userUuid, climbId: tickOne.climbId }, userUuid, app }) @@ -176,7 +176,7 @@ describe('ticks API', () => { it('creates and updates a tick', async () => { const createResponse = await queryAPI({ query: createQuery, - variables: {input: tickOne}, + variables: { input: tickOne }, userUuid, roles: ['user_admin'], app diff --git a/src/auth/middleware.ts b/src/auth/middleware.ts index 153419f..3cb68ec 100644 --- a/src/auth/middleware.ts +++ b/src/auth/middleware.ts @@ -1,31 +1,23 @@ import muid from 'uuid-mongodb' -import {AuthUserType} from '../types.js' -import {verifyJWT} from './util.js' -import {logger} from '../logger.js' +import { AuthUserType } from '../types.js' +import { verifyJWT } from './util.js' +import { logger } from '../logger.js' /** * Create a middleware context for Apollo server */ -export const createContext = async ({req}): Promise => { - const user: AuthUserType = { - roles: [], - uuid: undefined, - isBuilder: false - } - +export const createContext = async ({ req }): Promise => { try { - await validateTokenAndExtractUser(req) + return await 
validateTokenAndExtractUser(req)
   } catch (e) {
     logger.error(`Can't validate token and extract user ${e.toString() as string}`)
     throw new Error('An unexpected error has occurred. Please notify us at support@openbeta.io.')
   }
-
-  return {user}
 }
 
 export const authMiddleware = async (req, res, next): Promise => {
   try {
-    const {user, token} = await validateTokenAndExtractUser(req)
+    const { user, token } = await validateTokenAndExtractUser(req)
     req.user = user
     req.userId = user.uuid
     req.token = token
@@ -36,8 +28,9 @@ export const authMiddleware = async (req, res, next): Promise => {
   }
 }
 
-async function validateTokenAndExtractUser(req: Request): Promise<{ user: AuthUserType, token: string }> {
-  const {headers} = req
+async function validateTokenAndExtractUser (req: Request): Promise<{ user: AuthUserType, token: string }> {
+  const { headers } = req
+  // eslint-disable-next-line @typescript-eslint/dot-notation
   const authHeader = String(headers?.['authorization'] ?? '')
   if (!authHeader.startsWith('Bearer ')) {
     throw new Error('Unauthorized. Please provide a valid JWT token in the Authorization header.')
   }
diff --git a/src/db/edit/streamListener.ts b/src/db/edit/streamListener.ts
index a7b577a..35a4391 100644
--- a/src/db/edit/streamListener.ts
+++ b/src/db/edit/streamListener.ts
@@ -1,9 +1,9 @@
 import mongoose from 'mongoose'
-import {ChangeStream, ChangeStreamDocument, ChangeStreamUpdateDocument} from 'mongodb'
+import { ChangeStream, ChangeStreamDocument, ChangeStreamUpdateDocument } from 'mongodb'
 import dot from 'dot-object'
 
-import {changelogDataSource} from '../../model/ChangeLogDataSource.js'
-import {logger} from '../../logger.js'
+import { changelogDataSource } from '../../model/ChangeLogDataSource.js'
+import { logger } from '../../logger.js'
 import {
   BaseChangeRecordType,
   DBOperation,
@@ -12,35 +12,37 @@ import {
   SupportedCollectionTypes,
   UpdateDescription
 } from '../ChangeLogType.js'
-import {checkVar} from '../index.js'
-import {updateAreaIndex, updateClimbIndex} from '../export/Typesense/Client.js'
-import {AreaType} from '../AreaTypes.js'
-import {exhaustiveCheck} from '../../utils/helpers.js'
-import {ClimbType} from '../ClimbTypes.js'
+import { checkVar } from '../index.js'
+import { updateAreaIndex, updateClimbIndex } from '../export/Typesense/Client.js'
+import { AreaType } from '../AreaTypes.js'
+import { exhaustiveCheck } from '../../utils/helpers.js'
+import { ClimbType } from '../ClimbTypes.js'
 
 /**
  * Start a new stream listener to track changes
  */
-export default async function streamListener(): Promise {
+export default async function streamListener (): Promise {
+  // eslint-disable-next-line @typescript-eslint/no-misused-promises
   return (await createChangeStream()).on('change', onChange)
 }
 
 /**
  * The test stream listener awaits all change events
  */
-export async function testStreamListener(callback?: (change: ChangeStreamDocument) => void): Promise {
+export async function testStreamListener (callback?: (change: ChangeStreamDocument) => void): Promise {
+  // eslint-disable-next-line @typescript-eslint/no-misused-promises
   return (await createChangeStream()).on('change', async (change: ChangeStreamDocument) => {
     await onChange(change)
-    callback && callback(change)
+    if (callback !== undefined) callback(change)
   })
 }
 
-async function createChangeStream(): Promise {
+async function createChangeStream (): Promise {
   const resumeId = await mostRecentResumeId()
-  logger.info({resumeId}, 'Starting stream listener')
+  logger.info({ resumeId }, 'Starting stream listener')
 
   const opts:
any = { - fullDocument: 'updateLookup', + fullDocument: 'updateLookup' } if (resumeId != null) { opts.resumeId = resumeId @@ -65,19 +67,19 @@ async function createChangeStream(): Promise { } const onChange = async (change: ChangeStreamDocument): Promise => { - const {operationType} = change + const { operationType } = change switch (operationType) { case 'replace': case 'update': { let dbOp: DBOperation = 'update' const source = DocumentKind[change.ns.coll] - const {fullDocument, _id, updateDescription} = change as ChangeStreamUpdateDocument + const { fullDocument, _id, updateDescription } = change as ChangeStreamUpdateDocument if (fullDocument?._deleting != null) { dbOp = 'delete' } - return recordChange({ + return await recordChange({ _id: _id as ResumeToken, source, fullDocument: fullDocument as SupportedCollectionTypes, @@ -88,8 +90,8 @@ const onChange = async (change: ChangeStreamDocument): Promise => { case 'insert': { const dbOp = 'insert' const source = DocumentKind[change.ns.coll] - const {fullDocument, _id} = change - return recordChange({ + const { fullDocument, _id } = change + return await recordChange({ _id: _id as ResumeToken, source, fullDocument: fullDocument as SupportedCollectionTypes, @@ -107,7 +109,7 @@ interface ChangeRecordType { dbOp: DBOperation } -const recordChange = async ({source, dbOp, fullDocument, updateDescription, _id}: ChangeRecordType): Promise => { +const recordChange = async ({ source, dbOp, fullDocument, updateDescription, _id }: ChangeRecordType): Promise => { fullDocument.kind = source switch (source) { case DocumentKind.climbs: { @@ -118,7 +120,7 @@ const recordChange = async ({source, dbOp, fullDocument, updateDescription, _id} updateDescription: dotifyUpdateDescription(updateDescription), kind: DocumentKind.climbs } - return changelogDataSource.record(newDocument).then(() => updateClimbIndex(fullDocument as ClimbType, dbOp)) + return await changelogDataSource.record(newDocument).then(async () => await updateClimbIndex(fullDocument as ClimbType, dbOp)) } case DocumentKind.areas: { const newDocument: BaseChangeRecordType = { @@ -128,7 +130,7 @@ const recordChange = async ({source, dbOp, fullDocument, updateDescription, _id} updateDescription: dotifyUpdateDescription(updateDescription), kind: DocumentKind.areas } - return changelogDataSource.record(newDocument).then(() => updateAreaIndex(fullDocument as AreaType, dbOp)) + return await changelogDataSource.record(newDocument).then(async () => await updateAreaIndex(fullDocument as AreaType, dbOp)) } case DocumentKind.organizations: { const newDocument: BaseChangeRecordType = { @@ -138,7 +140,7 @@ const recordChange = async ({source, dbOp, fullDocument, updateDescription, _id} updateDescription: dotifyUpdateDescription(updateDescription), kind: DocumentKind.organizations } - return changelogDataSource.record(newDocument).then() + return await changelogDataSource.record(newDocument).then() } default: exhaustiveCheck(source) @@ -198,7 +200,7 @@ const dotifyUpdateDescription = (updateDescription: UpdateDescriptionType): Upda } } - const {updatedFields, removedFields, truncatedArrays} = updateDescription + const { updatedFields, removedFields, truncatedArrays } = updateDescription cleanupObj(updatedFields) return { updatedFields: updatedFields != null ? 
Object.keys(dot.dot(updatedFields)) : [], diff --git a/src/db/index.ts b/src/db/index.ts index a0a7f80..d087159 100644 --- a/src/db/index.ts +++ b/src/db/index.ts @@ -1,18 +1,18 @@ import mongoose from 'mongoose' -import {ChangeStream} from 'mongodb' -import {config} from 'dotenv' -import {enableAllPlugins} from 'immer' +import { ChangeStream } from 'mongodb' +import { config } from 'dotenv' +import { enableAllPlugins } from 'immer' -import {getAreaModel} from './AreaSchema.js' -import {getClimbModel} from './ClimbSchema.js' -import {getMediaObjectModel} from './MediaObjectSchema.js' -import {getOrganizationModel} from './OrganizationSchema.js' -import {getTickModel} from './TickSchema.js' -import {getXMediaModel} from './XMediaSchema.js' -import {getPostModel} from './PostSchema.js' -import {getChangeLogModel} from './ChangeLogSchema.js' -import {getExperimentalUserModel, getUserModel} from './UserSchema.js' -import {logger} from '../logger.js' +import { getAreaModel } from './AreaSchema.js' +import { getClimbModel } from './ClimbSchema.js' +import { getMediaObjectModel } from './MediaObjectSchema.js' +import { getOrganizationModel } from './OrganizationSchema.js' +import { getTickModel } from './TickSchema.js' +import { getXMediaModel } from './XMediaSchema.js' +import { getPostModel } from './PostSchema.js' +import { getChangeLogModel } from './ChangeLogSchema.js' +import { getExperimentalUserModel, getUserModel } from './UserSchema.js' +import { logger } from '../logger.js' import streamListener from './edit/streamListener.js' config() @@ -55,7 +55,7 @@ export const connectDB = async (onConnected: () => any = defaultFn): Promise { changeLogModel = getChangeLogModel() @@ -24,14 +24,14 @@ export default class ChangeLogDataSource extends MongoDataSource * @param operation * @returns */ - async create(session: ClientSession, uuid: MUUID, operation: OpType): Promise { + async create (session: ClientSession, uuid: MUUID, operation: OpType): Promise { const newChangeDoc: ChangeLogType = { _id: new mongoose.Types.ObjectId(), editedBy: uuid, operation, changes: [] } - const rs = await this.changeLogModel.insertMany(newChangeDoc, {session}) + const rs = await this.changeLogModel.insertMany(newChangeDoc, { session }) if (rs?.length !== 1) throw new Error('Error inserting new change') return rs[0] } @@ -40,7 +40,7 @@ export default class ChangeLogDataSource extends MongoDataSource * Record a new change in the changeset * @param changeRecord */ - async record(changeRecord: BaseChangeRecordType): Promise { + async record (changeRecord: BaseChangeRecordType): Promise { const filter = { _id: changeRecord.fullDocument._change?.historyId } @@ -50,7 +50,7 @@ export default class ChangeLogDataSource extends MongoDataSource $push: { changes: { $each: [changeRecord], - $sort: {'fullDocument._change.seq': -1} + $sort: { 'fullDocument._change.seq': -1 } } } }, { @@ -63,11 +63,11 @@ export default class ChangeLogDataSource extends MongoDataSource return this } - async getAreaChangeSets(areaUuid?: MUUID): Promise { + async getAreaChangeSets (areaUuid?: MUUID): Promise { return await areaHistoryDataSource.getChangeSetsByUuid(areaUuid) } - async getOrganizationChangeSets(orgId?: MUUID): Promise { + async getOrganizationChangeSets (orgId?: MUUID): Promise { return await organizationHistoryDataSource.getChangeSetsByOrgId(orgId) } @@ -76,23 +76,23 @@ export default class ChangeLogDataSource extends MongoDataSource * @param uuidList optional filter * @returns change sets */ - async getChangeSets(uuidList: MUUID[]): 
Promise> { - return this.changeLogModel.aggregate([ + async getChangeSets (uuidList: MUUID[]): Promise> { + return await this.changeLogModel.aggregate([ { $sort: { createdAt: -1 } } - ]).limit(500); + ]).limit(500) } - async _testRemoveAll(): Promise { + async _testRemoveAll (): Promise { await this.changeLogModel.deleteMany() } static instance: ChangeLogDataSource - static getInstance(): ChangeLogDataSource { + static getInstance (): ChangeLogDataSource { if (ChangeLogDataSource.instance == null) { /** * Why suppress TS error? See: https://github.com/GraphQLGuide/apollo-datasource-mongodb/issues/88 diff --git a/src/model/MutableAreaDataSource.ts b/src/model/MutableAreaDataSource.ts index faefe17..2661003 100644 --- a/src/model/MutableAreaDataSource.ts +++ b/src/model/MutableAreaDataSource.ts @@ -1,14 +1,14 @@ import bbox2Polygon from '@turf/bbox-polygon' -import {geometry, Point} from '@turf/helpers' -import {UserInputError} from 'apollo-server-express' +import { geometry, Point } from '@turf/helpers' +import { UserInputError } from 'apollo-server-express' import isoCountries from 'i18n-iso-countries' import enJson from 'i18n-iso-countries/langs/en.json' assert {type: 'json'} -import {produce} from 'immer' -import mongoose, {ClientSession} from 'mongoose' -import {NIL, v5 as uuidv5} from 'uuid' -import muuid, {MUUID} from 'uuid-mongodb' +import { produce } from 'immer' +import mongoose, { ClientSession } from 'mongoose' +import { NIL, v5 as uuidv5 } from 'uuid' +import muuid, { MUUID } from 'uuid-mongodb' -import {GradeContexts} from '../GradeUtils.js' +import { GradeContexts } from '../GradeUtils.js' import CountriesLngLat from '../data/countries-with-lnglat.json' assert {type: 'json'} import { AreaDocumnent, @@ -17,18 +17,18 @@ import { OperationType, UpdateSortingOrderType } from '../db/AreaTypes.js' -import {ChangeRecordMetadataType} from '../db/ChangeLogType.js' -import {ExperimentalAuthorType} from '../db/UserTypes.js' -import {makeDBArea} from '../db/import/usa/AreaTransformer.js' -import {createRootNode} from '../db/import/usa/AreaTree.js' -import {leafReducer, nodesReducer, StatsSummary} from '../db/utils/jobs/TreeUpdaters/updateAllAreas.js' -import {bboxFrom} from '../geo-utils.js' -import {logger} from '../logger.js' -import {createInstance as createExperimentalUserDataSource} from '../model/ExperimentalUserDataSource.js' -import {sanitizeStrict} from '../utils/sanitize.js' +import { ChangeRecordMetadataType } from '../db/ChangeLogType.js' +import { ExperimentalAuthorType } from '../db/UserTypes.js' +import { makeDBArea } from '../db/import/usa/AreaTransformer.js' +import { createRootNode } from '../db/import/usa/AreaTree.js' +import { leafReducer, nodesReducer, StatsSummary } from '../db/utils/jobs/TreeUpdaters/updateAllAreas.js' +import { bboxFrom } from '../geo-utils.js' +import { logger } from '../logger.js' +import { createInstance as createExperimentalUserDataSource } from '../model/ExperimentalUserDataSource.js' +import { sanitizeStrict } from '../utils/sanitize.js' import AreaDataSource from './AreaDataSource.js' -import {changelogDataSource} from './ChangeLogDataSource.js' -import {withTransaction} from "../utils/helpers"; +import { changelogDataSource } from './ChangeLogDataSource.js' +import { withTransaction } from '../utils/helpers' isoCountries.registerLocale(enJson) @@ -53,7 +53,7 @@ export interface UpdateAreaOptions { export default class MutableAreaDataSource extends AreaDataSource { experimentalUserDataSource = createExperimentalUserDataSource() - async 
setDestinationFlag(user: MUUID, uuid: MUUID, flag: boolean): Promise { + async setDestinationFlag (user: MUUID, uuid: MUUID, flag: boolean): Promise { const session = await this.areaModel.startSession() let ret: AreaType | null = null @@ -67,10 +67,10 @@ export default class MutableAreaDataSource extends AreaDataSource { return ret } - async _setDestinationFlag(session, user: MUUID, uuid: MUUID, flag: boolean): Promise { + async _setDestinationFlag (session, user: MUUID, uuid: MUUID, flag: boolean): Promise { const change = await changelogDataSource.create(session, uuid, OperationType.updateDestination) - const filter = {'metadata.area_id': uuid} + const filter = { 'metadata.area_id': uuid } const update: Pick }> = [{ $set: { 'metadata.isDestination': flag, @@ -83,16 +83,16 @@ export default class MutableAreaDataSource extends AreaDataSource { } } }] - const opts = {new: true, session, timestamps: false} // return newly updated doc + const opts = { new: true, session, timestamps: false } // return newly updated doc return await this.areaModel - .updateOne(filter, update, opts).orFail().lean() + .updateOne(filter, update, opts).orFail().lean() } /** * Add a country * @param _countryCode alpha2 or 3 ISO code */ - async addCountry(_countryCode: string): Promise { + async addCountry (_countryCode: string): Promise { const countryCode = _countryCode.toLocaleUpperCase('en-US') if (!isoCountries.isValid(countryCode)) { throw new Error('Invalid ISO code: ' + countryCode) @@ -126,16 +126,16 @@ export default class MutableAreaDataSource extends AreaDataSource { throw new Error('Error inserting ' + countryCode) } - async addAreaWith({ - user, - areaName, - parentUuid = null, - countryCode, - experimentalAuthor, - isLeaf, - isBoulder, - session - }: AddAreaOptions): Promise { + async addAreaWith ({ + user, + areaName, + parentUuid = null, + countryCode, + experimentalAuthor, + isLeaf, + isBoulder, + session + }: AddAreaOptions): Promise { return await this.addArea(user, areaName, parentUuid, countryCode, experimentalAuthor, isLeaf, isBoulder, session) } @@ -146,14 +146,14 @@ export default class MutableAreaDataSource extends AreaDataSource { * @param parentUuid * @param countryCode */ - async addArea(user: MUUID, - areaName: string, - parentUuid: MUUID | null, - countryCode?: string, - experimentalAuthor?: ExperimentalAuthorType, - isLeaf?: boolean, - isBoulder?: boolean, - sessionCtx?: ClientSession): Promise { + async addArea (user: MUUID, + areaName: string, + parentUuid: MUUID | null, + countryCode?: string, + experimentalAuthor?: ExperimentalAuthorType, + isLeaf?: boolean, + isBoulder?: boolean, + sessionCtx?: ClientSession): Promise { if (parentUuid == null && countryCode == null) { throw new Error(`Adding area "${areaName}" failed. Must provide parent Id or country code`) } @@ -169,14 +169,14 @@ export default class MutableAreaDataSource extends AreaDataSource { const session = sessionCtx ?? 
await this.areaModel.startSession() if (session.inTransaction()) { - return this._addArea(session, user, areaName, uuid, experimentalAuthor, isLeaf, isBoulder) + return await this._addArea(session, user, areaName, uuid, experimentalAuthor, isLeaf, isBoulder) } else { - return withTransaction(session, () => this._addArea(session, user, areaName, uuid, experimentalAuthor, isLeaf, isBoulder)) + return await withTransaction(session, async () => await this._addArea(session, user, areaName, uuid, experimentalAuthor, isLeaf, isBoulder)) } } - async _addArea(session, user: MUUID, areaName: string, parentUuid: MUUID, experimentalAuthor?: ExperimentalAuthorType, isLeaf?: boolean, isBoulder?: boolean): Promise { - const parentFilter = {'metadata.area_id': parentUuid} + async _addArea (session, user: MUUID, areaName: string, parentUuid: MUUID, experimentalAuthor?: ExperimentalAuthorType, isLeaf?: boolean, isBoulder?: boolean): Promise { + const parentFilter = { 'metadata.area_id': parentUuid } const parent = await this.areaModel.findOne(parentFilter).session(session).orFail(new UserInputError(`[${areaName}]: Expecting country or area parent, found none with id ${parentUuid.toString()}`)) if (parent.metadata.leaf || (parent.metadata?.isBoulder ?? false)) { @@ -228,16 +228,16 @@ export default class MutableAreaDataSource extends AreaDataSource { newArea._change = produce(newChangeMeta, draft => { draft.seq = 1 }) - const rs1 = await this.areaModel.insertMany(newArea, {session}) + const rs1 = await this.areaModel.insertMany(newArea, { session }) // Make sure parent knows about this new area parent.children.push(newArea._id) parent.updatedBy = experimentaAuthorId ?? user - await parent.save({timestamps: false}) + await parent.save({ timestamps: false }) return rs1[0].toObject() } - async deleteArea(user: MUUID, uuid: MUUID): Promise { + async deleteArea (user: MUUID, uuid: MUUID): Promise { const session = await this.areaModel.startSession() let ret: AreaType | null = null @@ -251,10 +251,10 @@ export default class MutableAreaDataSource extends AreaDataSource { return ret } - async _deleteArea(session: ClientSession, user: MUUID, uuid: MUUID): Promise { + async _deleteArea (session: ClientSession, user: MUUID, uuid: MUUID): Promise { const filter = { 'metadata.area_id': uuid, - deleting: {$ne: null} + deleting: { $ne: null } } const area = await this.areaModel.findOne(filter).session(session).orFail() @@ -290,7 +290,7 @@ export default class MutableAreaDataSource extends AreaDataSource { $filter: { input: '$children', as: 'child', - cond: {$ne: ['$$child', area._id]} + cond: { $ne: ['$$child', area._id] } } }, updatedBy: user, @@ -311,7 +311,7 @@ export default class MutableAreaDataSource extends AreaDataSource { // See https://www.mongodb.com/community/forums/t/change-stream-fulldocument-on-delete/15963 // Mongo TTL indexes: https://www.mongodb.com/docs/manual/core/index-ttl/ return await this.areaModel.findOneAndUpdate( - {'metadata.area_id': uuid}, + { 'metadata.area_id': uuid }, [{ $set: { updatedBy: user, @@ -327,7 +327,7 @@ export default class MutableAreaDataSource extends AreaDataSource { }).session(session).lean() } - async updateAreaWith({user, areaUuid, document, session}: UpdateAreaOptions): Promise { + async updateAreaWith ({ user, areaUuid, document, session }: UpdateAreaOptions): Promise { return await this.updateArea(user, areaUuid, document, session) } @@ -340,11 +340,11 @@ export default class MutableAreaDataSource extends AreaDataSource { * @param document New fields * @returns Newly 
updated area
    */
-  async updateArea(user: MUUID, areaUuid: MUUID, document: AreaEditableFieldsType, sessionCtx?: ClientSession): Promise {
+  async updateArea (user: MUUID, areaUuid: MUUID, document: AreaEditableFieldsType, sessionCtx?: ClientSession): Promise {
     const _updateArea = async (session: ClientSession, user: MUUID, areaUuid: MUUID, document: AreaEditableFieldsType): Promise => {
       const filter = {
         'metadata.area_id': areaUuid,
-        deleting: {$ne: null}
+        deleting: { $ne: null }
       }
       const area = await this.areaModel.findOne(filter).session(session)
@@ -380,7 +380,7 @@ export default class MutableAreaDataSource extends AreaDataSource {
         operation: opType,
         seq: 0
       }
-      area.set({_change})
+      area.set({ _change })
       area.updatedBy = experimentaAuthorId ?? user
 
       if (area.pathTokens.length === 1) {
@@ -393,25 +393,25 @@ export default class MutableAreaDataSource extends AreaDataSource {
       if (areaName != null) {
         const sanitizedName = sanitizeStrict(areaName)
-        area.set({area_name: sanitizedName})
+        area.set({ area_name: sanitizedName })
         // change our pathTokens
         await this.updatePathTokens(session, _change, area, sanitizedName)
       }
 
-      if (shortCode != null) area.set({shortCode: shortCode.toUpperCase()})
-      if (isDestination != null) area.set({'metadata.isDestination': isDestination})
-      if (isLeaf != null) area.set({'metadata.leaf': isLeaf})
+      if (shortCode != null) area.set({ shortCode: shortCode.toUpperCase() })
+      if (isDestination != null) area.set({ 'metadata.isDestination': isDestination })
+      if (isLeaf != null) area.set({ 'metadata.leaf': isLeaf })
       if (isBoulder != null) {
-        area.set({'metadata.isBoulder': isBoulder})
+        area.set({ 'metadata.isBoulder': isBoulder })
         if (isBoulder) {
           // boulder == true implies leaf = true
-          area.set({'metadata.leaf': true})
+          area.set({ 'metadata.leaf': true })
         }
       }
       if (description != null) {
         const sanitized = sanitizeStrict(description)
-        area.set({'content.description': sanitized})
+        area.set({ 'content.description': sanitized })
       }
 
       const latLngHasChanged = lat != null && lng != null
@@ -436,9 +436,9 @@ export default class MutableAreaDataSource extends AreaDataSource {
     const session = sessionCtx ?? await this.areaModel.startSession()
     if (session.inTransaction()) {
-      return _updateArea(session, user, areaUuid, document)
+      return await _updateArea(session, user, areaUuid, document)
     } else {
-      return withTransaction(session, () => _updateArea(session, user, areaUuid, document))
+      return await withTransaction(session, async () => await _updateArea(session, user, areaUuid, document))
     }
   }
@@ -450,7 +450,7 @@ export default class MutableAreaDataSource extends AreaDataSource {
    * @param newAreaName new area name
    * @param depth tree depth
    */
-  async updatePathTokens(session: ClientSession, changeRecord: ChangeRecordMetadataType, area: AreaDocumnent, newAreaName: string, changeIndex: number = -1): Promise {
+  async updatePathTokens (session: ClientSession, changeRecord: ChangeRecordMetadataType, area: AreaDocumnent, newAreaName: string, changeIndex: number = -1): Promise {
     if (area.pathTokens.length > 1) {
       if (changeIndex === -1) {
         changeIndex = area.pathTokens.length - 1
@@ -458,9 +458,9 @@ export default class MutableAreaDataSource extends AreaDataSource {
       const newPath = [...area.pathTokens]
       newPath[changeIndex] = newAreaName
-      area.set({pathTokens: newPath})
-      area.set({_change: changeRecord})
-      await area.save({session})
+      area.set({ pathTokens: newPath })
+      area.set({ _change: changeRecord })
+      await area.save({ session })
 
       // hydrate children_ids array with actual area documents
       await area.populate('children')
@@ -480,16 +480,16 @@ export default class MutableAreaDataSource extends AreaDataSource {
    * @param input area sorting input array
    * @returns
    */
-  async updateSortingOrder(user: MUUID, input: UpdateSortingOrderType[]): Promise {
+  async updateSortingOrder (user: MUUID, input: UpdateSortingOrderType[]): Promise {
     const doUpdate = async (session: ClientSession, user: MUUID, input: UpdateSortingOrderType[]): Promise => {
       const opType = OperationType.orderAreas
       const change = await changelogDataSource.create(session, user, opType)
 
       const updates: any[] = []
-      input.forEach(({areaId, leftRightIndex}, index) => {
+      input.forEach(({ areaId, leftRightIndex }, index) => {
         updates.push({
           updateOne: {
-            filter: {'metadata.area_id': muuid.from(areaId)},
+            filter: { 'metadata.area_id': muuid.from(areaId) },
             update: {
               $set: {
                 'metadata.leftRightIndex': leftRightIndex,
@@ -506,7 +506,7 @@ export default class MutableAreaDataSource extends AreaDataSource {
         })
       })
 
-      const rs = (await this.areaModel.bulkWrite(updates, {session})).toJSON()
+      const rs = (await this.areaModel.bulkWrite(updates, { session })).toJSON()
       if (rs.ok === 1 && rs.nMatched === rs.nModified) {
         return input.map(item => item.areaId)
@@ -536,7 +536,7 @@ export default class MutableAreaDataSource extends AreaDataSource {
    * @param startingArea
    * @param excludeStartingArea true to exclude the starting area from the update. Useful when deleting an area.
    */
-  async updateLeafStatsAndGeoData(session: ClientSession, changeRecord: ChangeRecordMetadataType, startingArea: AreaDocumnent, excludeStartingArea: boolean = false): Promise {
+  async updateLeafStatsAndGeoData (session: ClientSession, changeRecord: ChangeRecordMetadataType, startingArea: AreaDocumnent, excludeStartingArea: boolean = false): Promise {
     /**
      * Update function. For each node, recalculate stats and recursively update its ancestors until we reach the country node.
*/ @@ -549,12 +549,12 @@ export default class MutableAreaDataSource extends AreaDataSource { const ancestors = area.ancestors.split(',') const parentUuid = muuid.from(ancestors[ancestors.length - 2]) const parentArea = - await this.areaModel.findOne({'metadata.area_id': parentUuid}) - .batchSize(10) - .populate<{ children: AreaDocumnent[] }>({path: 'children', model: this.areaModel}) - .allowDiskUse(true) - .session(session) - .orFail() + await this.areaModel.findOne({ 'metadata.area_id': parentUuid }) + .batchSize(10) + .populate<{ children: AreaDocumnent[] }>({ path: 'children', model: this.areaModel }) + .allowDiskUse(true) + .session(session) + .orFail() const acc: StatsSummary[] = [] /** @@ -568,7 +568,7 @@ export default class MutableAreaDataSource extends AreaDataSource { } } - const current = await nodesReducer(acc, parentArea as any as AreaDocumnent, {session, changeRecord}) + const current = await nodesReducer(acc, parentArea as any as AreaDocumnent, { session, changeRecord }) await updateFn(session, changeRecord, parentArea as any as AreaDocumnent, current) } @@ -588,7 +588,7 @@ export default class MutableAreaDataSource extends AreaDataSource { static instance: MutableAreaDataSource - static getInstance(): MutableAreaDataSource { + static getInstance (): MutableAreaDataSource { if (MutableAreaDataSource.instance == null) { MutableAreaDataSource.instance = new MutableAreaDataSource(mongoose.connection.db.collection('areas')) } diff --git a/src/model/MutableClimbDataSource.ts b/src/model/MutableClimbDataSource.ts index 603a486..cfc4991 100644 --- a/src/model/MutableClimbDataSource.ts +++ b/src/model/MutableClimbDataSource.ts @@ -1,20 +1,20 @@ -import {UserInputError} from 'apollo-server-express' -import {ClientSession} from 'mongoose' -import muid, {MUUID} from 'uuid-mongodb' - -import {createGradeObject, gradeContextToGradeScales, sanitizeDisciplines} from '../GradeUtils.js' -import {getAreaModel} from '../db/AreaSchema.js' -import {AreaDocumnent} from '../db/AreaTypes.js' -import {ChangeRecordMetadataType} from '../db/ChangeLogType.js' -import {getClimbModel} from '../db/ClimbSchema.js' -import {ClimbChangeDocType, ClimbChangeInputType, ClimbEditOperationType, ClimbType, IPitch} from '../db/ClimbTypes.js' -import {aggregateCragStats} from '../db/utils/Aggregate.js' -import {sanitize, sanitizeStrict} from '../utils/sanitize.js' -import {changelogDataSource} from './ChangeLogDataSource.js' +import { UserInputError } from 'apollo-server-express' +import { ClientSession } from 'mongoose' +import muid, { MUUID } from 'uuid-mongodb' + +import { createGradeObject, gradeContextToGradeScales, sanitizeDisciplines } from '../GradeUtils.js' +import { getAreaModel } from '../db/AreaSchema.js' +import { AreaDocumnent } from '../db/AreaTypes.js' +import { ChangeRecordMetadataType } from '../db/ChangeLogType.js' +import { getClimbModel } from '../db/ClimbSchema.js' +import { ClimbChangeDocType, ClimbChangeInputType, ClimbEditOperationType, ClimbType, IPitch } from '../db/ClimbTypes.js' +import { aggregateCragStats } from '../db/utils/Aggregate.js' +import { sanitize, sanitizeStrict } from '../utils/sanitize.js' +import { changelogDataSource } from './ChangeLogDataSource.js' import ClimbDataSource from './ClimbDataSource.js' -import {createInstance as createExperimentalUserDataSource} from './ExperimentalUserDataSource.js' +import { createInstance as createExperimentalUserDataSource } from './ExperimentalUserDataSource.js' import MutableAreaDataSource from './MutableAreaDataSource.js' -import 
{withTransaction} from "../utils/helpers"; +import { withTransaction } from '../utils/helpers' export interface AddOrUpdateClimbsOptions { userId: MUUID @@ -26,7 +26,7 @@ export interface AddOrUpdateClimbsOptions { export default class MutableClimbDataSource extends ClimbDataSource { experimentalUserDataSource = createExperimentalUserDataSource() - async _addOrUpdateClimbs(userId: MUUID, session: ClientSession, parentId: MUUID, userInput: ClimbChangeInputType[]): Promise { + async _addOrUpdateClimbs (userId: MUUID, session: ClientSession, parentId: MUUID, userInput: ClimbChangeInputType[]): Promise { const newClimbIds = new Array(userInput.length) for (let i = 0; i < newClimbIds.length; i++) { // make sure there's some input @@ -42,7 +42,7 @@ export default class MutableClimbDataSource extends ClimbDataSource { } } - const existingIds = await this.climbModel.find({_id: {$in: newClimbIds}}).select('_id') + const existingIds = await this.climbModel.find({ _id: { $in: newClimbIds } }).select('_id') interface IdMapType { id: MUUID @@ -52,9 +52,9 @@ export default class MutableClimbDataSource extends ClimbDataSource { // A list of ID objects to track whether the ID exists in the DB const idList = newClimbIds.reduce((acc, curr) => { if (existingIds.some(item => item._id.toUUID().toString() === curr.toUUID().toString())) { - acc.push({id: curr, existed: true}) + acc.push({ id: curr, existed: true }) } else { - acc.push({id: curr, existed: false}) + acc.push({ id: curr, existed: false }) } return acc }, []) @@ -62,11 +62,11 @@ export default class MutableClimbDataSource extends ClimbDataSource { const opType = ClimbEditOperationType.updateClimb const change = await changelogDataSource.create(session, userId, opType) - const parentFilter = {'metadata.area_id': parentId} + const parentFilter = { 'metadata.area_id': parentId } const parent = await this.areaModel - .findOne(parentFilter).session(session) - .orFail(new UserInputError(`Area with id: ${parentId.toUUID().toString()} not found`)) + .findOne(parentFilter).session(session) + .orFail(new UserInputError(`Area with id: ${parentId.toUUID().toString()} not found`)) const _change: ChangeRecordMetadataType = { user: userId, @@ -75,7 +75,7 @@ export default class MutableClimbDataSource extends ClimbDataSource { operation: ClimbEditOperationType.updateClimb, seq: 0 } - parent.set({_change}) + parent.set({ _change }) // does the parent area have subareas? if (parent.children.length > 0) { @@ -103,7 +103,7 @@ export default class MutableClimbDataSource extends ClimbDataSource { // It's ok to have empty disciplines obj in the input in case // we just want to update other fields. // However, if disciplines is non-empty, is there 1 non-boulder problem in the input? - const hasARouteClimb = userInput.some(({disciplines}) => + const hasARouteClimb = userInput.some(({ disciplines }) => disciplines != null && Object.keys(disciplines).length > 0 && !(disciplines?.bouldering ?? false)) if (hasARouteClimb && (parent.metadata?.isBoulder ?? false)) { @@ -142,7 +142,7 @@ export default class MutableClimbDataSource extends ClimbDataSource { const newPitchesWithIDs = pitches != null ? 
pitches.map((pitch): IPitch => { - const {id, ...partialPitch} = pitch // separate 'id' input and rest of the pitch properties to avoid duplicate id and _id + const { id, ...partialPitch } = pitch // separate 'id' input and rest of the pitch properties to avoid duplicate id and _id if (partialPitch.pitchNumber === undefined) { throw new UserInputError('Each pitch in a multi-pitch climb must have a pitchNumber representing its sequence in the climb. Please ensure that every pitch is numbered.') } @@ -156,14 +156,14 @@ export default class MutableClimbDataSource extends ClimbDataSource { }) : null - const {description, location, protection, name, fa, length, boltsCount} = userInput[i] + const { description, location, protection, name, fa, length, boltsCount } = userInput[i] // Make sure we don't update content = {} // See https://www.mongodb.com/community/forums/t/mongoservererror-invalid-set-caused-by-an-empty-object-is-not-a-valid-value/148344/2 const content = { - ...description != null && {description: sanitize(description)}, - ...location != null && {location: sanitize(location)}, - ...protection != null && {protection: sanitize(protection)} + ...description != null && { description: sanitize(description) }, + ...location != null && { location: sanitize(location) }, + ...protection != null && { protection: sanitize(protection) } } /** @@ -176,22 +176,22 @@ export default class MutableClimbDataSource extends ClimbDataSource { */ const doc: ClimbChangeDocType = { _id: newClimbIds[i], - ...name != null && {name: sanitizeStrict(name)}, - ...newGradeObj != null && {grades: newGradeObj}, - ...typeSafeDisciplines != null && {type: typeSafeDisciplines}, + ...name != null && { name: sanitizeStrict(name) }, + ...newGradeObj != null && { grades: newGradeObj }, + ...typeSafeDisciplines != null && { type: typeSafeDisciplines }, gradeContext: parent.gradeContext, - ...fa != null && {fa}, - ...length != null && length > 0 && {length}, - ...boltsCount != null && boltsCount >= 0 && {boltsCount}, // Include 'boltsCount' if it's defined and its value is 0 (no bolts) or greater - ...newPitchesWithIDs != null && {pitches: newPitchesWithIDs}, - ...Object.keys(content).length > 0 && {content}, + ...fa != null && { fa }, + ...length != null && length > 0 && { length }, + ...boltsCount != null && boltsCount >= 0 && { boltsCount }, // Include 'boltsCount' if it's defined and its value is 0 (no bolts) or greater + ...newPitchesWithIDs != null && { pitches: newPitchesWithIDs }, + ...Object.keys(content).length > 0 && { content }, metadata: { areaRef: parent.metadata.area_id, lnglat: parent.metadata.lnglat, - ...userInput[i]?.leftRightIndex != null && {left_right_index: userInput[i].leftRightIndex} + ...userInput[i]?.leftRightIndex != null && { left_right_index: userInput[i].leftRightIndex } }, - ...!idList[i].existed && {createdBy: experimentalUserId ?? userId}, - ...idList[i].existed && {updatedBy: userId}, + ...!idList[i].existed && { createdBy: experimentalUserId ?? userId }, + ...idList[i].existed && { updatedBy: userId }, _change: { user: experimentalUserId ?? 
userId,
           historyId: change._id,
@@ -205,7 +205,7 @@ export default class MutableClimbDataSource extends ClimbDataSource {
     const bulk = newDocs.map(doc => ({
       updateOne: {
-        filter: {_id: doc._id},
+        filter: { _id: doc._id },
         update: [{
           $set: {
             ...doc,
@@ -219,18 +219,18 @@ export default class MutableClimbDataSource extends ClimbDataSource {
       }
     }))
 
-    const rs = await (await this.climbModel.bulkWrite(bulk, {session})).toJSON()
+    const rs = await (await this.climbModel.bulkWrite(bulk, { session })).toJSON()
     if (rs.ok === 1) {
       const idList: MUUID[] = []
       const idStrList: string[] = []
-      rs.upserted.forEach(({_id}) => {
+      rs.upserted.forEach(({ _id }) => {
         idList.push(_id)
         idStrList.push(_id.toUUID().toString())
       })
 
       if (idList.length > 0) {
-        parent.set({climbs: parent.climbs.concat(idList)})
+        parent.set({ climbs: parent.climbs.concat(idList) })
       }
 
       await parent.save()
@@ -246,7 +246,7 @@ export default class MutableClimbDataSource extends ClimbDataSource {
     }
   }
 
-  async addOrUpdateClimbsWith({userId, parentId, changes, session}: AddOrUpdateClimbsOptions): Promise {
+  async addOrUpdateClimbsWith ({ userId, parentId, changes, session }: AddOrUpdateClimbsOptions): Promise {
     return await this.addOrUpdateClimbs(userId, parentId, changes, session)
   }
@@ -256,12 +256,12 @@ export default class MutableClimbDataSource extends ClimbDataSource {
    * @param changes
    * @returns a list of updated (or newly added) climb IDs
    */
-  async addOrUpdateClimbs(userId: MUUID, parentId: MUUID, changes: ClimbChangeInputType[], sessionCtx?: ClientSession): Promise {
+  async addOrUpdateClimbs (userId: MUUID, parentId: MUUID, changes: ClimbChangeInputType[], sessionCtx?: ClientSession): Promise {
     const session = sessionCtx ?? await this.areaModel.startSession()
     if (session.inTransaction()) {
-      return this._addOrUpdateClimbs(userId, session, parentId, changes)
+      return await this._addOrUpdateClimbs(userId, session, parentId, changes)
     } else {
-      return await withTransaction(session, () => this._addOrUpdateClimbs(userId, session, parentId, changes)) ?? []
+      return await withTransaction(session, async () => await this._addOrUpdateClimbs(userId, session, parentId, changes)) ?? []
     }
   }
@@ -272,7 +272,7 @@ export default class MutableClimbDataSource extends ClimbDataSource {
    * @param idListStr Array of climb IDs
    * @returns number of climbs deleted
    */
-  async deleteClimbs(userId: MUUID, parentId: MUUID, idList: MUUID[]): Promise {
+  async deleteClimbs (userId: MUUID, parentId: MUUID, idList: MUUID[]): Promise {
     const session = await this.areaModel.startSession()
     let ret = 0
@@ -289,20 +289,20 @@ export default class MutableClimbDataSource extends ClimbDataSource {
       }
       // Remove climb IDs from parent.climbs[]
       await this.areaModel.updateOne(
-        {'metadata.area_id': parentId},
+        { 'metadata.area_id': parentId },
         {
-          $pullAll: {climbs: idList},
+          $pullAll: { climbs: idList },
           $set: {
             _change,
             updatedBy: userId
           }
         },
-        {session})
+        { session })
       // Mark climbs as deleted
       const filter = {
-        _id: {$in: idList},
-        _deleting: {$exists: false}
+        _id: { $in: idList },
+        _deleting: { $exists: false }
       }
       const rs = await this.climbModel.updateMany(
         filter,
@@ -325,7 +325,7 @@ export default class MutableClimbDataSource extends ClimbDataSource {
 
   static instance: MutableClimbDataSource
 
-  static getInstance(): MutableClimbDataSource {
+  static getInstance (): MutableClimbDataSource {
     if (MutableClimbDataSource.instance == null) {
       // Why suppress TS error?
See: https://github.com/GraphQLGuide/apollo-datasource-mongodb/issues/88 // @ts-expect-error @@ -346,10 +346,10 @@ const updateStats = async (areaIdOrAreaCursor: MUUID | AreaDocumnent, session: C if ((areaIdOrAreaCursor as AreaDocumnent).totalClimbs != null) { area = areaIdOrAreaCursor as AreaDocumnent } else { - area = await getAreaModel().findOne({'metadata.area_id': areaIdOrAreaCursor as MUUID}).session(session).orFail() + area = await getAreaModel().findOne({ 'metadata.area_id': areaIdOrAreaCursor as MUUID }).session(session).orFail() } - await area.populate<{ climbs: ClimbType[] }>({path: 'climbs', model: getClimbModel()}) + await area.populate<{ climbs: ClimbType[] }>({ path: 'climbs', model: getClimbModel() }) area.set({ totalClimbs: area.climbs.length, aggregate: aggregateCragStats(area.toObject()) diff --git a/src/model/__tests__/AreaHistoryDataSource.ts b/src/model/__tests__/AreaHistoryDataSource.ts index 57eaa8c..e4fbaf6 100644 --- a/src/model/__tests__/AreaHistoryDataSource.ts +++ b/src/model/__tests__/AreaHistoryDataSource.ts @@ -1,11 +1,11 @@ import muuid from 'uuid-mongodb' import MutableAreaDataSource from '../MutableAreaDataSource.js' -import {changelogDataSource} from '../ChangeLogDataSource.js' -import {OperationType} from '../../db/AreaTypes.js' -import inMemoryDB from "../../utils/inMemoryDB.js"; -import waitForExpect from "wait-for-expect"; -import jest from "jest-mock"; +import { changelogDataSource } from '../ChangeLogDataSource.js' +import { OperationType } from '../../db/AreaTypes.js' +import inMemoryDB from '../../utils/inMemoryDB.js' +import waitForExpect from 'wait-for-expect' +import jest from 'jest-mock' describe('Area history', () => { let areas: MutableAreaDataSource diff --git a/src/model/__tests__/ChangeLogDS.ts b/src/model/__tests__/ChangeLogDS.ts index 6cf4e36..e722867 100644 --- a/src/model/__tests__/ChangeLogDS.ts +++ b/src/model/__tests__/ChangeLogDS.ts @@ -1,11 +1,11 @@ import muuid from 'uuid-mongodb' -import {getAreaModel, getChangeLogModel} from '../../db/index.js' +import { getAreaModel, getChangeLogModel } from '../../db/index.js' import ChangeLogDataSource from '../ChangeLogDataSource.js' -import {OpType} from '../../db/ChangeLogType.js' -import {OperationType} from '../../db/AreaTypes.js' +import { OpType } from '../../db/ChangeLogType.js' +import { OperationType } from '../../db/AreaTypes.js' -import {logger} from '../../logger.js' -import inMemoryDB from "../../utils/inMemoryDB.js"; +import { logger } from '../../logger.js' +import inMemoryDB from '../../utils/inMemoryDB.js' describe('Area history', () => { let changeLog: ChangeLogDataSource diff --git a/src/model/__tests__/MediaDataSource.ts b/src/model/__tests__/MediaDataSource.ts index 05f05fe..afe3489 100644 --- a/src/model/__tests__/MediaDataSource.ts +++ b/src/model/__tests__/MediaDataSource.ts @@ -1,11 +1,11 @@ import mongoose from 'mongoose' -import muuid, {MUUID} from 'uuid-mongodb' +import muuid, { MUUID } from 'uuid-mongodb' import MutableMediaDataSource from '../MutableMediaDataSource.js' import AreaDataSource from '../MutableAreaDataSource.js' import ClimbDataSource from '../MutableClimbDataSource.js' -import {createIndexes} from '../../db/index.js' -import {AreaType} from '../../db/AreaTypes.js' +import { createIndexes } from '../../db/index.js' +import { AreaType } from '../../db/AreaTypes.js' import { AddTagEntityInput, EntityTag, @@ -14,8 +14,8 @@ import { UserMedia, UserMediaQueryInput } from '../../db/MediaObjectTypes.js' -import {newSportClimb1} from 
'./MutableClimbDataSource.js' -import inMemoryDB from "../../utils/inMemoryDB.js"; +import { newSportClimb1 } from './MutableClimbDataSource.js' +import inMemoryDB from '../../utils/inMemoryDB.js' const TEST_MEDIA: MediaObjectGQLInput = { userUuid: 'a2eb6353-65d1-445f-912c-53c6301404bd', @@ -141,7 +141,7 @@ describe('MediaDataSource', () => { expect(mediaObjects[0].entityTags).toHaveLength(2) // remove tag - const res = await media.removeEntityTag({mediaId: climbTag.mediaId, tagId: tag._id}) + const res = await media.removeEntityTag({ mediaId: climbTag.mediaId, tagId: tag._id }) expect(res).toBe(true) // verify the number tags @@ -202,7 +202,7 @@ describe('MediaDataSource', () => { const rs = await media.addMediaObjects([{ ...TEST_MEDIA, mediaUrl: 'photo101.jpg', - entityTag: {entityType: 0, entityId: climbIdForTagging.toUUID().toString()} + entityTag: { entityType: 0, entityId: climbIdForTagging.toUUID().toString() } } ]) @@ -222,7 +222,7 @@ describe('MediaDataSource', () => { */ const newMediaListInput: MediaObjectGQLInput[] = [] for (let i = 0; i < 7; i = i + 1) { - newMediaListInput.push({...MEDIA_TEMPLATE, mediaUrl: `/photo${i}.jpg`}) + newMediaListInput.push({ ...MEDIA_TEMPLATE, mediaUrl: `/photo${i}.jpg` }) } const expectedMedia = await media.addMediaObjects(newMediaListInput) diff --git a/src/model/__tests__/MutableClimbDataSource.ts b/src/model/__tests__/MutableClimbDataSource.ts index ad1bf83..9fa8abe 100644 --- a/src/model/__tests__/MutableClimbDataSource.ts +++ b/src/model/__tests__/MutableClimbDataSource.ts @@ -1,16 +1,16 @@ import muid from 'uuid-mongodb' -import {ChangeStream} from 'mongodb' +import { ChangeStream } from 'mongodb' import MutableClimbDataSource from '../MutableClimbDataSource.js' import MutableAreaDataSource from '../MutableAreaDataSource.js' -import {createIndexes, getAreaModel, getClimbModel} from '../../db/index.js' -import {logger} from '../../logger.js' -import {ClimbChangeInputType, ClimbType} from '../../db/ClimbTypes.js' -import {sanitizeDisciplines} from '../../GradeUtils.js' +import { createIndexes, getAreaModel, getClimbModel } from '../../db/index.js' +import { logger } from '../../logger.js' +import { ClimbChangeInputType, ClimbType } from '../../db/ClimbTypes.js' +import { sanitizeDisciplines } from '../../GradeUtils.js' import streamListener from '../../db/edit/streamListener.js' -import {changelogDataSource} from '../ChangeLogDataSource.js' -import inMemoryDB from "../../utils/inMemoryDB.js"; +import { changelogDataSource } from '../ChangeLogDataSource.js' +import inMemoryDB from '../../utils/inMemoryDB.js' export const newSportClimb1: ClimbChangeInputType = { name: 'Cool route 1', @@ -122,16 +122,16 @@ describe('Climb CRUD', () => { pitches: [ { pitchNumber: 1, - grades: {uiaa: '7'}, - disciplines: {sport: true}, + grades: { uiaa: '7' }, + disciplines: { sport: true }, length: 30, boltsCount: 5, description: 'First pitch description' }, { pitchNumber: 2, - grades: {uiaa: '6+'}, - disciplines: {sport: true}, + grades: { uiaa: '6+' }, + disciplines: { sport: true }, length: 40, boltsCount: 6, description: 'Second pitch description' @@ -293,13 +293,13 @@ describe('Climb CRUD', () => { const newIDs = await climbs.addOrUpdateClimbs( testUser, newBoulderingArea.metadata.area_id, - [{...newBoulderProblem1, grade: 'V3'}, // good grade - {...newBoulderProblem2, grade: '5.9'}]) // invalid grade (YDS grade for a boulder problem) + [{ ...newBoulderProblem1, grade: 'V3' }, // good grade + { ...newBoulderProblem2, grade: '5.9' }]) // invalid grade (YDS 

     expect(newIDs).toHaveLength(2)

     const climb1 = await climbs.findOneClimbByMUUID(muid.from(newIDs[0]))
-    expect(climb1?.grades).toEqual({vscale: 'V3'})
+    expect(climb1?.grades).toEqual({ vscale: 'V3' })

     const climb2 = await climbs.findOneClimbByMUUID(muid.from(newIDs[1]))
     expect(climb2?.grades).toEqual(undefined)
@@ -314,11 +314,11 @@ describe('Climb CRUD', () => {
     if (newClimbingArea == null) fail('Expect new area to be created')

     const newclimbs = [
-      {...newSportClimb1, grade: '17'}, // good sport grade
-      {...newSportClimb2, grade: '29/30', disciplines: {trad: true}}, // good trad and slash grade
-      {...newSportClimb2, grade: '5.9'}, // bad AU context grade
-      {...newIceRoute, grade: 'WI4+'}, // good WI AU context grade
-      {...newAidRoute, grade: 'A0'} // good aid grade
+      { ...newSportClimb1, grade: '17' }, // good sport grade
+      { ...newSportClimb2, grade: '29/30', disciplines: { trad: true } }, // good trad and slash grade
+      { ...newSportClimb2, grade: '5.9' }, // bad AU context grade
+      { ...newIceRoute, grade: 'WI4+' }, // good WI AU context grade
+      { ...newAidRoute, grade: 'A0' } // good aid grade
     ]

     const newIDs = await climbs.addOrUpdateClimbs(
@@ -329,12 +329,12 @@ describe('Climb CRUD', () => {
     expect(newIDs).toHaveLength(newclimbs.length)

     const climb1 = await climbs.findOneClimbByMUUID(muid.from(newIDs[0]))
-    expect(climb1?.grades).toEqual({ewbank: '17'})
+    expect(climb1?.grades).toEqual({ ewbank: '17' })
     expect(climb1?.type.sport).toBe(true)
     expect(newSportClimb1?.boltsCount).toEqual(2)

     const climb2 = await climbs.findOneClimbByMUUID(muid.from(newIDs[1]))
-    expect(climb2?.grades).toEqual({ewbank: '29/30'})
+    expect(climb2?.grades).toEqual({ ewbank: '29/30' })
     expect(climb2?.type.sport).toBe(false)
     expect(climb2?.type.trad).toBe(true)
@@ -342,14 +342,14 @@ describe('Climb CRUD', () => {
     expect(climb3?.grades).toEqual(undefined)

     const climb4 = await climbs.findOneClimbByMUUID(muid.from(newIDs[3]))
-    expect(climb4?.grades).toEqual({wi: 'WI4+'})
+    expect(climb4?.grades).toEqual({ wi: 'WI4+' })
     expect(climb4?.type.sport).toBe(false)
     expect(climb4?.type.trad).toBe(false)
     expect(climb4?.type.bouldering).toBe(false)
     expect(climb4?.type.ice).toBe(true)

     const climb5 = await climbs.findOneClimbByMUUID(muid.from(newIDs[4]))
-    expect(climb5?.grades).toEqual({aid: 'A0'})
+    expect(climb5?.grades).toEqual({ aid: 'A0' })
     expect(climb5?.type.sport).toBe(false)
     expect(climb5?.type.trad).toBe(false)
     expect(climb5?.type.aid).toBe(true)
@@ -363,14 +363,14 @@ describe('Climb CRUD', () => {
     const newIDs = await climbs.addOrUpdateClimbs(
       testUser,
       newBoulderingArea.metadata.area_id,
-      [{...newBoulderProblem1, grade: 'V3'}, // good grade
-        {...newBoulderProblem2, grade: '23'}, // bad boulder grade
-        {...newBoulderProblem2, grade: '7B'}]) // invalid grade (font grade for a AU context boulder problem)
+      [{ ...newBoulderProblem1, grade: 'V3' }, // good grade
+        { ...newBoulderProblem2, grade: '23' }, // bad boulder grade
+        { ...newBoulderProblem2, grade: '7B' }]) // invalid grade (font grade for a AU context boulder problem)

     expect(newIDs).toHaveLength(3)

     const climb1 = await climbs.findOneClimbByMUUID(muid.from(newIDs[0]))
-    expect(climb1?.grades).toEqual({vscale: 'V3'})
+    expect(climb1?.grades).toEqual({ vscale: 'V3' })

     const climb2 = await climbs.findOneClimbByMUUID(muid.from(newIDs[1]))
     expect(climb2?.grades).toEqual(undefined)
@@ -389,11 +389,11 @@ describe('Climb CRUD', () => {
     if (newClimbingArea == null) fail('Expect new area to be created in Brazil')

     const newclimbs = [
-      {...newSportClimb1, grade: 'VIsup'}, // good sport grade
-      {...newSportClimb2, grade: 'VIsup/VIIa', disciplines: {trad: true}}, // good trad and slash grade
-      {...newSportClimb2, grade: '5.9'}, // bad BRZ context grade
-      {...newIceRoute, grade: 'WI4+'}, // good WI BRZ context grade
-      {...newAidRoute, grade: 'A0'} // good aid grade
+      { ...newSportClimb1, grade: 'VIsup' }, // good sport grade
+      { ...newSportClimb2, grade: 'VIsup/VIIa', disciplines: { trad: true } }, // good trad and slash grade
+      { ...newSportClimb2, grade: '5.9' }, // bad BRZ context grade
+      { ...newIceRoute, grade: 'WI4+' }, // good WI BRZ context grade
+      { ...newAidRoute, grade: 'A0' } // good aid grade
     ]

     const newIDs = await climbs.addOrUpdateClimbs(
@@ -404,12 +404,12 @@ describe('Climb CRUD', () => {
     expect(newIDs).toHaveLength(newclimbs.length)

     const climb1 = await climbs.findOneClimbByMUUID(muid.from(newIDs[0]))
-    expect(climb1?.grades).toEqual({brazilian_crux: 'VIsup'})
+    expect(climb1?.grades).toEqual({ brazilian_crux: 'VIsup' })
     expect(climb1?.type.sport).toBe(true)
     expect(newSportClimb1?.boltsCount).toEqual(2)

     const climb2 = await climbs.findOneClimbByMUUID(muid.from(newIDs[1]))
-    expect(climb2?.grades).toEqual({brazilian_crux: 'VIsup/VIIa'})
+    expect(climb2?.grades).toEqual({ brazilian_crux: 'VIsup/VIIa' })
     expect(climb2?.type.sport).toBe(false)
     expect(climb2?.type.trad).toBe(true)
@@ -417,14 +417,14 @@ describe('Climb CRUD', () => {
     expect(climb3?.grades).toEqual(undefined)

     const climb4 = await climbs.findOneClimbByMUUID(muid.from(newIDs[3]))
-    expect(climb4?.grades).toEqual({wi: 'WI4+'})
+    expect(climb4?.grades).toEqual({ wi: 'WI4+' })
     expect(climb4?.type.sport).toBe(false)
     expect(climb4?.type.trad).toBe(false)
     expect(climb4?.type.bouldering).toBe(false)
     expect(climb4?.type.ice).toBe(true)

     const climb5 = await climbs.findOneClimbByMUUID(muid.from(newIDs[4]))
-    expect(climb5?.grades).toEqual({aid: 'A0'})
+    expect(climb5?.grades).toEqual({ aid: 'A0' })
     expect(climb5?.type.sport).toBe(false)
     expect(climb5?.type.trad).toBe(false)
     expect(climb5?.type.aid).toBe(true)
@@ -438,14 +438,14 @@ describe('Climb CRUD', () => {
     const newIDs = await climbs.addOrUpdateClimbs(
       testUser,
       newBoulderingArea.metadata.area_id,
-      [{...newBoulderProblem1, grade: 'V3'}, // good grade
-        {...newBoulderProblem2, grade: '23'}, // bad boulder grade
-        {...newBoulderProblem2, grade: '7B'}]) // invalid grade (font grade for a BRZ context boulder problem)
+      [{ ...newBoulderProblem1, grade: 'V3' }, // good grade
+        { ...newBoulderProblem2, grade: '23' }, // bad boulder grade
+        { ...newBoulderProblem2, grade: '7B' }]) // invalid grade (font grade for a BRZ context boulder problem)

     expect(newIDs).toHaveLength(3)

     const climb1 = await climbs.findOneClimbByMUUID(muid.from(newIDs[0]))
-    expect(climb1?.grades).toEqual({vscale: 'V3'})
+    expect(climb1?.grades).toEqual({ vscale: 'V3' })

     const climb2 = await climbs.findOneClimbByMUUID(muid.from(newIDs[1]))
     expect(climb2?.grades).toEqual(undefined)
@@ -465,21 +465,21 @@ describe('Climb CRUD', () => {
     const newIDs = await climbs.addOrUpdateClimbs(
       testUser,
       newClimbingArea.metadata.area_id,
-      [{...newSportClimb1, grade: '6+'}, // good UIAA grade
-        {...newSportClimb2, grade: '7-'}, // good UIAA grade
-        {...newSportClimb2, grade: '5'}, // good UIAA grade
-        {...newSportClimb1, grade: 'V6'}]) // bad UIAA grade (V-scale used)
+      [{ ...newSportClimb1, grade: '6+' }, // good UIAA grade
+        { ...newSportClimb2, grade: '7-' }, // good UIAA grade
+        { ...newSportClimb2, grade: '5' }, // good UIAA grade
+        { ...newSportClimb1, grade: 'V6' }]) // bad UIAA grade (V-scale used)

     expect(newIDs).toHaveLength(4)

     const climb1 = await climbs.findOneClimbByMUUID(muid.from(newIDs[0]))
-    expect(climb1?.grades).toEqual({uiaa: '6+'})
+    expect(climb1?.grades).toEqual({ uiaa: '6+' })

     const climb2 = await climbs.findOneClimbByMUUID(muid.from(newIDs[1]))
-    expect(climb2?.grades).toEqual({uiaa: '7-'})
+    expect(climb2?.grades).toEqual({ uiaa: '7-' })

     const climb3 = await climbs.findOneClimbByMUUID(muid.from(newIDs[2]))
-    expect(climb3?.grades).toEqual({uiaa: '5'})
+    expect(climb3?.grades).toEqual({ uiaa: '5' })

     const climb4 = await climbs.findOneClimbByMUUID(muid.from(newIDs[3]))
     expect(climb4?.grades).toEqual(undefined)
@@ -510,7 +510,7 @@ describe('Climb CRUD', () => {
         id: newIDs[0],
         name: 'new name A100',
         grade: '6b',
-        disciplines: sanitizeDisciplines({bouldering: true})
+        disciplines: sanitizeDisciplines({ bouldering: true })
       },
       {
         id: newIDs[1],
@@ -652,8 +652,8 @@ describe('Climb CRUD', () => {
           id: originalPitch1ID,
           parentId: originalPitch1ParentID,
           pitchNumber: 1,
-          grades: {ewbank: '19'},
-          disciplines: {sport: false, alpine: true},
+          grades: { ewbank: '19' },
+          disciplines: { sport: false, alpine: true },
           length: 20,
           boltsCount: 6,
           description: 'Updated first pitch description'
@@ -663,8 +663,8 @@ describe('Climb CRUD', () => {
           id: originalPitch2ID,
           parentId: originalPitch2ParentID,
           pitchNumber: 2,
-          grades: {ewbank: '18'},
-          disciplines: {sport: false, alpine: true},
+          grades: { ewbank: '18' },
+          disciplines: { sport: false, alpine: true },
           length: 25,
           boltsCount: 5,
           description: 'Updated second pitch description'
diff --git a/src/model/__tests__/MutableOrganizationDataSource.ts b/src/model/__tests__/MutableOrganizationDataSource.ts
index f951e5d..651198b 100644
--- a/src/model/__tests__/MutableOrganizationDataSource.ts
+++ b/src/model/__tests__/MutableOrganizationDataSource.ts
@@ -2,11 +2,11 @@ import muuid from 'uuid-mongodb'
 import MutableOrganizationDataSource from '../MutableOrganizationDataSource.js'
 import MutableAreaDataSource from '../MutableAreaDataSource.js'

-import {createIndexes, getAreaModel, getOrganizationModel} from '../../db/index.js'
-import {OrganizationEditableFieldsType, OrgType} from '../../db/OrganizationTypes.js'
-import {AreaType} from '../../db/AreaTypes.js'
-import {muuidToString} from '../../utils/helpers.js'
-import inMemoryDB from "../../utils/inMemoryDB.js";
+import { createIndexes, getAreaModel, getOrganizationModel } from '../../db/index.js'
+import { OrganizationEditableFieldsType, OrgType } from '../../db/OrganizationTypes.js'
+import { AreaType } from '../../db/AreaTypes.js'
+import { muuidToString } from '../../utils/helpers.js'
+import inMemoryDB from '../../utils/inMemoryDB.js'

 describe('Organization', () => {
   let organizations: MutableOrganizationDataSource
@@ -62,7 +62,7 @@ describe('Organization', () => {
   it('should successfully create a document when passed valid input', async () => {
     const newOrg = await organizations.addOrganization(testUser, OrgType.localClimbingOrganization, fullOrg)
-    const document = {...fullOrg}
+    const document = { ...fullOrg }
     expect(newOrg.displayName).toBe(document.displayName)
     expect(newOrg.content?.website).toBe(document.website)
     expect(newOrg.content?.email).toBe(document.email)
@@ -99,7 +99,7 @@ describe('Organization', () => {
       associatedAreaIds: [ca.metadata.area_id, wa.metadata.area_id]
     }
     await organizations.updateOrganization(testUser, newOrg.orgId, document)
-    const areaIdSearchCursor = await organizations.findOrganizationsByFilter({associatedAreaIds: {includes: [ca.metadata.area_id]}})
+    const areaIdSearchCursor = await organizations.findOrganizationsByFilter({ associatedAreaIds: { includes: [ca.metadata.area_id] } })
     const areaIdSearchRes = await areaIdSearchCursor.toArray()
     expect(areaIdSearchRes).toHaveLength(1)
     expect(areaIdSearchRes[0]._id).toEqual(newOrg._id)
@@ -108,7 +108,7 @@
   describe('update', () => {
     it('should succeed on valid input', async () => {
       const newOrg = await organizations.addOrganization(testUser, OrgType.localClimbingOrganization, emptyOrg)
-      const document = {...fullOrg}
+      const document = { ...fullOrg }
       const updatedOrg = await organizations.updateOrganization(testUser, newOrg.orgId, document)

       expect(updatedOrg).toBeDefined()
@@ -116,9 +116,9 @@
        fail('should not reach here.')
       }
       expect(updatedOrg.associatedAreaIds.map(muuidToString).sort())
-      .toStrictEqual(document?.associatedAreaIds?.map(muuidToString).sort())
+        .toStrictEqual(document?.associatedAreaIds?.map(muuidToString).sort())
       expect(updatedOrg.excludedAreaIds.map(muuidToString).sort())
-      .toStrictEqual(document?.excludedAreaIds?.map(muuidToString).sort())
+        .toStrictEqual(document?.excludedAreaIds?.map(muuidToString).sort())
       expect(updatedOrg.displayName).toBe(document.displayName)
       expect(updatedOrg.content?.website).toBe(document.website)
       expect(updatedOrg.content?.email).toBe(document.email)
@@ -139,8 +139,8 @@ describe('Organization', () => {
      associatedAreaIds: [muuid.v4()]
     }
     await expect(organizations.updateOrganization(testUser, newOrg.orgId, document))
-      .rejects
-      .toThrow(/Organization update error. Reason: Associated areas not found: /)
+        .rejects
+        .toThrow(/Organization update error. Reason: Associated areas not found: /)
     })
   })
 })
diff --git a/src/model/__tests__/UserDataSource.ts b/src/model/__tests__/UserDataSource.ts
index e460ac4..04ff694 100644
--- a/src/model/__tests__/UserDataSource.ts
+++ b/src/model/__tests__/UserDataSource.ts
@@ -1,11 +1,11 @@
 import mongoose from 'mongoose'
 import muuid from 'uuid-mongodb'
-import {jest} from '@jest/globals'
+import { jest } from '@jest/globals'

-import {getUserModel} from '../../db/index.js'
+import { getUserModel } from '../../db/index.js'
 import UserDataSource from '../UserDataSource.js'
-import {UpdateProfileGQLInput} from '../../db/UserTypes.js'
-import inMemoryDB from "../../utils/inMemoryDB.js";
+import { UpdateProfileGQLInput } from '../../db/UserTypes.js'
+import inMemoryDB from '../../utils/inMemoryDB.js'

 describe('UserDataSource', () => {
   let users: UserDataSource
@@ -86,7 +86,7 @@ describe('UserDataSource', () => {
     expect(u2?._id.toUUID().toString()).toEqual(input.userUuid)

     // should allow website as an empty string to clear existing value
-    await users.createOrUpdateUserProfile(updater, {userUuid: input.userUuid, website: ''})
+    await users.createOrUpdateUserProfile(updater, { userUuid: input.userUuid, website: '' })

     u2 = await users.getUserPublicProfile(username)
@@ -144,8 +144,8 @@ describe('UserDataSource', () => {
     await users.createOrUpdateUserProfile(updater, input)

     jest
-      .spyOn(UserDataSource, 'calculateLastUpdatedInDays')
-      .mockImplementation(() => 14)
+        .spyOn(UserDataSource, 'calculateLastUpdatedInDays')
+        .mockImplementation(() => 14)

     const newInput: UpdateProfileGQLInput = {
       userUuid: input.userUuid,
diff --git a/src/model/__tests__/ticks.ts b/src/model/__tests__/ticks.ts
index 7da08d5..7c934b6 100644
--- a/src/model/__tests__/ticks.ts
+++ b/src/model/__tests__/ticks.ts
@@ -1,11 +1,11 @@
-import {produce} from 'immer'
+import { produce } from 'immer'
 import TickDataSource from '../TickDataSource.js'
-import {getTickModel, getUserModel} from '../../db/index.js'
-import {TickInput} from '../../db/TickTypes.js'
+import { getTickModel, getUserModel } from '../../db/index.js'
+import { TickInput } from '../../db/TickTypes.js'
 import muuid from 'uuid-mongodb'
 import UserDataSource from '../UserDataSource.js'
-import {UpdateProfileGQLInput} from '../../db/UserTypes.js'
-import inMemoryDB from "../../utils/inMemoryDB.js";
+import { UpdateProfileGQLInput } from '../../db/UserTypes.js'
+import inMemoryDB from '../../utils/inMemoryDB.js'

 const userId = muuid.v4()
@@ -76,7 +76,7 @@ describe('Ticks', () => {
   // test adding tick
   it('should create a new tick for the associated climb', async () => {
     const tick = await ticks.addTick(toTest)
-    const newTick = await tickModel.findOne({userId: toTest.userId})
+    const newTick = await tickModel.findOne({ userId: toTest.userId })
     expect(newTick?._id).toEqual(tick._id)
   })
@@ -87,7 +87,7 @@ describe('Ticks', () => {
     if (tick == null) {
       fail('Tick should not be null')
     }
-    const newTick = await ticks.editTick({_id: tick._id}, tickUpdate)
+    const newTick = await ticks.editTick({ _id: tick._id }, tickUpdate)

     if (newTick == null) {
       fail('The new tick should not be null')
@@ -106,7 +106,7 @@ describe('Ticks', () => {
     }

     await ticks.deleteTick(tick._id)
-    const newTick = await tickModel.findOne({_id: tick._id})
+    const newTick = await tickModel.findOne({ _id: tick._id })
     expect(newTick).toBeNull()
   })
@@ -120,13 +120,13 @@ describe('Ticks', () => {
     }
     expect(newTicks?.length).toEqual(testImport.length)

-    const tick1 = await tickModel.findOne({_id: newTicks[0]._id})
+    const tick1 = await tickModel.findOne({ _id: newTicks[0]._id })
     expect(tick1?._id).toEqual(newTicks[0]._id)

-    const tick2 = await tickModel.findOne({_id: newTicks[1]._id})
+    const tick2 = await tickModel.findOne({ _id: newTicks[1]._id })
     expect(tick2?._id).toEqual(newTicks[1]._id)

-    const tick3 = await tickModel.findOne({_id: newTicks[2]._id})
+    const tick3 = await tickModel.findOne({ _id: newTicks[2]._id })
     expect(tick3?._id).toEqual(newTicks[2]._id)
   })
@@ -143,7 +143,7 @@ describe('Ticks', () => {
       fail('Should add a new tick')
     }

-    const newTicks = await ticks.ticksByUser({userId})
+    const newTicks = await ticks.ticksByUser({ userId })
     expect(newTicks.length).toEqual(1)
   })
@@ -168,7 +168,7 @@ describe('Ticks', () => {
     }

     await ticks.deleteAllTicks(userId.toUUID().toString())
-    const newTick = await tickModel.findOne({userId})
+    const newTick = await tickModel.findOne({ userId })
     expect(newTick).toBeNull()
   })
@@ -181,7 +181,7 @@ describe('Ticks', () => {
     }

     await ticks.deleteImportedTicks(userId.toUUID().toString())
-    const newTick = await tickModel.findOne({_id: OBTick._id})
+    const newTick = await tickModel.findOne({ _id: OBTick._id })
     expect(newTick?._id).toEqual(OBTick._id)
     expect(newTick?.notes).toEqual('Not sandbagged')
   })
diff --git a/src/model/__tests__/updateAreas.ts b/src/model/__tests__/updateAreas.ts
index e5a1d61..347bc52 100644
--- a/src/model/__tests__/updateAreas.ts
+++ b/src/model/__tests__/updateAreas.ts
@@ -1,11 +1,11 @@
 import muuid from 'uuid-mongodb'
-import {geometry} from '@turf/helpers'
+import { geometry } from '@turf/helpers'

 import MutableAreaDataSource from '../MutableAreaDataSource.js'
 import MutableClimbDataSource from '../MutableClimbDataSource.js'
-import {createIndexes, getAreaModel, getClimbModel} from '../../db/index.js'
-import {AreaEditableFieldsType, UpdateSortingOrderType} from '../../db/AreaTypes.js'
-import inMemoryDB from "../../utils/inMemoryDB.js";
+import { createIndexes, getAreaModel, getClimbModel } from '../../db/index.js'
+import { AreaEditableFieldsType, UpdateSortingOrderType } from '../../db/AreaTypes.js'
+import inMemoryDB from '../../utils/inMemoryDB.js'

 describe('Areas', () => {
   let areas: MutableAreaDataSource
@@ -71,17 +71,17 @@ describe('Areas', () => {
     // Verify paths and ancestors
     if (theBug != null) { // make TS happy
       expect(theBug.ancestors)
-      .toEqual(`${canada.metadata.area_id.toUUID().toString()},${theBug?.metadata.area_id.toUUID().toString()}`)
+        .toEqual(`${canada.metadata.area_id.toUUID().toString()},${theBug?.metadata.area_id.toUUID().toString()}`)
       expect(theBug.pathTokens)
-      .toEqual([canada.area_name, theBug.area_name])
+        .toEqual([canada.area_name, theBug.area_name])
     }
   })

   it('should allow adding child areas to empty leaf area', async () => {
     let parent = await areas.addArea(testUser, 'My house', null, 'can')
-    await areas.updateArea(testUser, parent.metadata.area_id, {isLeaf: true, isBoulder: true})
+    await areas.updateArea(testUser, parent.metadata.area_id, { isLeaf: true, isBoulder: true })

-    const newClimb = await climbs.addOrUpdateClimbs(testUser, parent.metadata.area_id, [{name: 'Big Mac'}])
+    const newClimb = await climbs.addOrUpdateClimbs(testUser, parent.metadata.area_id, [{ name: 'Big Mac' }])

     // Try to add a new area when there's already a climb
     await expect(areas.addArea(testUser, 'Kitchen', parent.metadata.area_id)).rejects.toThrow(/Adding new areas to a leaf or boulder area is not allowed/)
@@ -155,12 +155,12 @@ describe('Areas', () => {
   it('should not update country name and code', async () => {
     const country = await areas.addCountry('lao')
     if (country == null) fail()
-    await expect(areas.updateArea(testUser, country.metadata.area_id, {areaName: 'Foo'})).rejects.toThrowError()
+    await expect(areas.updateArea(testUser, country.metadata.area_id, { areaName: 'Foo' })).rejects.toThrowError()

     // eslint-disable-next-line
     await new Promise(res => setTimeout(res, 2000))

-    await expect(areas.updateArea(testUser, country.metadata.area_id, {shortCode: 'Foo'})).rejects.toThrowError()
+    await expect(areas.updateArea(testUser, country.metadata.area_id, { shortCode: 'Foo' })).rejects.toThrowError()
   })

   it('should delete a subarea', async () => {
@@ -231,23 +231,23 @@ describe('Areas', () => {
     const fr = await areas.addCountry('fra')
     await areas.addArea(testUser, 'Verdon Gorge', fr.metadata.area_id)
     await expect(areas.addArea(testUser, 'Verdon Gorge', fr.metadata.area_id))
-      .rejects.toThrowError('E11000 duplicate key error')
+        .rejects.toThrowError('E11000 duplicate key error')
   })

   it('should fail when adding without a parent country', async () => {
     await expect(areas.addArea(testUser, 'Peak District ', null, 'GB'))
-      .rejects.toThrowError()
+        .rejects.toThrowError()
   })

   it('should fail when adding with a non-existent parent id', async () => {
     const notInDb = muuid.from('abf6cb8b-8461-45c3-b46b-5997444be867')
     await expect(areas.addArea(testUser, 'Land\'s End ', notInDb))
-      .rejects.toThrowError()
+        .rejects.toThrowError()
   })

   it('should fail when adding with null parents', async () => {
     await expect(areas.addArea(testUser, 'Land\'s End ', null, '1q1'))
-      .rejects.toThrowError()
+        .rejects.toThrowError()
   })

   it('should update areas sorting order', async () => {
diff --git a/src/server.ts b/src/server.ts
index be73772..c029bf1 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -1,29 +1,29 @@
-import {ApolloServer} from 'apollo-server-express'
+import { ApolloServer } from 'apollo-server-express'
 import mongoose from 'mongoose'
-import {applyMiddleware} from 'graphql-middleware'
-import {graphqlSchema} from './graphql/resolvers.js'
+import { applyMiddleware } from 'graphql-middleware'
+import { graphqlSchema } from './graphql/resolvers.js'
 import MutableAreaDataSource from './model/MutableAreaDataSource.js'
 import ChangeLogDataSource from './model/ChangeLogDataSource.js'
 import MutableMediaDataSource from './model/MutableMediaDataSource.js'
 import MutableClimbDataSource from './model/MutableClimbDataSource.js'
 import TickDataSource from './model/TickDataSource.js'
-import {authMiddleware, createContext} from './auth/middleware.js'
+import { authMiddleware, createContext } from './auth/middleware.js'
 import permissions from './auth/permissions.js'
-import {localDevBypassAuthContext, localDevBypassAuthMiddleware} from './auth/local-dev/middleware.js'
+import { localDevBypassAuthContext, localDevBypassAuthMiddleware } from './auth/local-dev/middleware.js'
 import localDevBypassAuthPermissions from './auth/local-dev/permissions.js'
 import XMediaDataSource from './model/XMediaDataSource.js'
 import PostDataSource from './model/PostDataSource.js'
 import MutableOrgDS from './model/MutableOrganizationDataSource.js'
-import type {Context} from './types.js'
-import type {DataSources} from 'apollo-server-core/dist/graphqlOptions'
+import type { Context } from './types.js'
+import type { DataSources } from 'apollo-server-core/dist/graphqlOptions'
 import UserDataSource from './model/UserDataSource.js'
 import express from 'express'
 import * as http from 'http'
 import bodyParser from 'body-parser'
-import {importJsonRequestHandler} from './db/import/json/request-handler.js'
+import { importJsonRequestHandler } from './db/import/json/request-handler.js'

-export async function createServer(): Promise<{ app: express.Application, server: ApolloServer }> {
+export async function createServer (): Promise<{ app: express.Application, server: ApolloServer }> {
   const schema = applyMiddleware(
     graphqlSchema,
     (process.env.LOCAL_DEV_BYPASS_AUTH === 'true' ? localDevBypassAuthPermissions : permissions).generate(graphqlSchema)
@@ -61,16 +61,16 @@ export async function createServer(): Promise<{ app: express.Application, server
     bodyParser.json(),
     importJsonRequestHandler
   ])
-  server.applyMiddleware({app, path: '/'})
+  server.applyMiddleware({ app, path: '/' })

-  return {app, server}
+  return { app, server }
 }

-export async function startServer({app, server, port = 4000}: {
-  app: express.Application,
-  server: ApolloServer,
+export async function startServer ({ app, server, port = 4000 }: {
+  app: express.Application
+  server: ApolloServer
   port?: number
-}) {
+}): Promise<void> {
   const httpServer = http.createServer(app)

   httpServer.on('error', (e) => {
@@ -78,6 +78,6 @@ export async function startServer({app, server, port = 4000}: {
     throw e
   })

-  await new Promise((resolve) => httpServer.listen({port}, resolve))
+  await new Promise((resolve) => httpServer.listen({ port }, resolve))
   console.log(`🚀 Server ready at http://localhost:${port}${server.graphqlPath}`)
-}
\ No newline at end of file
+}
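For reference, the reformatted `createServer`/`startServer` pair keeps the same calling convention shown in the hunk above. A minimal boot sketch under that assumption — the `main` wrapper, the explicit port and the error handling are illustrative and not part of this patch, and database connection setup is omitted:

```ts
import { createServer, startServer } from './server.js'

// Build the Express app plus Apollo server, then start listening.
async function main (): Promise<void> {
  const { app, server } = await createServer()
  await startServer({ app, server, port: 4000 })
}

main().catch(err => {
  console.error(err)
  process.exit(1)
})
```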
diff --git a/src/utils/helpers.ts b/src/utils/helpers.ts
index bee0c6a..9ed4f04 100644
--- a/src/utils/helpers.ts
+++ b/src/utils/helpers.ts
@@ -1,6 +1,6 @@
-import {MUUID} from 'uuid-mongodb'
-import {Point} from '@turf/helpers'
-import {ClientSession} from "mongoose";
+import { MUUID } from 'uuid-mongodb'
+import { Point } from '@turf/helpers'
+import { ClientSession } from 'mongoose'

 export const muuidToString = (m: MUUID): string => m.toUUID().toString()

@@ -16,7 +16,7 @@ export const muuidToString = (m: MUUID): string => m.toUUID().toString()
  * }
  * @param _value
  */
-export function exhaustiveCheck(_value: never): never {
+export function exhaustiveCheck (_value: never): never {
   throw new Error(`ERROR! Enum not handled for ${JSON.stringify(_value)}`)
 }

@@ -26,14 +26,13 @@ export const geojsonPointToLatitude = (point?: Point): number | undefined => poi
 export const NON_ALPHANUMERIC_REGEX = /[\W_\s]+/g
 export const canonicalizeUsername = (username: string): string => username.replaceAll(NON_ALPHANUMERIC_REGEX, '')
-
 // withTransaction() doesn't return the callback result
 // see https://jira.mongodb.org/browse/NODE-2014
 export const withTransaction = async <T>(session: ClientSession, closure: () => Promise<T>): Promise<T | undefined> => {
-  let result: T | undefined;
+  let result: T | undefined
   await session.withTransaction(async () => {
-    result = await closure();
-    return result;
-  });
-  return result;
-};
\ No newline at end of file
+    result = await closure()
+    return result
+  })
+  return result
+}
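A usage sketch for the `withTransaction` helper above, assuming the generic `<T>` signature shown in the context line; `getAreaModel` comes from `../db/index.js` as elsewhere in this patch, and the count query is only a placeholder for real transactional work:

```ts
import mongoose from 'mongoose'
import { getAreaModel } from '../db/index.js'
import { withTransaction } from './helpers.js'

// Run a closure inside a transaction and keep its return value,
// which session.withTransaction() alone does not hand back.
async function countAreasInTransaction (): Promise<number | undefined> {
  const session = await mongoose.startSession()
  try {
    return await withTransaction(session, async () =>
      await getAreaModel().countDocuments().session(session).exec()
    )
  } finally {
    await session.endSession()
  }
}
```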
diff --git a/src/utils/inMemoryDB.ts b/src/utils/inMemoryDB.ts
index a779055..ed9e623 100644
--- a/src/utils/inMemoryDB.ts
+++ b/src/utils/inMemoryDB.ts
@@ -1,9 +1,9 @@
-import mongoose, {ConnectOptions} from 'mongoose'
-import {ChangeStream, ChangeStreamDocument, MongoClient} from 'mongodb'
-import {MongoMemoryReplSet} from 'mongodb-memory-server'
-import {checkVar, defaultPostConnect} from '../db/index.js'
-import {logger} from '../logger.js'
-import {testStreamListener} from "../db/edit/streamListener";
+import mongoose, { ConnectOptions } from 'mongoose'
+import { ChangeStream, ChangeStreamDocument, MongoClient } from 'mongodb'
+import { MongoMemoryReplSet } from 'mongodb-memory-server'
+import { checkVar, defaultPostConnect } from '../db/index.js'
+import { logger } from '../logger.js'
+import { testStreamListener } from '../db/edit/streamListener'

 /**
  * In-memory Mongo replset used for testing.
@@ -19,7 +19,7 @@ let stream: ChangeStream | undefined
 export const connect = async (onChange?: (change: ChangeStreamDocument) => void): Promise<void> => {
   mongod = await MongoMemoryReplSet.create({
     // Stream listener listens on DB denoted by 'MONGO_DBNAME' env var.
-    replSet: {count: 1, storageEngine: 'wiredTiger', dbName: checkVar('MONGO_DBNAME')}
+    replSet: { count: 1, storageEngine: 'wiredTiger', dbName: checkVar('MONGO_DBNAME') }
   })
   const uri = await mongod.getUri(checkVar('MONGO_DBNAME'))
   logger.info(`Connecting to in-memory database ${uri}`)
@@ -28,7 +28,7 @@ export const connect = async (onChange?: (change: ChangeStreamDocument) => void)
   }

   await mongoose.connect(uri, mongooseOpts)
-  stream = await defaultPostConnect(() => testStreamListener(onChange))
+  stream = await defaultPostConnect(async () => await testStreamListener(onChange))
 }

 /**
@@ -80,4 +80,4 @@
   insertDirectly: (collection: string, documents: any[]) => Promise
 }

-export default {connect, close, clear, insertDirectly, stream}
+export default { connect, close, clear, insertDirectly, stream }
diff --git a/src/utils/testUtils.ts b/src/utils/testUtils.ts
index 582de40..6f6957c 100644
--- a/src/utils/testUtils.ts
+++ b/src/utils/testUtils.ts
@@ -1,11 +1,11 @@
 import jwt from 'jsonwebtoken'
-import {jest} from '@jest/globals'
+import { jest } from '@jest/globals'
 import request from 'supertest'
-import type {InMemoryDB} from './inMemoryDB.js'
+import type { InMemoryDB } from './inMemoryDB.js'
 import inMemoryDB from './inMemoryDB.js'
-import {createServer} from '../server.js'
-import {ApolloServer} from 'apollo-server-express'
-import express from "express";
+import { createServer } from '../server.js'
+import { ApolloServer } from 'apollo-server-express'
+import express from 'express'

 const PORT = 4000
@@ -24,14 +24,14 @@ interface QueryAPIProps {
  * so we can pretend to have an role we want when calling the API.
  */
 export const queryAPI = async ({
-    query,
-    operationName,
-    variables,
-    userUuid,
-    roles = [],
-    app,
-    port = PORT
-  }: QueryAPIProps): Promise => {
+  query,
+  operationName,
+  variables,
+  userUuid,
+  roles = [],
+  app,
+  port = PORT
+}: QueryAPIProps): Promise => {
   // Avoid needing to pass in actual signed tokens.
   const jwtSpy = jest.spyOn(jwt, 'verify')
   jwtSpy.mockImplementation(() => {
@@ -42,11 +42,11 @@ export const queryAPI = async ({
     }
   })

-  const queryObj = {query, operationName, variables}
-  return request(app ?? `http://localhost:${port}`)
-    .post('/')
-    .send(queryObj)
-    .set('Authorization', 'Bearer placeholder-jwt-see-SpyOn');
+  const queryObj = { query, operationName, variables }
+  return await request(app ?? `http://localhost:${port}`)
+    .post('/')
+    .send(queryObj)
+    .set('Authorization', 'Bearer placeholder-jwt-see-SpyOn')
 }

 export interface SetUpServerReturnType {
@@ -60,6 +60,6 @@
  */
 export const setUpServer = async (): Promise<SetUpServerReturnType> => {
   await inMemoryDB.connect()
-  const {app, server} = await createServer()
-  return {app, server, inMemoryDB}
+  const { app, server } = await createServer()
+  return { app, server, inMemoryDB }
 }
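Finally, a sketch of how the in-memory database helper above is typically wired into a Jest suite. The hook layout mirrors the exported `connect`/`clear`/`close` functions from `inMemoryDB.ts`; the zero-argument `close()` call and the per-test `clear()` are assumptions rather than something this patch shows:

```ts
import inMemoryDB from './inMemoryDB.js'

// Spin up the in-memory replica set once per test file,
// wipe data between tests, and shut everything down at the end.
beforeAll(async () => {
  await inMemoryDB.connect()
})

afterEach(async () => {
  await inMemoryDB.clear()
})

afterAll(async () => {
  await inMemoryDB.close()
})
```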