diff --git a/.run/Template Jest.run.xml b/.run/Template Jest.run.xml
new file mode 100644
index 00000000..a9f73fe3
--- /dev/null
+++ b/.run/Template Jest.run.xml
@@ -0,0 +1,10 @@
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 2c7343b1..f242b393 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -15,7 +15,7 @@
],
"skipFiles": [
"/**"
- ],
+ ]
},
{
"type": "node",
@@ -28,7 +28,7 @@
],
"skipFiles": [
"/**"
- ],
+ ]
},
{
"type": "node",
@@ -41,7 +41,7 @@
],
"skipFiles": [
"/**"
- ],
+ ]
},
{
"type": "node",
@@ -55,7 +55,7 @@
"outFiles": [
"${workspaceFolder}/build/**/*.js"
],
- "console": "integratedTerminal",
+ "console": "integratedTerminal"
},
{
"type": "node",
@@ -90,7 +90,26 @@
"history"
],
"console": "integratedTerminal",
- "internalConsoleOptions": "neverOpen",
+ "internalConsoleOptions": "neverOpen"
+ },
+ {
+ "type": "node",
+ "name": "vscode-jest-tests.v2",
+ "request": "launch",
+ "env": {
+ "NODE_OPTIONS": "--experimental-vm-modules"
+ },
+ "args": [
+ "${workspaceRoot}/node_modules/.bin/jest",
+ "--runInBand",
+ "--watchAll=false",
+ "--testNamePattern",
+ "${jest.testNamePattern}",
+ "--runTestsByPath",
+ "${jest.testFile}"
+ ],
+ "console": "integratedTerminal",
+ "internalConsoleOptions": "neverOpen"
}
]
}
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
index dacfb92f..541a13ce 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -16,4 +16,4 @@
"javascript.format.enable": false,
"javascript.format.semicolons": "remove",
"typescript.format.enable": false
-}
+}
\ No newline at end of file
diff --git a/package.json b/package.json
index 419476ea..e13d96b6 100644
--- a/package.json
+++ b/package.json
@@ -21,7 +21,8 @@
"supertest": "^6.3.3",
"ts-jest": "^29.0.5",
"ts-standard": "^12.0.0",
- "typescript": "4.9.5"
+ "typescript": "4.9.5",
+ "wait-for-expect": "^3.0.2"
},
"dependencies": {
"@babel/runtime": "^7.17.2",
@@ -36,15 +37,18 @@
"@turf/helpers": "^6.5.0",
"@types/uuid": "^8.3.3",
"apollo-datasource-mongodb": "^0.5.4",
- "apollo-server": "^3.9.0",
+ "apollo-server-core": "^3.13.0",
+ "apollo-server-express": "^3.13.0",
"auth0": "^3.4.0",
"axios": "^1.3.6",
+ "body-parser": "^1.20.2",
"cors": "^2.8.5",
"date-fns": "^2.30.0",
"dot-object": "^2.1.4",
"dotenv": "^10.0.0",
+ "express": "^4.18.2",
"glob": "^10.2.2",
- "graphql": "^16.5.0",
+ "graphql": "^16.8.1",
"graphql-middleware": "^6.1.31",
"graphql-shield": "^7.5.0",
"i18n-iso-countries": "^7.5.0",
@@ -104,4 +108,4 @@
"engines": {
"node": ">=16.14.0"
}
-}
\ No newline at end of file
+}
diff --git a/src/__tests__/areas.ts b/src/__tests__/areas.ts
index 6a9c64e9..995b3ccc 100644
--- a/src/__tests__/areas.ts
+++ b/src/__tests__/areas.ts
@@ -1,12 +1,14 @@
-import { ApolloServer } from 'apollo-server'
+import { ApolloServer } from 'apollo-server-express'
import muuid from 'uuid-mongodb'
import { jest } from '@jest/globals'
import MutableAreaDataSource from '../model/MutableAreaDataSource.js'
import MutableOrganizationDataSource from '../model/MutableOrganizationDataSource.js'
import { AreaType } from '../db/AreaTypes.js'
-import { OrgType, OrganizationType, OrganizationEditableFieldsType } from '../db/OrganizationTypes.js'
+import { OrganizationEditableFieldsType, OrganizationType, OrgType } from '../db/OrganizationTypes.js'
import { queryAPI, setUpServer } from '../utils/testUtils.js'
import { muuidToString } from '../utils/helpers.js'
+import { InMemoryDB } from '../utils/inMemoryDB.js'
+import express from 'express'
jest.setTimeout(60000)
@@ -14,7 +16,8 @@ describe('areas API', () => {
let server: ApolloServer
let user: muuid.MUUID
let userUuid: string
- let inMemoryDB
+ let app: express.Application
+ let inMemoryDB: InMemoryDB
// Mongoose models for mocking pre-existing state.
let areas: MutableAreaDataSource
@@ -24,7 +27,7 @@ describe('areas API', () => {
let wa: AreaType
beforeAll(async () => {
- ({ server, inMemoryDB } = await setUpServer())
+ ({ server, inMemoryDB, app } = await setUpServer())
// Auth0 serializes uuids in "relaxed" mode, resulting in this hex string format
// "59f1d95a-627d-4b8c-91b9-389c7424cb54" instead of base64 "WfHZWmJ9S4yRuTicdCTLVA==".
user = muuid.mode('relaxed').v4()
@@ -77,7 +80,8 @@ describe('areas API', () => {
query: areaQuery,
operationName: 'area',
variables: { input: wa.metadata.area_id },
- userUuid
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
@@ -92,7 +96,8 @@ describe('areas API', () => {
query: areaQuery,
operationName: 'area',
variables: { input: ca.metadata.area_id },
- userUuid
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const areaResult = response.body.data.area
diff --git a/src/__tests__/bulkImport.test.ts b/src/__tests__/bulkImport.test.ts
new file mode 100644
index 00000000..b67aca3e
--- /dev/null
+++ b/src/__tests__/bulkImport.test.ts
@@ -0,0 +1,137 @@
+import {ApolloServer} from "apollo-server-express";
+import muuid from "uuid-mongodb";
+import express from "express";
+import {InMemoryDB} from "../utils/inMemoryDB.js";
+import {queryAPI, setUpServer} from "../utils/testUtils.js";
+import {muuidToString} from "../utils/helpers.js";
+import exampleImportData from './import-example.json' assert {type: 'json'};
+import {AreaType} from "../db/AreaTypes.js";
+import {BulkImportResultType} from "../db/BulkImportTypes.js";
+import MutableClimbDataSource from "../model/MutableClimbDataSource.js";
+import BulkImportDataSource from "../model/BulkImportDataSource.js";
+
+describe('bulkImportAreas', () => {
+ const query = `
+ mutation bulkImportAreas($input: BulkImportInput!) {
+ bulkImportAreas(input: $input) {
+ addedAreas {
+ uuid
+ metadata {
+ area_id
+ }
+ }
+ updatedAreas {
+ uuid
+ metadata {
+ area_id
+ }
+ }
+ addedOrUpdatedClimbs {
+ id
+ }
+ }
+ }
+ `
+
+ let server: ApolloServer
+ let user: muuid.MUUID
+ let userUuid: string
+ let app: express.Application
+ let inMemoryDB: InMemoryDB
+ let testArea: AreaType
+
+ let bulkImport: BulkImportDataSource
+ let climbs: MutableClimbDataSource
+
+ beforeAll(async () => {
+ ({server, inMemoryDB, app} = await setUpServer())
+ // Auth0 serializes uuids in "relaxed" mode, resulting in this hex string format
+ // "59f1d95a-627d-4b8c-91b9-389c7424cb54" instead of base64 "WfHZWmJ9S4yRuTicdCTLVA==".
+ user = muuid.mode('relaxed').v4()
+ userUuid = muuidToString(user)
+
+ bulkImport = BulkImportDataSource.getInstance()
+ climbs = MutableClimbDataSource.getInstance()
+ })
+
+ beforeEach(async () => {
+ await inMemoryDB.clear()
+ await bulkImport.addCountry('usa')
+ testArea = await bulkImport.addArea(user, "Test Area", null, "us")
+ })
+
+ afterAll(async () => {
+ await server.stop()
+ await inMemoryDB.close()
+ })
+
+ it('should return 403 if no user', async () => {
+ const res = await queryAPI({
+ app,
+ query,
+ operationName: 'bulkImportAreas',
+ variables: {input: exampleImportData}
+ })
+ expect(res.statusCode).toBe(200)
+ expect(res.body.errors[0].message).toBe('Not Authorised!')
+ })
+
+ it('should return 403 if user is not an editor', async () => {
+ const res = await queryAPI({
+ app,
+ userUuid,
+ query,
+ operationName: 'bulkImportAreas',
+ variables: {input: exampleImportData}
+ })
+ expect(res.statusCode).toBe(200)
+ expect(res.body.errors[0].message).toBe('Not Authorised!')
+ })
+
+ it('should return 200 if user is an editor', async () => {
+ const res = await queryAPI({
+ app,
+ userUuid,
+ roles: ['editor'],
+ query,
+ operationName: 'bulkImportAreas',
+ variables: {input: exampleImportData}
+ })
+ expect(res.status).toBe(200)
+ })
+
+ it('should import data', async () => {
+ const res = await queryAPI({
+ app,
+ userUuid,
+ roles: ['editor'],
+ query,
+ operationName: 'bulkImportAreas',
+ variables: {
+ input: {
+ areas: [
+ ...exampleImportData.areas,
+ {
+ uuid: testArea.metadata.area_id,
+ areaName: "Updated Test Area",
+ }
+ ]
+ }
+ }
+ });
+ expect(res.body.errors).toBeFalsy()
+
+ const result = res.body.data.bulkImportAreas as BulkImportResultType
+ expect(result.addedAreas.length).toBe(4)
+
+ const committedAreas = await Promise.all(result.addedAreas.map((area) => bulkImport.findOneAreaByUUID(muuid.from(area.metadata.area_id))));
+ expect(committedAreas.length).toBe(4);
+
+ const committedClimbs = await Promise.all(result.addedOrUpdatedClimbs.map((climb) => climbs.findOneClimbByMUUID(climb._id)));
+ expect(committedClimbs.length).toBe(2);
+
+ const updatedAreas = await Promise.all(result.updatedAreas.map((area) => bulkImport.findOneAreaByUUID(muuid.from(area.metadata.area_id))));
+ expect(updatedAreas.length).toBe(1);
+ expect(updatedAreas[0].area_name).toBe("Updated Test Area");
+ })
+});
\ No newline at end of file
diff --git a/src/__tests__/history.ts b/src/__tests__/history.ts
index d33e218e..d784ea3f 100644
--- a/src/__tests__/history.ts
+++ b/src/__tests__/history.ts
@@ -1,13 +1,15 @@
-import { ApolloServer } from 'apollo-server'
+import { ApolloServer } from 'apollo-server-express'
import muuid from 'uuid-mongodb'
import { jest } from '@jest/globals'
import MutableAreaDataSource from '../model/MutableAreaDataSource.js'
import MutableOrganizationDataSource from '../model/MutableOrganizationDataSource.js'
import MutableClimbDataSource from '../model/MutableClimbDataSource.js'
import { AreaType } from '../db/AreaTypes.js'
-import { OrgType, OrganizationType } from '../db/OrganizationTypes.js'
+import { OrganizationType, OrgType } from '../db/OrganizationTypes.js'
import { muuidToString } from '../utils/helpers.js'
import { queryAPI, setUpServer } from '../utils/testUtils.js'
+import { InMemoryDB } from '../utils/inMemoryDB.js'
+import express from 'express'
jest.setTimeout(60000)
@@ -15,7 +17,8 @@ describe('history API', () => {
let server: ApolloServer
let user: muuid.MUUID
let userUuid: string
- let inMemoryDB
+ let app: express.Application
+ let inMemoryDB: InMemoryDB
// Mongoose models for mocking pre-existing state.
let areas: MutableAreaDataSource
@@ -23,7 +26,7 @@ describe('history API', () => {
let climbs: MutableClimbDataSource
beforeAll(async () => {
- ({ server, inMemoryDB } = await setUpServer())
+ ({ server, inMemoryDB, app } = await setUpServer())
// Auth0 serializes uuids in "relaxed" mode, resulting in this hex string format
// "59f1d95a-627d-4b8c-91b9-389c7424cb54" instead of base64 "WfHZWmJ9S4yRuTicdCTLVA==".
user = muuid.mode('relaxed').v4()
@@ -107,7 +110,8 @@ describe('history API', () => {
const resp = await queryAPI({
query: QUERY_RECENT_CHANGE_HISTORY,
variables: { filter: {} },
- userUuid
+ userUuid,
+ app
})
expect(resp.statusCode).toBe(200)
const histories = resp.body.data.getChangeHistory
diff --git a/src/__tests__/import-example.json b/src/__tests__/import-example.json
new file mode 100644
index 00000000..bf7fec94
--- /dev/null
+++ b/src/__tests__/import-example.json
@@ -0,0 +1,79 @@
+{
+ "areas": [
+ {
+ "areaName": "Utah",
+ "countryCode": "us",
+ "children": [
+ {
+ "areaName": "Southeast Utah",
+ "children": [
+ {
+ "areaName": "Indian Creek",
+ "description": "Indian Creek is a crack climbing mecca in the southeastern region of Utah, USA. Located within the [Bears Ears National Monument](https://en.wikipedia.org/wiki/Bears_Ears_National_Monument).",
+ "lng": -109.5724044642857,
+ "lat": 38.069429035714286,
+ "children": [
+ {
+ "areaName": "Supercrack Buttress",
+ "gradeContext": "US",
+ "description": "",
+ "lng": -109.54552,
+ "lat": 38.03635,
+ "bbox": [
+ -109.54609091005857,
+ 38.03590033981814,
+ -109.54494908994141,
+ 38.03679966018186
+ ],
+ "climbs": [
+ {
+ "name": "The Key Flake",
+ "grade": "5.10",
+ "fa": "unknown",
+ "disciplines": {
+ "trad": true
+ },
+ "safety": "UNSPECIFIED",
+ "lng": -109.54552,
+ "lat": 38.03635,
+ "leftRightIndex": 1,
+ "description": "Cool off-width that requires off-width and face skills.",
+ "protection": "Anchors hidden up top. Need 80m to make it all the way down.",
+ "location": "Opposite keyhole flake. Obvious right leaning offwidth that starts atop 20 ft boulder."
+ },
+ {
+ "name": "Incredible Hand Crack",
+ "grade": "5.10",
+ "fa": "Rich Perch, John Bragg, Doug Snively, and Anne Tarver, 1978",
+ "disciplines": {
+ "trad": true
+ },
+ "leftRightIndex": 2,
+ "description": "Route starts at the top of the trail from the parking lot to Supercrack Buttress.",
+ "protection": "Cams from 2-2.5\". Heavy on 2.5\" (#2 Camalot)",
+ "pitches": [
+ {
+ "pitchNumber": 1,
+ "grade": "5.10",
+ "length": 100,
+ "boltsCount": 0,
+ "description": "A classic hand crack that widens slightly towards the top. Requires a range of cam sizes. Sustained and excellent quality."
+ },
+ {
+ "pitchNumber": 2,
+ "grade": "5.9",
+ "length": 30,
+ "description": "Easier climbing with good protection. Features a mix of crack sizes. Shorter than the first pitch but equally enjoyable."
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/src/__tests__/organizations.ts b/src/__tests__/organizations.ts
index 448df19b..39ac723a 100644
--- a/src/__tests__/organizations.ts
+++ b/src/__tests__/organizations.ts
@@ -1,22 +1,22 @@
-import { ApolloServer } from 'apollo-server'
+import { ApolloServer } from 'apollo-server-express'
import muuid from 'uuid-mongodb'
-import { jest } from '@jest/globals'
import MutableAreaDataSource from '../model/MutableAreaDataSource.js'
import MutableOrganizationDataSource from '../model/MutableOrganizationDataSource.js'
import { AreaType } from '../db/AreaTypes.js'
-import { OrgType, OrganizationType, OperationType, OrganizationEditableFieldsType } from '../db/OrganizationTypes.js'
+import { OperationType, OrganizationEditableFieldsType, OrganizationType, OrgType } from '../db/OrganizationTypes.js'
import { changelogDataSource } from '../model/ChangeLogDataSource.js'
import { queryAPI, setUpServer } from '../utils/testUtils.js'
import { muuidToString } from '../utils/helpers.js'
import { validate as validateMuuid } from 'uuid'
-
-jest.setTimeout(60000)
+import { InMemoryDB } from '../utils/inMemoryDB.js'
+import express from 'express'
describe('organizations API', () => {
let server: ApolloServer
let user: muuid.MUUID
let userUuid: string
- let inMemoryDB
+ let app: express.Application
+ let inMemoryDB: InMemoryDB
// Mongoose models for mocking pre-existing state.
let areas: MutableAreaDataSource
@@ -26,7 +26,7 @@ describe('organizations API', () => {
let wa: AreaType
beforeAll(async () => {
- ({ server, inMemoryDB } = await setUpServer())
+ ({ server, inMemoryDB, app } = await setUpServer())
// Auth0 serializes uuids in "relaxed" mode, resulting in this hex string format
// "59f1d95a-627d-4b8c-91b9-389c7424cb54" instead of base64 "WfHZWmJ9S4yRuTicdCTLVA==".
user = muuid.mode('relaxed').v4()
@@ -43,8 +43,8 @@ describe('organizations API', () => {
})
afterAll(async () => {
- await server.stop()
- await inMemoryDB.close()
+ await server?.stop()
+ await inMemoryDB?.close()
})
describe('mutations', () => {
@@ -87,7 +87,8 @@ describe('organizations API', () => {
operationName: 'addOrganization',
variables: { input: { displayName: 'Friends of Openbeta', orgType: 'LOCAL_CLIMBING_ORGANIZATION' } },
userUuid,
- roles: ['user_admin']
+ roles: ['user_admin'],
+ app
})
expect(createResponse.statusCode).toBe(200)
@@ -120,7 +121,8 @@ describe('organizations API', () => {
}
},
userUuid,
- roles: ['user_admin']
+ roles: ['user_admin'],
+ app
})
expect(updateResponse.statusCode).toBe(200)
expect(updateResponse.body.errors).toBeUndefined()
@@ -165,7 +167,8 @@ describe('organizations API', () => {
operationName: 'addOrganization',
variables: { input: { displayName: 'Friends of Openbeta', orgType: 'LOCAL_CLIMBING_ORGANIZATION' } },
userUuid,
- roles: ['editor']
+ roles: ['editor'],
+ app
})
expect(response.statusCode).toBe(200)
expect(response.body.data.organization).toBeNull()
@@ -251,7 +254,8 @@ describe('organizations API', () => {
query: organizationQuery,
operationName: 'organization',
variables: { input: muuidToString(alphaOrg.orgId) },
- userUuid
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const orgResult = response.body.data.organization
@@ -269,7 +273,8 @@ describe('organizations API', () => {
query: organizationsQuery,
operationName: 'organizations',
variables: { filter: { displayName: { match: 'Delta OpenBeta Club', exactMatch: true } } },
- userUuid
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
@@ -283,7 +288,8 @@ describe('organizations API', () => {
query: organizationsQuery,
operationName: 'organizations',
variables: { filter: { displayName: { match: 'delta', exactMatch: false } } },
- userUuid
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const dataResult = response.body.data.organizations
@@ -298,7 +304,8 @@ describe('organizations API', () => {
variables: {
limit: 1
},
- userUuid
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const dataResult = response.body.data.organizations
@@ -310,7 +317,8 @@ describe('organizations API', () => {
query: organizationsQuery,
operationName: 'organizations',
variables: { filter: { associatedAreaIds: { includes: [muuidToString(ca.metadata.area_id)] } } },
- userUuid
+ userUuid,
+ app
})
// Graphql should convert `includes` from a string[] to MUUID[]
expect(response.statusCode).toBe(200)
@@ -324,7 +332,8 @@ describe('organizations API', () => {
query: organizationsQuery,
operationName: 'organizations',
variables: { filter: { excludedAreaIds: { excludes: [muuidToString(wa.metadata.area_id)] } } },
- userUuid
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const dataResult = response.body.data.organizations
diff --git a/src/__tests__/ticks.ts b/src/__tests__/ticks.ts
index 1db3e2b2..60e87413 100644
--- a/src/__tests__/ticks.ts
+++ b/src/__tests__/ticks.ts
@@ -1,4 +1,4 @@
-import { ApolloServer } from 'apollo-server'
+import { ApolloServer } from 'apollo-server-express'
import muuid from 'uuid-mongodb'
import { jest } from '@jest/globals'
import { queryAPI, setUpServer } from '../utils/testUtils.js'
@@ -7,14 +7,17 @@ import { TickInput } from '../db/TickTypes.js'
import TickDataSource from '../model/TickDataSource.js'
import UserDataSource from '../model/UserDataSource.js'
import { UpdateProfileGQLInput } from '../db/UserTypes.js'
+import { InMemoryDB } from '../utils/inMemoryDB.js'
+import express from 'express'
-jest.setTimeout(60000)
+jest.setTimeout(110000)
describe('ticks API', () => {
let server: ApolloServer
let user: muuid.MUUID
let userUuid: string
- let inMemoryDB
+ let app: express.Application
+ let inMemoryDB: InMemoryDB
// Mongoose models for mocking pre-existing state.
let ticks: TickDataSource
@@ -22,7 +25,7 @@ describe('ticks API', () => {
let tickOne: TickInput
beforeAll(async () => {
- ({ server, inMemoryDB } = await setUpServer())
+ ({ server, inMemoryDB, app } = await setUpServer())
user = muuid.v4()
userUuid = muuidToString(user)
@@ -93,7 +96,8 @@ describe('ticks API', () => {
const response = await queryAPI({
query: userQuery,
variables: { userId: userUuid },
- userUuid
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const res = response.body.data.userTicks
@@ -112,7 +116,8 @@ describe('ticks API', () => {
const response = await queryAPI({
query: userQuery,
variables: { username: 'cat.dog' },
- userUuid
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const res = response.body.data.userTicks
@@ -125,7 +130,8 @@ describe('ticks API', () => {
const response = await queryAPI({
query: userTickByClimbQuery,
variables: { userId: userUuid, climbId: tickOne.climbId },
- userUuid
+ userUuid,
+ app
})
expect(response.statusCode).toBe(200)
const res = response.body.data.userTicksByClimbId
@@ -172,7 +178,8 @@ describe('ticks API', () => {
query: createQuery,
variables: { input: tickOne },
userUuid,
- roles: ['user_admin']
+ roles: ['user_admin'],
+ app
})
expect(createResponse.statusCode).toBe(200)
@@ -204,7 +211,8 @@ describe('ticks API', () => {
}
},
userUuid,
- roles: ['user_admin']
+ roles: ['user_admin'],
+ app
})
expect(updateResponse.statusCode).toBe(200)
diff --git a/src/auth/local-dev/middleware.ts b/src/auth/local-dev/middleware.ts
index 779066ec..55898df6 100644
--- a/src/auth/local-dev/middleware.ts
+++ b/src/auth/local-dev/middleware.ts
@@ -6,7 +6,7 @@ import muuid, { MUUID } from 'uuid-mongodb'
import { AuthUserType } from '../../types.js'
import { logger } from '../../logger.js'
-export const localDevBypassAuthMiddleware = (() => {
+export const localDevBypassAuthContext = (() => {
const testUUID: MUUID = muuid.v4()
return async ({ req }): Promise => {
diff --git a/src/auth/middleware.ts b/src/auth/middleware.ts
index af81045c..365ca294 100644
--- a/src/auth/middleware.ts
+++ b/src/auth/middleware.ts
@@ -7,31 +7,35 @@ import { logger } from '../logger.js'
* Create a middleware context for Apollo server
*/
export const createContext = async ({ req }): Promise => {
- const { headers } = req
-
- const user: AuthUserType = {
- roles: [],
- uuid: undefined,
- isBuilder: false
+ try {
+ return await validateTokenAndExtractUser(req)
+ } catch (e) {
+ logger.error(`Can't validate token and extract user ${e.toString() as string}`)
+ throw new Error('An unexpected error has occurred. Please notify us at support@openbeta.io.')
}
+}
- const authHeader = String(headers?.authorization ?? '')
- if (authHeader.startsWith('Bearer ')) {
- const token = authHeader.substring(7, authHeader.length).trim()
+async function validateTokenAndExtractUser (req: Request): Promise<{ user: AuthUserType, token: string }> {
+ const { headers } = req
+ // eslint-disable-next-line @typescript-eslint/dot-notation
+ const authHeader = String(headers?.['authorization'] ?? '')
+ if (!authHeader.startsWith('Bearer ')) {
+ throw new Error('Unauthorized. Please provide a valid JWT token in the Authorization header.')
+ }
- let payload
- try {
- payload = await verifyJWT(token)
- } catch (e) {
- logger.error(`Can't verify JWT token ${e.toString() as string}`)
- throw new Error('An unexpected error has occurred. Please notify us at support@openbeta.io.')
+ const token = authHeader.substring(7, authHeader.length).trim()
+ try {
+ const payload = await verifyJWT(token)
+ return {
+ user: {
+ isBuilder: payload?.scope?.includes('builder:default') ?? false,
+ roles: payload?.['https://tacos.openbeta.io/roles'] ?? [],
+ uuid: payload?.['https://tacos.openbeta.io/uuid'] != null ? muid.from(payload['https://tacos.openbeta.io/uuid']) : undefined
+ },
+ token
}
-
- user.isBuilder = payload?.scope?.includes('builder:default') ?? false
- user.roles = payload?.['https://tacos.openbeta.io/roles'] ?? []
- const uidStr: string | undefined = payload?.['https://tacos.openbeta.io/uuid']
- user.uuid = uidStr != null ? muid.from(uidStr) : undefined
+ } catch (e) {
+ logger.error(`Can't verify JWT token ${e.toString() as string}`)
+ throw new Error("Unauthorized. Can't verify JWT token")
}
-
- return { user }
}
diff --git a/src/auth/permissions.ts b/src/auth/permissions.ts
index 59e2925e..4f6dfbb8 100644
--- a/src/auth/permissions.ts
+++ b/src/auth/permissions.ts
@@ -1,5 +1,5 @@
-import { shield, allow, and, or } from 'graphql-shield'
-import { isEditor, isUserAdmin, isOwner, isValidEmail, isMediaOwner } from './rules.js'
+import { allow, and, or, shield } from 'graphql-shield'
+import { isEditor, isMediaOwner, isOwner, isUserAdmin, isValidEmail } from './rules.js'
const permissions = shield({
Query: {
@@ -13,6 +13,7 @@ const permissions = shield({
updateArea: isEditor,
updateClimbs: isEditor,
deleteClimbs: isEditor,
+ bulkImportAreas: isEditor,
updateUserProfile: and(isOwner, isValidEmail),
addEntityTag: or(isMediaOwner, isUserAdmin),
removeEntityTag: or(isMediaOwner, isUserAdmin),
diff --git a/src/auth/rules.ts b/src/auth/rules.ts
index f5ecdafe..6e7a3a21 100644
--- a/src/auth/rules.ts
+++ b/src/auth/rules.ts
@@ -1,4 +1,4 @@
-import { rule, inputRule } from 'graphql-shield'
+import { inputRule, rule } from 'graphql-shield'
import MediaDataSource from '../model/MutableMediaDataSource.js'
import { MediaObjectGQLInput } from '../db/MediaObjectTypes.js'
diff --git a/src/db/AreaTypes.ts b/src/db/AreaTypes.ts
index 51545d8f..d230882a 100644
--- a/src/db/AreaTypes.ts
+++ b/src/db/AreaTypes.ts
@@ -2,11 +2,11 @@ import mongoose from 'mongoose'
import { MUUID } from 'uuid-mongodb'
import { BBox, Point, Polygon } from '@turf/helpers'
-import { ClimbType } from './ClimbTypes.js'
-import { ChangeRecordMetadataType } from './ChangeLogType.js'
import { GradeContexts } from '../GradeUtils.js'
-import { ExperimentalAuthorType } from './UserTypes.js'
import { AuthorMetadata } from '../types.js'
+import { ChangeRecordMetadataType } from './ChangeLogType.js'
+import { ClimbType } from './ClimbTypes.js'
+import { ExperimentalAuthorType } from './UserTypes.js'
export type AreaDocumnent = mongoose.Document & AreaType
@@ -34,7 +34,7 @@ export type AreaType = IAreaProps & {
* they may be hard to locate based on the contents of this object.
* See AreaType for the reified version of this object, and always use it
* if you are working with data that exists inside the database.
-*/
+ */
export interface IAreaProps extends AuthorMetadata {
_id: mongoose.Types.ObjectId
/**
@@ -63,18 +63,22 @@ export interface IAreaProps extends AuthorMetadata {
*/
children: mongoose.Types.ObjectId[]
/**
- * areaNames of this areas parents, traversing up the heirarchy to the root area.
+ * ancestors ids of this area's parents, traversing up the hierarchy to the root area.
* This is encoded as a string, but is really an array delimited by comma.
+ * @see https://www.mongodb.com/docs/manual/tutorial/model-tree-structures-with-materialized-paths/
*/
ancestors: string
- /** UUIDs of this areas parents, traversing up the heirarchy to the root area. */
+ /**
+ * pathTokens names of this area's parents, traversing up the hierarchy to the root area
+ * with the current area being the last element.
+ */
pathTokens: string[]
gradeContext: GradeContexts
/**
* computed aggregations on this document. See the AggregateType documentation for
* more information.
- */
+ */
aggregate: AggregateType
/**
* User-composed content that makes up most of the user-readable data in the system.
@@ -150,6 +154,7 @@ export interface IAreaMetadata {
*/
polygon?: Polygon
}
+
export interface IAreaContent {
/** longform to mediumform description of this area.
* Remembering that areas can be the size of countries, or as precise as a single cliff/boulder,
@@ -186,11 +191,13 @@ export interface CountByGroupType {
count: number
label: string
}
+
export interface AggregateType {
byGrade: CountByGroupType[]
byDiscipline: CountByDisciplineType
byGradeBand: CountByGradeBandType
}
+
export interface CountByDisciplineType {
trad?: DisciplineStatsType
sport?: DisciplineStatsType
@@ -219,7 +226,7 @@ export interface CountByGradeBandType {
}
/** The audit trail comprises a set of controlled events that may occur in relation
- * to user actiion on core data. The enumeration herein defines the set of events
+ * to user action on core data. The enumeration herein defines the set of events
* that may occur, and short documentation of what they mean
*/
export enum OperationType {
@@ -238,7 +245,7 @@ export enum OperationType {
* specific field's boolean state.
*/
updateDestination = 'updateDestination',
- /** signals that a user has pushed new user-changable data has been pushed into an area document. */
+ /** signals that a user has pushed new user-changeable data has been pushed into an area document. */
updateArea = 'updateArea',
/** Set areas' sorting index */
diff --git a/src/db/BulkImportTypes.ts b/src/db/BulkImportTypes.ts
new file mode 100644
index 00000000..a0714196
--- /dev/null
+++ b/src/db/BulkImportTypes.ts
@@ -0,0 +1,57 @@
+import { AreaType } from './AreaTypes.js'
+import { ClimbType, DisciplineType, SafetyType } from './ClimbTypes.js'
+import { MUUID } from 'uuid-mongodb'
+import { ExperimentalAuthorType } from './UserTypes.js'
+
+export interface BulkImportResultType {
+ addedAreas: AreaType[]
+ updatedAreas: AreaType[]
+ addedOrUpdatedClimbs: ClimbType[]
+}
+
+export interface BulkImportInputType {
+ areas: BulkImportAreaInputType[]
+}
+
+export interface BulkImportAreaInputType {
+ uuid?: MUUID
+ areaName?: string
+ description?: string
+ countryCode?: string
+ gradeContext?: string
+ leftRightIndex?: number
+ lng?: number
+ lat?: number
+ bbox?: [number, number, number, number]
+ children?: BulkImportAreaInputType[]
+ climbs?: BulkImportClimbInputType[]
+}
+
+export interface BulkImportClimbInputType {
+ uuid?: MUUID
+ name?: string
+ grade: string
+ disciplines: DisciplineType
+ safety?: SafetyType
+ lng?: number
+ lat?: number
+ leftRightIndex?: number
+ description?: string
+ location?: string
+ protection?: string
+ fa?: string
+ length?: number
+ boltsCount?: number
+ experimentalAuthor?: ExperimentalAuthorType
+ pitches?: BulkImportPitchesInputType[]
+}
+
+export interface BulkImportPitchesInputType {
+ id?: MUUID
+ pitchNumber: number
+ grade: string
+ disciplines?: DisciplineType
+ description?: string
+ length?: number
+ boltsCount?: number
+}
diff --git a/src/db/edit/streamListener.ts b/src/db/edit/streamListener.ts
index aff9b11f..f6310c47 100644
--- a/src/db/edit/streamListener.ts
+++ b/src/db/edit/streamListener.ts
@@ -4,7 +4,14 @@ import dot from 'dot-object'
import { changelogDataSource } from '../../model/ChangeLogDataSource.js'
import { logger } from '../../logger.js'
-import { BaseChangeRecordType, ResumeToken, UpdateDescription, DBOperation, SupportedCollectionTypes, DocumentKind } from '../ChangeLogType.js'
+import {
+ BaseChangeRecordType,
+ DBOperation,
+ DocumentKind,
+ ResumeToken,
+ SupportedCollectionTypes,
+ UpdateDescription
+} from '../ChangeLogType.js'
import { checkVar } from '../index.js'
import { updateAreaIndex, updateClimbIndex } from '../export/Typesense/Client.js'
import { AreaType } from '../AreaTypes.js'
@@ -15,6 +22,22 @@ import { ClimbType } from '../ClimbTypes.js'
* Start a new stream listener to track changes
*/
export default async function streamListener (): Promise<ChangeStream> {
+ // eslint-disable-next-line @typescript-eslint/no-misused-promises
+ return (await createChangeStream()).on('change', onChange)
+}
+
+/**
+ * The test stream listener awaits all change events
+ */
+export async function testStreamListener (callback?: (change: ChangeStreamDocument) => void): Promise<ChangeStream> {
+ // eslint-disable-next-line @typescript-eslint/no-misused-promises
+ return (await createChangeStream()).on('change', async (change: ChangeStreamDocument) => {
+ await onChange(change)
+ if (callback != null) callback(change)
+ })
+}
+
+async function createChangeStream (): Promise<ChangeStream> {
const resumeId = await mostRecentResumeId()
logger.info({ resumeId }, 'Starting stream listener')
@@ -40,11 +63,10 @@ export default async function streamListener (): Promise {
}
}]
- const changeStream = mongoose.connection.watch(pipeline, opts)
- return changeStream.on('change', onChange)
+ return mongoose.connection.watch(pipeline, opts)
}
-const onChange = (change: ChangeStreamDocument): void => {
+const onChange = async (change: ChangeStreamDocument): Promise<void> => {
const { operationType } = change
switch (operationType) {
@@ -57,15 +79,24 @@ const onChange = (change: ChangeStreamDocument): void => {
dbOp = 'delete'
}
- void recordChange({ _id: _id as ResumeToken, source, fullDocument: fullDocument as SupportedCollectionTypes, updateDescription, dbOp })
- break
+ return await recordChange({
+ _id: _id as ResumeToken,
+ source,
+ fullDocument: fullDocument as SupportedCollectionTypes,
+ updateDescription,
+ dbOp
+ })
}
case 'insert': {
const dbOp = 'insert'
const source = DocumentKind[change.ns.coll]
const { fullDocument, _id } = change
- void recordChange({ _id: _id as ResumeToken, source, fullDocument: fullDocument as SupportedCollectionTypes, dbOp })
- break
+ return await recordChange({
+ _id: _id as ResumeToken,
+ source,
+ fullDocument: fullDocument as SupportedCollectionTypes,
+ dbOp
+ })
}
}
}
@@ -89,9 +120,7 @@ const recordChange = async ({ source, dbOp, fullDocument, updateDescription, _id
updateDescription: dotifyUpdateDescription(updateDescription),
kind: DocumentKind.climbs
}
- void changelogDataSource.record(newDocument)
- void updateClimbIndex(fullDocument as ClimbType, dbOp)
- break
+ return await changelogDataSource.record(newDocument).then(async () => await updateClimbIndex(fullDocument as ClimbType, dbOp))
}
case DocumentKind.areas: {
const newDocument: BaseChangeRecordType = {
@@ -101,9 +130,7 @@ const recordChange = async ({ source, dbOp, fullDocument, updateDescription, _id
updateDescription: dotifyUpdateDescription(updateDescription),
kind: DocumentKind.areas
}
- void changelogDataSource.record(newDocument)
- void updateAreaIndex(fullDocument as AreaType, dbOp)
- break
+ return await changelogDataSource.record(newDocument).then(async () => await updateAreaIndex(fullDocument as AreaType, dbOp))
}
case DocumentKind.organizations: {
const newDocument: BaseChangeRecordType = {
@@ -113,8 +140,7 @@ const recordChange = async ({ source, dbOp, fullDocument, updateDescription, _id
updateDescription: dotifyUpdateDescription(updateDescription),
kind: DocumentKind.organizations
}
- void changelogDataSource.record(newDocument)
- break
+ return await changelogDataSource.record(newDocument).then()
}
default:
exhaustiveCheck(source)
diff --git a/src/db/index.ts b/src/db/index.ts
index 25656e8a..d087159f 100644
--- a/src/db/index.ts
+++ b/src/db/index.ts
@@ -82,10 +82,10 @@ export const gracefulExit = async (exitCode: number = 0): Promise => {
})
}
-export const defaultPostConnect = async (): Promise => {
+export const defaultPostConnect = async (changeStreamListener = streamListener): Promise => {
console.log('Kudos!')
await createIndexes()
- return await streamListener()
+ return await changeStreamListener()
}
// eslint-disable-next-line
diff --git a/src/graphql/area/AreaMutations.ts b/src/graphql/area/AreaMutations.ts
index 6c62de5a..b454fda4 100644
--- a/src/graphql/area/AreaMutations.ts
+++ b/src/graphql/area/AreaMutations.ts
@@ -3,6 +3,7 @@ import muuid from 'uuid-mongodb'
import { AreaType } from '../../db/AreaTypes.js'
import { ContextWithAuth } from '../../types.js'
import type MutableAreaDataSource from '../../model/MutableAreaDataSource.js'
+import { BulkImportInputType, BulkImportResultType } from '../../db/BulkImportTypes.js'
const AreaMutations = {
@@ -77,6 +78,19 @@ const AreaMutations = {
user.uuid,
input
)
+ },
+
+ bulkImportAreas: async (_, { input }: { input: BulkImportInputType }, {
+ dataSources,
+ user
+ }: ContextWithAuth): Promise => {
+ const { bulkImport, climbs } = dataSources
+ if (user?.uuid == null) throw new Error('Missing user uuid')
+ return await bulkImport.bulkImport({
+ user: user.uuid,
+ input,
+ climbs
+ })
}
}
diff --git a/src/graphql/common/typeDef.ts b/src/graphql/common/typeDef.ts
index 162e9c94..c52eac6b 100644
--- a/src/graphql/common/typeDef.ts
+++ b/src/graphql/common/typeDef.ts
@@ -1,4 +1,4 @@
-import { gql } from 'apollo-server'
+import { gql } from 'apollo-server-express'
const typeDefs = gql`
scalar Date
diff --git a/src/graphql/organization/OrganizationQueries.ts b/src/graphql/organization/OrganizationQueries.ts
index e7d559cd..4087d3f5 100644
--- a/src/graphql/organization/OrganizationQueries.ts
+++ b/src/graphql/organization/OrganizationQueries.ts
@@ -1,5 +1,5 @@
import type OrganizationDataSource from '../../model/OrganizationDataSource'
-import { QueryByIdType, OrganizationGQLFilter, Sort, Context } from '../../types'
+import { Context, OrganizationGQLFilter, QueryByIdType, Sort } from '../../types'
const OrganizationQueries = {
organization: async (_: any,
@@ -7,7 +7,7 @@ const OrganizationQueries = {
context: Context, info) => {
const { dataSources } = context
const { organizations }: { organizations: OrganizationDataSource } = dataSources
- if (muuid !== undefined) {
+ if (muuid != null) {
return await organizations.findOneOrganizationByOrgId(muuid)
}
return null
diff --git a/src/graphql/resolvers.ts b/src/graphql/resolvers.ts
index f9796a6a..094fafcb 100644
--- a/src/graphql/resolvers.ts
+++ b/src/graphql/resolvers.ts
@@ -1,23 +1,23 @@
import { makeExecutableSchema } from '@graphql-tools/schema'
import muid, { MUUID } from 'uuid-mongodb'
import fs from 'fs'
-import { gql } from 'apollo-server'
+import { gql } from 'apollo-server-express'
import { DocumentNode } from 'graphql'
import { CommonResolvers, CommonTypeDef } from './common/index.js'
-import { HistoryQueries, HistoryFieldResolvers } from '../graphql/history/index.js'
-import { QueryByIdType, GQLFilter, Sort, Context } from '../types'
+import { HistoryFieldResolvers, HistoryQueries } from '../graphql/history/index.js'
+import { Context, GQLFilter, QueryByIdType, Sort } from '../types'
import { AreaType, CountByDisciplineType } from '../db/AreaTypes.js'
import { ClimbGQLQueryType, ClimbType } from '../db/ClimbTypes.js'
import AreaDataSource from '../model/AreaDataSource.js'
import { MediaMutations, MediaQueries, MediaResolvers } from './media/index.js'
import { PostMutations, PostQueries, PostResolvers } from './posts/index.js'
import { XMediaMutations, XMediaQueries, XMediaResolvers } from './xmedia/index.js'
-import { AreaQueries, AreaMutations } from './area/index.js'
+import { AreaMutations, AreaQueries } from './area/index.js'
import { ClimbMutations } from './climb/index.js'
import { OrganizationMutations, OrganizationQueries } from './organization/index.js'
import { TickMutations, TickQueries } from './tick/index.js'
-import { UserQueries, UserMutations, UserResolvers } from './user/index.js'
+import { UserMutations, UserQueries, UserResolvers } from './user/index.js'
import { getAuthorMetadataFromBaseNode } from '../db/utils/index.js'
import { geojsonPointToLatitude, geojsonPointToLongitude } from '../utils/helpers.js'
@@ -74,7 +74,7 @@ const resolvers = {
{ uuid }: QueryByIdType,
{ dataSources }: Context) => {
const { areas } = dataSources
- if (uuid !== undefined && uuid !== '') {
+ if (uuid != null && uuid !== '') {
return await areas.findOneClimbByUUID(muid.from(uuid))
}
return null
@@ -182,7 +182,7 @@ const resolvers = {
})
},
- ancestors: (node: ClimbGQLQueryType) => node.ancestors.split(','),
+ ancestors: (node: ClimbGQLQueryType) => node.ancestors?.split(',') ?? [],
media: async (node: ClimbType, args: any, { dataSources }: Context) => {
const { media } = dataSources
@@ -220,7 +220,7 @@ const resolvers = {
return node.aggregate
},
- ancestors: async (parent) => parent.ancestors.split(','),
+ ancestors: async (parent) => parent.ancestors?.split(',') ?? [],
climbs: async (node: AreaType, _, { dataSources: { areas } }: Context) => {
if ((node?.climbs?.length ?? 0) === 0) {
diff --git a/src/graphql/schema/Area.gql b/src/graphql/schema/Area.gql
index 0add644e..20fbc88e 100644
--- a/src/graphql/schema/Area.gql
+++ b/src/graphql/schema/Area.gql
@@ -49,7 +49,7 @@ type Area {
content: AreaContent
"pathTokens hashed into a single string"
- pathHash: String!
+ pathHash: String
"""
Grade systems have minor variations between countries.
diff --git a/src/graphql/schema/AreaEdit.gql b/src/graphql/schema/AreaEdit.gql
index 3db833d1..53808577 100644
--- a/src/graphql/schema/AreaEdit.gql
+++ b/src/graphql/schema/AreaEdit.gql
@@ -1,6 +1,6 @@
type Mutation {
"""
- Add an area attributes
+ Add an area
"""
addArea(input: AreaInput): Area
@@ -23,6 +23,13 @@ type Mutation {
Update area sorting order in bulk
"""
updateAreasSortingOrder(input: [AreaSortingInput]): [ID]
+
+ """
+ Add or update an area tree in bulk, including climbs (and their pitches).
+ You can start at any point in the tree given a valid parent area with its uuid.
+ If starting at the root level, the `countryCode` must be provided.
+ """
+ bulkImportAreas(input: BulkImportInput): BulkImportResult
}
input DestinationFlagInput {
@@ -44,6 +51,114 @@ input AreaInput {
experimentalAuthor: ExperimentalAuthorType
}
+"""
+Bulk input for adding or updating areas, climbs, and pitches.
+"""
+input BulkImportInput {
+ areas: [BulkImportAreaInput]!
+}
+
+"""
+Bulk input for adding or updating areas.
+Provide either a `uuid` to UPDATE an existing area, or `areaName` to ADD a new area.
+"""
+input BulkImportAreaInput {
+ "The area UUID"
+ uuid: ID
+ "The name of the new area (or, if provided together with a uuid, the updated name of the area)"
+ areaName: String
+ "The description of the area: free text with e.g. geography, access, and condition information."
+ description: String
+ "Only relevant for the first level of areas (i. e. USA -> Utah). Must be ISO 3166-1 Alpha-3 country code (e. g. ‘USA’)."
+ countryCode: String
+ "The grading system used for climbing/bouldering in the area. Inherits from parent node if current node does not have one. UIAA = UIAA/font, US = yds/vscale, FR = french/font (see https://github.com/OpenBeta/openbeta-graphql/blob/9c517329db079c922fe7f092a78b658cb295e158/src/GradeUtils.ts#L40.)"
+ gradeContext: String
+ "The sorting index of the area. Defaults to -1 if not provided."
+ leftRightIndex: Int
+ "Longitude coordinate of the area, using the WGS 84 reference system."
+ lng: Float
+ "Latitude coordinate of the area, using the WGS 84 reference system."
+ lat: Float
+ "An optional bounding box that can be displayed on maps, using GeoJSON bbox (see https://datatracker.ietf.org/doc/html/rfc7946#section-5)."
+ bbox: [Float]
+ "A list of child areas. Can be deeply nested."
+ children: [BulkImportAreaInput]
+ """
+ A list of climbs that are directly associated with this area.
+ An area that has climbs cannot have child areas and automatically becomes a leaf node.
+ """
+ climbs: [BulkImportClimbInput]
+}
+
+"""
+Bulk input for adding or updating climbs (and pitches) within an area.
+Either define `uuid` or `name` to indicate whether to add or update a climb.
+Provide a `uuid` to UPDATE a climb, and `name` to ADD a new climb.
+Make sure to update all climbs if the leftRightIndex of a climb is updated.
+"""
+input BulkImportClimbInput {
+ "The climb UUID"
+ uuid: ID
+ "The name that this climb is commonly identified by (or if provided together with a uuid, the updated name of the climb)."
+ name: String
+ "The difficulty grade of the climb. Must be coherent with the area's gradeContext. I. e. gradeContext = 'US' requires denomination in yds/vscale (climbing/bouldering), so '5.11'/'V14', 'FR' would be french/font '9c+'/'9a', 'UIAA' would be uiaa/font '9+'/'9a'. (see https://github.com/OpenBeta/sandbag)."
+ grade: String!
+ "Object of applicable disciplines (e.g. { \"trad\": true }). Options: trad, sport, bouldering, deepwatersolo, alpine, ice, mixed, aid, tr (= toprope). Can be combined."
+ disciplines: DisciplineType!
+ "The safety rating of a climb based on US movie ratings (see https://github.com/OpenBeta/openbeta-graphql/blob/9c517329db079c922fe7f092a78b658cb295e158/src/graphql/schema/Climb.gql#L177)."
+ safety: SafetyEnum
+ "Longitude coordinate of the climb, using the WGS 84 reference system."
+ lng: Float
+ "Latitude coordinate of the climb, using the WGS 84 reference system."
+ lat: Float
+ "A numeric index used for sorting climbs from left to right (of a wall)."
+ leftRightIndex: Int
+ "The description of this climb, this is the main text field for this climb. This contains beta, visual descriptors, and any other information useful to identifying and attempting the climb."
+ description: String
+ "The location of the climb, e.g. 'The first climb on the left, entry directly behind the tree'."
+ location: String
+ "The protection of the climb, e.g. 'Long run out to the first bolt'."
+ protection: String
+ "The first ascent information of the climb. Usually formatted as: name(s) (year)."
+ fa: String
+ "Total length in meters if known (-1 otherwise)"
+ length: Int
+ "The number of bolts (fixed anchors) on the climb."
+ boltsCount: Int
+ "The experimental author of the climb."
+ experimentalAuthor: ExperimentalAuthorType
+ "A list of pitches that are directly associated with this climb (applies only to multi-pitch climbs)."
+ pitches: [BulkImportPitchesInput]
+}
+
+"""
+Bulk input for adding or updating pitches within a climb.
+Provide `id` to UPDATE an existing pitch.
+Make sure to update all pitches if the pitchNumber of one pitch is changed.
+"""
+input BulkImportPitchesInput {
+ "The pitch UUID (if provided, the pitch data will be UPDATED)."
+ id: ID
+ "The number of the pitch in the sequence."
+ pitchNumber: Int!
+ "The difficulty grade of the pitch (see Climb.grade)."
+ grade: String!
+ "The climbing disciplines applicable to the pitch (see Climb.disciplines)."
+ disciplines: DisciplineType
+ "The description of the pitch."
+ description: String
+ "The length of the pitch in meters."
+ length: Int
+ "The number of bolts (fixed anchors) on the pitch."
+ boltsCount: Int
+}
+
+type BulkImportResult {
+ addedAreas: [Area]
+ updatedAreas: [Area]
+ addedOrUpdatedClimbs: [Climb]
+}
+
input RemoveAreaInput {
uuid: String!
}
diff --git a/src/graphql/schema/Climb.gql b/src/graphql/schema/Climb.gql
index 5e73b516..ffa96efd 100644
--- a/src/graphql/schema/Climb.gql
+++ b/src/graphql/schema/Climb.gql
@@ -46,16 +46,16 @@ type Climb {
content: Content!
"""
- Area names traversing up the heirarchy from this climbs immediate
+ Area names traversing up the hierarchy from this climb's immediate
parent to the root area.
"""
- pathTokens: [String!]!
+ pathTokens: [String!]
"""
Area UUIDs traversing up the heirarchy from this climbs immediate
parent to the root area.
"""
- ancestors: [String!]!
+ ancestors: [String!]
"Media associated with this climb"
media: [MediaWithTags]
diff --git a/src/graphql/tick/TickImportTypeDef.ts b/src/graphql/tick/TickImportTypeDef.ts
index a7611bf2..ba702912 100644
--- a/src/graphql/tick/TickImportTypeDef.ts
+++ b/src/graphql/tick/TickImportTypeDef.ts
@@ -1,4 +1,4 @@
-import { gql } from 'apollo-server'
+import { gql } from 'apollo-server-express'
const TickImportTypeDefs = gql`
type TickImport{
diff --git a/src/main.ts b/src/main.ts
index 2af67fe9..04c7a7ea 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -1,15 +1,5 @@
-import { logger } from './logger.js'
import { connectDB, defaultPostConnect } from './db/index.js'
-import { createServer } from './server.js'
-
-const port = 4000
-const server = await createServer()
+import { createServer, startServer } from './server.js'
await connectDB(defaultPostConnect)
-await server
- .listen({
- port
- })
- .then((): void => {
- logger.info(`🚀 Server ready at http://localhost:${port}`)
- })
+await startServer(await createServer())
diff --git a/src/model/AreaDataSource.ts b/src/model/AreaDataSource.ts
index 2f4f3839..443c2aad 100644
--- a/src/model/AreaDataSource.ts
+++ b/src/model/AreaDataSource.ts
@@ -5,7 +5,16 @@ import bboxPolygon from '@turf/bbox-polygon'
import { getAreaModel, getMediaObjectModel } from '../db/index.js'
import { AreaType } from '../db/AreaTypes'
-import { GQLFilter, AreaFilterParams, PathTokenParams, LeafStatusParams, ComparisonFilterParams, StatisticsType, CragsNear, BBoxType } from '../types'
+import {
+ AreaFilterParams,
+ BBoxType,
+ ComparisonFilterParams,
+ CragsNear,
+ GQLFilter,
+ LeafStatusParams,
+ PathTokenParams,
+ StatisticsType
+} from '../types'
import { getClimbModel } from '../db/ClimbSchema.js'
import { ClimbGQLQueryType } from '../db/ClimbTypes.js'
import { logger } from '../logger.js'
@@ -113,7 +122,7 @@ export default class AreaDataSource extends MongoDataSource {
if (rs != null && rs.length === 1) {
return rs[0]
}
- throw new Error(`Area ${uuid.toUUID().toString()} not found.`)
+ throw new Error(`Area ${uuid.toString()} not found.`)
}
async findManyClimbsByUuids (uuidList: muuid.MUUID[]): Promise {
@@ -152,7 +161,7 @@ export default class AreaDataSource extends MongoDataSource {
{ $unwind: '$parent' }, // Previous stage returns as an array of 1 element. 'unwind' turn it into an object.
{
$set: {
- // create aliases
+ // create aliases
pathTokens: '$parent.pathTokens',
ancestors: '$parent.ancestors'
}
diff --git a/src/model/BulkImportDataSource.ts b/src/model/BulkImportDataSource.ts
new file mode 100644
index 00000000..877888d4
--- /dev/null
+++ b/src/model/BulkImportDataSource.ts
@@ -0,0 +1,190 @@
+import MutableAreaDataSource from './MutableAreaDataSource.js'
+import mongoose, { ClientSession } from 'mongoose'
+import { withTransaction } from '../utils/helpers.js'
+import muuid, { MUUID } from 'uuid-mongodb'
+import { AreaType } from '../db/AreaTypes.js'
+import {
+ BulkImportAreaInputType,
+ BulkImportClimbInputType,
+ BulkImportInputType,
+ BulkImportResultType
+} from '../db/BulkImportTypes.js'
+import MutableClimbDataSource from './MutableClimbDataSource.js'
+import { logger } from '../logger.js'
+import { ClimbChangeInputType, ClimbType } from '../db/ClimbTypes.js'
+
+export interface BulkImportOptions {
+ user: MUUID
+ input: BulkImportInputType
+ session?: mongoose.ClientSession
+ climbs?: MutableClimbDataSource
+}
+
+export default class BulkImportDataSource extends MutableAreaDataSource {
+ /**
+ * Import a tree of areas, climbs, and pitches in bulk inside a single transaction.
+ *
+ * @param options the requesting user, the bulk import input, an optional existing session, and the climb data source to use
+ * @returns the areas that were added, the areas that were updated, and the climbs that were added or updated
+ */
+ async bulkImport ({
+ user,
+ input,
+ session: _session,
+ climbs = MutableClimbDataSource.getInstance()
+ }: BulkImportOptions): Promise {
+ const result: BulkImportResultType = {
+ addedAreas: [],
+ updatedAreas: [],
+ addedOrUpdatedClimbs: []
+ }
+ logger.debug('starting bulk import session...')
+ const session = _session ?? (await mongoose.startSession())
+ try {
+ const bulkImportResult = await withTransaction(session, async () => {
+ logger.info('starting bulk import...')
+ logger.debug(input)
+ return await this._bulkImportJson({ user, input, climbs, session })
+ }) ?? result
+ logger.info(`bulk import complete: added ${bulkImportResult.addedAreas.length} new areas, updated ${bulkImportResult.updatedAreas.length} areas, added or updated ${bulkImportResult.addedOrUpdatedClimbs.length} climbs`)
+ logger.debug(bulkImportResult)
+ return bulkImportResult
+ } catch (e) {
+ logger.error('bulk import failed', e)
+ throw e
+ } finally {
+ if (!session.hasEnded) {
+ await session.endSession()
+ }
+ }
+ }
+
+ private async _bulkImportJson ({
+ user,
+ input,
+ session,
+ climbs = MutableClimbDataSource.getInstance()
+ }: BulkImportOptions): Promise {
+ const addOrUpdateArea = async (
+ areaNode: BulkImportAreaInputType,
+ parentUuid?: MUUID
+ ): Promise => {
+ const result: BulkImportResultType = {
+ addedAreas: [],
+ updatedAreas: [],
+ addedOrUpdatedClimbs: []
+ }
+ let area: AreaType | null
+ if (areaNode.uuid != null) {
+ area = await this.updateAreaWith({
+ user,
+ areaUuid: muuid.from(areaNode.uuid),
+ document: {
+ areaName: areaNode.areaName,
+ description: areaNode.description,
+ leftRightIndex: areaNode.leftRightIndex,
+ lng: areaNode.lng,
+ lat: areaNode.lat
+ },
+ session
+ })
+ if (area != null) {
+ result.updatedAreas.push(area)
+ } else {
+ throw new Error(`area with id ${areaNode.uuid.toUUID().toString()} (${areaNode.areaName ?? 'unknown name'}) not found`)
+ }
+ } else if (areaNode.areaName != null) {
+ area = await this.addAreaWith({
+ user,
+ areaName: areaNode.areaName,
+ countryCode: areaNode.countryCode,
+ parentUuid,
+ session
+ }).then(async (area) => {
+ return await this.updateArea(user, area.metadata.area_id, {
+ description: areaNode.description,
+ leftRightIndex: areaNode.leftRightIndex,
+ lng: areaNode.lng,
+ lat: areaNode.lat
+ }, session)
+ })
+ if (area != null) {
+ result.addedAreas.push(area)
+ } else {
+ throw new Error(`failed to add area ${areaNode.areaName} to parent ${parentUuid?.toUUID().toString() ?? 'unknown'}`)
+ }
+ } else {
+ throw new Error('areaName or id is required')
+ }
+ if (areaNode.children != null) {
+ for (const child of areaNode.children) {
+ const childResult = await addOrUpdateArea(child, area.metadata.area_id)
+ result.updatedAreas.push(...childResult.updatedAreas)
+ result.addedAreas.push(...childResult.addedAreas)
+ result.addedOrUpdatedClimbs.push(...childResult.addedOrUpdatedClimbs)
+ }
+ }
+ if (areaNode.climbs != null) {
+ const addedOrUpdatedClimbs = await Promise.all(await climbs?.addOrUpdateClimbsWith({
+ userId: user,
+ parentId: area.metadata.area_id,
+ changes: [...areaNode.climbs.map(this.toClimbChangeInputType) ?? []],
+ session
+ }).then((climbIds) => climbIds
+ .map((id) => climbs?.climbModel.findById(muuid.from(id)).session(session as ClientSession))) ?? [])
+ result.addedOrUpdatedClimbs.push(...addedOrUpdatedClimbs
+ .filter((climb) => climb !== null)
+ .map((climb) => climb as unknown as ClimbType)
+ )
+ }
+ return result
+ }
+
+ const results = await Promise.all(
+ input?.areas.map(async (area) => await addOrUpdateArea(area)) ?? []
+ )
+ return results.reduce((acc, result) => {
+ acc.addedAreas.push(...result.addedAreas)
+ acc.updatedAreas.push(...result.updatedAreas)
+ acc.addedOrUpdatedClimbs.push(...result.addedOrUpdatedClimbs)
+ return acc
+ }, {
+ addedAreas: [],
+ updatedAreas: [],
+ addedOrUpdatedClimbs: []
+ })
+ }
+
+ private toClimbChangeInputType (climb: BulkImportClimbInputType): ClimbChangeInputType {
+ return {
+ id: climb.uuid?.toUUID().toString(),
+ name: climb.name,
+ grade: climb.grade,
+ disciplines: climb.disciplines,
+ leftRightIndex: climb.leftRightIndex,
+ description: climb.description,
+ location: climb.location,
+ protection: climb.protection,
+ fa: climb.fa,
+ length: climb.length,
+ boltsCount: climb.boltsCount,
+ experimentalAuthor: climb.experimentalAuthor,
+ pitches: climb.pitches?.map((pitch) => ({
+ pitchNumber: pitch.pitchNumber,
+ grade: pitch.grade,
+ disciplines: pitch.disciplines,
+ description: pitch.description,
+ length: pitch.length,
+ boltsCount: pitch.boltsCount
+ }))
+ }
+ }
+
+ static instance: BulkImportDataSource
+
+ static getInstance (): BulkImportDataSource {
+ if (BulkImportDataSource.instance == null) {
+ BulkImportDataSource.instance = new BulkImportDataSource(mongoose.connection.db.collection('areas'))
+ }
+ return BulkImportDataSource.instance
+ }
+}
diff --git a/src/model/ChangeLogDataSource.ts b/src/model/ChangeLogDataSource.ts
index 267a6215..30001db5 100644
--- a/src/model/ChangeLogDataSource.ts
+++ b/src/model/ChangeLogDataSource.ts
@@ -3,7 +3,14 @@ import { MongoDataSource } from 'apollo-datasource-mongodb'
import { MUUID } from 'uuid-mongodb'
import { getChangeLogModel } from '../db/index.js'
-import { ChangeLogType, OpType, BaseChangeRecordType, AreaChangeLogType, ClimbChangeLogType, OrganizationChangeLogType } from '../db/ChangeLogType'
+import {
+ AreaChangeLogType,
+ BaseChangeRecordType,
+ ChangeLogType,
+ ClimbChangeLogType,
+ OpType,
+ OrganizationChangeLogType
+} from '../db/ChangeLogType'
import { logger } from '../logger.js'
import { areaHistoryDataSource } from './AreaHistoryDatasource.js'
import { organizationHistoryDataSource } from './OrganizationHistoryDatasource.js'
@@ -70,14 +77,13 @@ export default class ChangeLogDataSource extends MongoDataSource
* @returns change sets
*/
async getChangeSets (uuidList: MUUID[]): Promise> {
- const rs = await this.changeLogModel.aggregate([
+ return await this.changeLogModel.aggregate([
{
$sort: {
createdAt: -1
}
}
]).limit(500)
- return rs
}
async _testRemoveAll (): Promise {
diff --git a/src/model/MutableAreaDataSource.ts b/src/model/MutableAreaDataSource.ts
index fbbd7428..208671fb 100644
--- a/src/model/MutableAreaDataSource.ts
+++ b/src/model/MutableAreaDataSource.ts
@@ -1,30 +1,55 @@
-import { Point, geometry } from '@turf/helpers'
-import muuid, { MUUID } from 'uuid-mongodb'
-import { v5 as uuidv5, NIL } from 'uuid'
-import mongoose, { ClientSession } from 'mongoose'
-import { produce } from 'immer'
-import { UserInputError } from 'apollo-server'
-import isoCountries from 'i18n-iso-countries'
-import enJson from 'i18n-iso-countries/langs/en.json' assert { type: 'json' }
import bbox2Polygon from '@turf/bbox-polygon'
+import { geometry, Point } from '@turf/helpers'
+import { UserInputError } from 'apollo-server-express'
+import isoCountries from 'i18n-iso-countries'
+import enJson from 'i18n-iso-countries/langs/en.json' assert {type: 'json'}
+import { produce } from 'immer'
+import mongoose, { ClientSession } from 'mongoose'
+import { NIL, v5 as uuidv5 } from 'uuid'
+import muuid, { MUUID } from 'uuid-mongodb'
-import { AreaType, AreaDocumnent, AreaEditableFieldsType, OperationType, UpdateSortingOrderType } from '../db/AreaTypes.js'
-import AreaDataSource from './AreaDataSource.js'
-import { createRootNode } from '../db/import/usa/AreaTree.js'
-import { makeDBArea } from '../db/import/usa/AreaTransformer.js'
-import { changelogDataSource } from './ChangeLogDataSource.js'
-import { ChangeRecordMetadataType } from '../db/ChangeLogType.js'
-import CountriesLngLat from '../data/countries-with-lnglat.json' assert { type: 'json' }
-import { logger } from '../logger.js'
import { GradeContexts } from '../GradeUtils.js'
-import { sanitizeStrict } from '../utils/sanitize.js'
+import CountriesLngLat from '../data/countries-with-lnglat.json' assert {type: 'json'}
+import {
+ AreaDocumnent,
+ AreaEditableFieldsType,
+ AreaType,
+ OperationType,
+ UpdateSortingOrderType
+} from '../db/AreaTypes.js'
+import { ChangeRecordMetadataType } from '../db/ChangeLogType.js'
import { ExperimentalAuthorType } from '../db/UserTypes.js'
-import { createInstance as createExperimentalUserDataSource } from '../model/ExperimentalUserDataSource.js'
-import { StatsSummary, leafReducer, nodesReducer } from '../db/utils/jobs/TreeUpdaters/updateAllAreas.js'
+import { makeDBArea } from '../db/import/usa/AreaTransformer.js'
+import { createRootNode } from '../db/import/usa/AreaTree.js'
+import { leafReducer, nodesReducer, StatsSummary } from '../db/utils/jobs/TreeUpdaters/updateAllAreas.js'
import { bboxFrom } from '../geo-utils.js'
+import { logger } from '../logger.js'
+import { createInstance as createExperimentalUserDataSource } from '../model/ExperimentalUserDataSource.js'
+import { sanitizeStrict } from '../utils/sanitize.js'
+import AreaDataSource from './AreaDataSource.js'
+import { changelogDataSource } from './ChangeLogDataSource.js'
+import { withTransaction } from '../utils/helpers.js'
isoCountries.registerLocale(enJson)
+export interface AddAreaOptions {
+ user: MUUID
+ areaName: string
+ parentUuid?: MUUID | null
+ countryCode?: string
+ experimentalAuthor?: ExperimentalAuthorType
+ isLeaf?: boolean
+ isBoulder?: boolean
+ session?: ClientSession
+}
+
+export interface UpdateAreaOptions {
+ user: MUUID
+ areaUuid: MUUID
+ document: AreaEditableFieldsType
+ session?: ClientSession
+}
+
export default class MutableAreaDataSource extends AreaDataSource {
experimentalUserDataSource = createExperimentalUserDataSource()
@@ -101,6 +126,19 @@ export default class MutableAreaDataSource extends AreaDataSource {
throw new Error('Error inserting ' + countryCode)
}
+ async addAreaWith ({
+ user,
+ areaName,
+ parentUuid = null,
+ countryCode,
+ experimentalAuthor,
+ isLeaf,
+ isBoulder,
+ session
+ }: AddAreaOptions): Promise {
+ return await this.addArea(user, areaName, parentUuid, countryCode, experimentalAuthor, isLeaf, isBoulder, session)
+ }
+
/**
* Add a new area. Either a parent id or country code is required.
* @param user
@@ -114,40 +152,42 @@ export default class MutableAreaDataSource extends AreaDataSource {
countryCode?: string,
experimentalAuthor?: ExperimentalAuthorType,
isLeaf?: boolean,
- isBoulder?: boolean): Promise {
+ isBoulder?: boolean,
+ sessionCtx?: ClientSession): Promise {
if (parentUuid == null && countryCode == null) {
- throw new Error('Adding area failed. Must provide parent Id or country code')
+ throw new Error(`Adding area "${areaName}" failed. Must provide parent Id or country code`)
}
- let uuid: MUUID
+ let parentId: MUUID
if (parentUuid != null) {
- uuid = parentUuid
+ parentId = parentUuid
} else if (countryCode != null) {
- uuid = countryCode2Uuid(countryCode)
+ parentId = countryCode2Uuid(countryCode)
+ } else {
+ throw new Error(`Adding area "${areaName}" failed. Unable to determine parent id or country code`)
}
- const session = await this.areaModel.startSession()
-
- let ret: AreaType
-
- // withTransaction() doesn't return the callback result
- // see https://jira.mongodb.org/browse/NODE-2014
- await session.withTransaction(
- async (session) => {
- ret = await this._addArea(session, user, areaName, uuid, experimentalAuthor, isLeaf, isBoulder)
- return ret
- })
- // @ts-expect-error
- return ret
+ const session = sessionCtx ?? await this.areaModel.startSession()
+ try {
+ if (session.inTransaction()) {
+ return await this._addArea(session, user, areaName, parentId, experimentalAuthor, isLeaf, isBoulder)
+ } else {
+ return await withTransaction(session, async () => await this._addArea(session, user, areaName, parentId, experimentalAuthor, isLeaf, isBoulder))
+ }
+ } finally {
+ if (sessionCtx == null) {
+ await session.endSession()
+ }
+ }
}
async _addArea (session, user: MUUID, areaName: string, parentUuid: MUUID, experimentalAuthor?: ExperimentalAuthorType, isLeaf?: boolean, isBoulder?: boolean): Promise {
const parentFilter = { 'metadata.area_id': parentUuid }
- const parent = await this.areaModel.findOne(parentFilter).session(session).orFail(new UserInputError('Expecting 1 parent, found none.'))
+ const parent = await this.areaModel.findOne(parentFilter).session(session).orFail(new UserInputError(`[${areaName}]: Expecting country or area parent, found none with id ${parentUuid.toString()}`))
if (parent.metadata.leaf || (parent.metadata?.isBoulder ?? false)) {
if (parent.children.length > 0 || parent.climbs.length > 0) {
- throw new UserInputError('Adding new areas to a leaf or boulder area is not allowed.')
+ throw new UserInputError(`[${areaName}]: Adding new areas to a leaf or boulder area is not allowed.`)
}
// No children. It's ok to continue turning an empty crag/boulder into an area.
parent.metadata.leaf = false
@@ -293,6 +333,10 @@ export default class MutableAreaDataSource extends AreaDataSource {
}).session(session).lean()
}
+ async updateAreaWith ({ user, areaUuid, document, session }: UpdateAreaOptions): Promise {
+ return await this.updateArea(user, areaUuid, document, session)
+ }
+
/**
* Update one or more area fields.
*
@@ -300,9 +344,10 @@ export default class MutableAreaDataSource extends AreaDataSource {
* @param user
* @param areaUuid Area uuid to be updated
* @param document New fields
+ * @param sessionCtx optional existing session to use for the transactions
* @returns Newly updated area
*/
- async updateArea (user: MUUID, areaUuid: MUUID, document: AreaEditableFieldsType): Promise {
+ async updateArea (user: MUUID, areaUuid: MUUID, document: AreaEditableFieldsType, sessionCtx?: ClientSession): Promise {
const _updateArea = async (session: ClientSession, user: MUUID, areaUuid: MUUID, document: AreaEditableFieldsType): Promise => {
const filter = {
'metadata.area_id': areaUuid,
@@ -311,36 +356,46 @@ export default class MutableAreaDataSource extends AreaDataSource {
const area = await this.areaModel.findOne(filter).session(session)
if (area == null) {
- throw new Error('Area update error. Reason: Area not found.')
+ throw new Error(`Area update error. Reason: Area with id ${areaUuid.toString()} not found.`)
}
- const { areaName, description, shortCode, isDestination, isLeaf, isBoulder, lat, lng, experimentalAuthor } = document
+ const {
+ areaName,
+ description,
+ shortCode,
+ isDestination,
+ isLeaf,
+ isBoulder,
+ lat,
+ lng,
+ experimentalAuthor
+ } = document
// See https://github.com/OpenBeta/openbeta-graphql/issues/244
- let experimentaAuthorId: MUUID | null = null
+ let experimentalAuthorId: MUUID | null = null
if (experimentalAuthor != null) {
- experimentaAuthorId = await this.experimentalUserDataSource.updateUser(session, experimentalAuthor.displayName, experimentalAuthor.url)
+ experimentalAuthorId = await this.experimentalUserDataSource.updateUser(session, experimentalAuthor.displayName, experimentalAuthor.url)
}
const opType = OperationType.updateArea
const change = await changelogDataSource.create(session, user, opType)
const _change: ChangeRecordMetadataType = {
- user: experimentaAuthorId ?? user,
+ user: experimentalAuthorId ?? user,
historyId: change._id,
prevHistoryId: area._change?.historyId._id,
operation: opType,
seq: 0
}
area.set({ _change })
- area.updatedBy = experimentaAuthorId ?? user
+ area.updatedBy = experimentalAuthorId ?? user
if (area.pathTokens.length === 1) {
- if (areaName != null || shortCode != null) throw new Error('Area update error. Reason: Updating country name or short code is not allowed.')
+ if (areaName != null || shortCode != null) throw new Error(`[${area.area_name}]: Area update error. Reason: Updating country name or short code is not allowed.`)
}
if (area.children.length > 0 && (isLeaf != null || isBoulder != null)) {
- throw new Error('Area update error. Reason: Updating leaf or boulder status of an area with subareas is not allowed.')
+ throw new Error(`[${area.area_name}]: Area update error. Reason: Updating leaf or boulder status of an area with subareas is not allowed.`)
}
if (areaName != null) {
@@ -357,7 +412,7 @@ export default class MutableAreaDataSource extends AreaDataSource {
if (isBoulder != null) {
area.set({ 'metadata.isBoulder': isBoulder })
if (isBoulder) {
- // boulfer == true implies leaf = true
+ // boulder == true implies leaf = true
area.set({ 'metadata.leaf': true })
}
}
@@ -386,17 +441,18 @@ export default class MutableAreaDataSource extends AreaDataSource {
return cursor.toObject()
}
- const session = await this.areaModel.startSession()
- let ret: AreaType | null = null
-
- // withTransaction() doesn't return the callback result
- // see https://jira.mongodb.org/browse/NODE-2014
- await session.withTransaction(
- async session => {
- ret = await _updateArea(session, user, areaUuid, document)
- return ret
- })
- return ret
+ const session = sessionCtx ?? await this.areaModel.startSession()
+ try {
+ if (session.inTransaction()) {
+ return await _updateArea(session, user, areaUuid, document)
+ } else {
+ return await withTransaction(session, async () => await _updateArea(session, user, areaUuid, document))
+ }
+ } finally {
+ if (sessionCtx == null) {
+ await session.endSession()
+ }
+ }
}
/**
@@ -409,7 +465,9 @@ export default class MutableAreaDataSource extends AreaDataSource {
*/
async updatePathTokens (session: ClientSession, changeRecord: ChangeRecordMetadataType, area: AreaDocumnent, newAreaName: string, changeIndex: number = -1): Promise<void> {
if (area.pathTokens.length > 1) {
- if (changeIndex === -1) { changeIndex = area.pathTokens.length - 1 }
+ if (changeIndex === -1) {
+ changeIndex = area.pathTokens.length - 1
+ }
const newPath = [...area.pathTokens]
newPath[changeIndex] = newAreaName
@@ -599,7 +657,7 @@ export const newAreaHelper = (areaName: string, parentAncestors: string, parentP
export const countryCode2Uuid = (code: string): MUUID => {
if (!isoCountries.isValid(code)) {
- throw new Error('Invalid country code. Expect alpha2 or alpha3')
+ throw new Error(`Invalid country code: ${code}. Expect alpha2 or alpha3`)
}
const alpha3 = code.length === 2 ? isoCountries.toAlpha3(code) : code
return muuid.from(uuidv5(alpha3.toUpperCase(), NIL))
diff --git a/src/model/MutableClimbDataSource.ts b/src/model/MutableClimbDataSource.ts
index d8dd5753..9ad85a72 100644
--- a/src/model/MutableClimbDataSource.ts
+++ b/src/model/MutableClimbDataSource.ts
@@ -1,19 +1,27 @@
-import muid, { MUUID } from 'uuid-mongodb'
-import { UserInputError } from 'apollo-server'
+import { UserInputError } from 'apollo-server-express'
import { ClientSession } from 'mongoose'
+import muid, { MUUID } from 'uuid-mongodb'
+import { createGradeObject, gradeContextToGradeScales, sanitizeDisciplines } from '../GradeUtils.js'
+import { getAreaModel } from '../db/AreaSchema.js'
import { AreaDocumnent } from '../db/AreaTypes.js'
-import { ClimbType, ClimbChangeDocType, ClimbChangeInputType, ClimbEditOperationType, IPitch } from '../db/ClimbTypes.js'
-import ClimbDataSource from './ClimbDataSource.js'
-import { createInstance as createExperimentalUserDataSource } from './ExperimentalUserDataSource.js'
-import { sanitizeDisciplines, gradeContextToGradeScales, createGradeObject } from '../GradeUtils.js'
-import { getClimbModel } from '../db/ClimbSchema.js'
import { ChangeRecordMetadataType } from '../db/ChangeLogType.js'
-import { changelogDataSource } from './ChangeLogDataSource.js'
+import { getClimbModel } from '../db/ClimbSchema.js'
+import { ClimbChangeDocType, ClimbChangeInputType, ClimbEditOperationType, ClimbType, IPitch } from '../db/ClimbTypes.js'
+import { aggregateCragStats } from '../db/utils/Aggregate.js'
import { sanitize, sanitizeStrict } from '../utils/sanitize.js'
+import { changelogDataSource } from './ChangeLogDataSource.js'
+import ClimbDataSource from './ClimbDataSource.js'
+import { createInstance as createExperimentalUserDataSource } from './ExperimentalUserDataSource.js'
import MutableAreaDataSource from './MutableAreaDataSource.js'
-import { aggregateCragStats } from '../db/utils/Aggregate.js'
-import { getAreaModel } from '../db/AreaSchema.js'
+import { withTransaction } from '../utils/helpers.js'
+
+export interface AddOrUpdateClimbsOptions {
+ userId: MUUID
+ parentId: MUUID
+ changes: ClimbChangeInputType[]
+ session?: ClientSession
+}
export default class MutableClimbDataSource extends ClimbDataSource {
experimentalUserDataSource = createExperimentalUserDataSource()
@@ -40,6 +48,7 @@ export default class MutableClimbDataSource extends ClimbDataSource {
id: MUUID
existed: boolean
}
+
// A list of ID objects to track whether the ID exists in the DB
const idList = newClimbIds.reduce((acc, curr) => {
if (existingIds.some(item => item._id.toUUID().toString() === curr.toUUID().toString())) {
@@ -237,25 +246,23 @@ export default class MutableClimbDataSource extends ClimbDataSource {
}
}
+ async addOrUpdateClimbsWith ({ userId, parentId, changes, session }: AddOrUpdateClimbsOptions): Promise<string[]> {
+ return await this.addOrUpdateClimbs(userId, parentId, changes, session)
+ }
+
/**
* Update one or climbs (or boulder problems). Add climb to the area if it doesn't exist.
* @param parentId parent area id
* @param changes
* @returns a list of updated (or newly added) climb IDs
*/
- async addOrUpdateClimbs (userId: MUUID, parentId: MUUID, changes: ClimbChangeInputType[]): Promise<string[]> {
- const session = await this.areaModel.startSession()
- let ret: string[]
-
- // withTransaction() doesn't return the callback result
- // see https://jira.mongodb.org/browse/NODE-2014
- await session.withTransaction(
- async (session) => {
- ret = await this._addOrUpdateClimbs(userId, session, parentId, changes)
- return ret
- })
- // @ts-expect-error
- return ret
+ async addOrUpdateClimbs (userId: MUUID, parentId: MUUID, changes: ClimbChangeInputType[], sessionCtx?: ClientSession): Promise<string[]> {
+ const session = sessionCtx ?? await this.areaModel.startSession()
+ if (session.inTransaction()) {
+ return await this._addOrUpdateClimbs(userId, session, parentId, changes)
+ } else {
+ return await withTransaction(session, async () => await this._addOrUpdateClimbs(userId, session, parentId, changes)) ?? []
+ }
}
/**
diff --git a/src/model/MutableMediaDataSource.ts b/src/model/MutableMediaDataSource.ts
index 75cdef29..531dbb58 100644
--- a/src/model/MutableMediaDataSource.ts
+++ b/src/model/MutableMediaDataSource.ts
@@ -1,4 +1,4 @@
-import { UserInputError } from 'apollo-server'
+import { UserInputError } from 'apollo-server-express'
import mongoose from 'mongoose'
import muuid from 'uuid-mongodb'
diff --git a/src/model/__tests__/AreaHistoryDataSource.ts b/src/model/__tests__/AreaHistoryDataSource.ts
index 171433b3..e4fbaf6f 100644
--- a/src/model/__tests__/AreaHistoryDataSource.ts
+++ b/src/model/__tests__/AreaHistoryDataSource.ts
@@ -1,52 +1,36 @@
-import mongoose from 'mongoose'
-import { ChangeStream } from 'mongodb'
-import { jest } from '@jest/globals'
import muuid from 'uuid-mongodb'
import MutableAreaDataSource from '../MutableAreaDataSource.js'
-import { connectDB, createIndexes, getAreaModel } from '../../db/index.js'
-import streamListener from '../../db/edit/streamListener.js'
-import { logger } from '../../logger.js'
import { changelogDataSource } from '../ChangeLogDataSource.js'
import { OperationType } from '../../db/AreaTypes.js'
-
-jest.setTimeout(120000)
+import inMemoryDB from '../../utils/inMemoryDB.js'
+import waitForExpect from 'wait-for-expect'
+import jest from 'jest-mock'
describe('Area history', () => {
let areas: MutableAreaDataSource
- let stream: ChangeStream
+ let onChange: jest.Mock
const testUser = muuid.v4()
beforeAll(async () => {
- await connectDB()
-
- stream = await streamListener()
-
- try {
- await getAreaModel().collection.drop()
- await createIndexes()
- } catch (e) {
- logger.info('Expected exception')
- }
-
+ onChange = jest.fn()
+ await inMemoryDB.connect(onChange)
await changelogDataSource._testRemoveAll()
- areas = new MutableAreaDataSource(mongoose.connection.db.collection('areas'))
+ areas = MutableAreaDataSource.getInstance()
})
afterAll(async () => {
try {
- await stream.close()
- await mongoose.disconnect()
+ await inMemoryDB.close()
} catch (e) {
console.log('closing mongoose', e)
}
})
beforeEach(async () => {
- // await changelogDataSource._testRemoveAll()
- // eslint-disable-next-line
- await new Promise(res => setTimeout(res, 3000))
+ await changelogDataSource._testRemoveAll()
+ onChange.mockClear()
})
it('should create history records for new subareas', async () => {
@@ -55,16 +39,12 @@ describe('Area history', () => {
expect(newArea.area_name).toEqual(usa.area_name)
const or = await areas.addArea(testUser, 'oregon', usa.metadata.area_id)
- // eslint-disable-next-line
- await new Promise(res => setTimeout(res, 1000))
const nv = await areas.addArea(testUser, 'nevada', usa.metadata.area_id)
expect(nv?._id).toBeTruthy()
expect(or?._id).toBeTruthy()
- // eslint-disable-next-line
- await new Promise(res => setTimeout(res, 2000))
-
+ await waitForExpect(() => expect(onChange).toHaveBeenCalledTimes(5))
const areaHistory = await changelogDataSource.getAreaChangeSets()
expect(areaHistory).toHaveLength(2)
@@ -125,9 +105,7 @@ describe('Area history', () => {
await areas.setDestinationFlag(testUser, areaUuid, true)
await areas.setDestinationFlag(testUser, areaUuid, false)
- // eslint-disable-next-line
- await new Promise(res => setTimeout(res, 2000))
-
+ await waitForExpect(() => expect(onChange).toHaveBeenCalledTimes(5))
const changset = await changelogDataSource.getAreaChangeSets(areaUuid)
expect(changset).toHaveLength(3)
@@ -149,9 +127,7 @@ describe('Area history', () => {
await areas.deleteArea(testUser, leonidio.metadata.area_id)
- // eslint-disable-next-line
- await new Promise(res => setTimeout(res, 10000))
-
+ await waitForExpect(() => expect(onChange).toHaveBeenCalledTimes(5))
const history = await changelogDataSource.getAreaChangeSets(leonidio.metadata.area_id)
expect(history).toHaveLength(2)
@@ -181,9 +157,7 @@ describe('Area history', () => {
expect(deleted).toBeTruthy()
- // eslint-disable-next-line
- await new Promise(res => setTimeout(res, 3000))
-
+ await waitForExpect(() => expect(onChange).toHaveBeenCalledTimes(5))
const history = await changelogDataSource.getAreaChangeSets(spain.metadata.area_id)
// should only have 2 entries:
diff --git a/src/model/__tests__/AreaUtils.ts b/src/model/__tests__/AreaUtils.ts
index c6add400..55d00ee5 100644
--- a/src/model/__tests__/AreaUtils.ts
+++ b/src/model/__tests__/AreaUtils.ts
@@ -1,5 +1,5 @@
// import muid from 'uuid-mongodb'
-import { genMUIDFromPaths } from '../MutableAreaDataSource'
+import { genMUIDFromPaths } from '../MutableAreaDataSource.js'
describe('Test area utilities', () => {
it('generates UUID from area tokens', () => {
diff --git a/src/model/__tests__/BulkDataSource.test.ts b/src/model/__tests__/BulkDataSource.test.ts
new file mode 100644
index 00000000..9fd83866
--- /dev/null
+++ b/src/model/__tests__/BulkDataSource.test.ts
@@ -0,0 +1,233 @@
+import {ChangeStream} from 'mongodb';
+import muuid from 'uuid-mongodb';
+import {changelogDataSource} from '../ChangeLogDataSource.js';
+import MutableClimbDataSource from '../MutableClimbDataSource.js';
+import {AreaType} from '../../db/AreaTypes.js';
+import {ClimbType} from '../../db/ClimbTypes.js';
+import streamListener from '../../db/edit/streamListener.js';
+import inMemoryDB from "../../utils/inMemoryDB.js";
+import {isFulfilled} from "../../utils/testUtils.js";
+import BulkImportDataSource from "../BulkImportDataSource.js";
+import {BulkImportAreaInputType, BulkImportResultType} from "../../db/BulkImportTypes.js";
+
+describe('bulk import e2e', () => {
+ let bulkImport: BulkImportDataSource;
+ let climbs: MutableClimbDataSource;
+ let stream: ChangeStream;
+ const testUser = muuid.v4();
+
+ const assertBulkImport = async (...input: BulkImportAreaInputType[]): Promise<BulkImportResultType> => {
+ const result = await bulkImport.bulkImport({
+ user: testUser,
+ input: {areas: input},
+ climbs
+ });
+
+ const addedAreas = await Promise.allSettled(
+ result.addedAreas.map((area) =>
+ bulkImport.findOneAreaByUUID(area.metadata.area_id)
+ )
+ );
+ const updatedAreas = await Promise.allSettled(
+ result.updatedAreas.map((area) =>
+ bulkImport.findOneAreaByUUID(area.metadata.area_id)
+ )
+ );
+ const addedOrUpdatedClimbs = await Promise.allSettled(
+ result.addedOrUpdatedClimbs.map((climb) => climbs.findOneClimbByMUUID(climb._id))
+ );
+
+ return {
+ addedAreas: addedAreas.filter(isFulfilled).map((p) => p.value),
+ updatedAreas: updatedAreas.filter(isFulfilled).map((p) => p.value),
+ addedOrUpdatedClimbs: addedOrUpdatedClimbs.filter(isFulfilled).map((p) => p.value as ClimbType),
+ };
+ };
+
+ beforeAll(async () => {
+ await inMemoryDB.connect()
+ stream = await streamListener();
+ });
+
+ afterAll(async () => {
+ try {
+ await stream.close();
+ await inMemoryDB.close()
+ } catch (e) {
+ console.log('error closing mongoose', e);
+ }
+ });
+
+ beforeEach(async () => {
+ bulkImport = BulkImportDataSource.getInstance();
+ climbs = MutableClimbDataSource.getInstance();
+
+ await bulkImport.addCountry('us');
+ });
+
+ afterEach(async () => {
+ await changelogDataSource._testRemoveAll();
+ await inMemoryDB.clear()
+ });
+
+ describe('adding new areas and climbs', () => {
+ it('should commit a new minimal area to the database', async () => {
+ await expect(
+ assertBulkImport({
+ areaName: 'Minimal Area',
+ countryCode: 'us',
+ })
+ ).resolves.toMatchObject({
+ addedAreas: [
+ {
+ area_name: 'Minimal Area',
+ gradeContext: 'US',
+ metadata: {
+ leaf: false,
+ isBoulder: false,
+ },
+ },
+ ],
+ });
+ });
+
+ it('should rollback when one of the areas fails to import', async () => {
+ await expect(
+ assertBulkImport(
+ {
+ areaName: 'Test Area',
+ countryCode: 'us',
+ },
+ {
+ areaName: 'Test Area 2',
+ }
+ )
+ ).rejects.toThrowError("Must provide parent Id or country code");
+ });
+
+ it('should import nested areas with children', async () => {
+ await expect(
+ assertBulkImport({
+ areaName: 'Parent Area',
+ countryCode: 'us',
+ children: [
+ {
+ areaName: 'Child Area 2',
+ },
+ ],
+ })
+ ).resolves.toMatchObject({
+ addedAreas: [
+ {area_name: 'Parent Area', gradeContext: 'US'},
+ {area_name: 'Child Area 2', gradeContext: 'US'},
+ ] as Partial<AreaType>[],
+ });
+ });
+
+ it('should import nested areas with children and grandchildren', async () => {
+ await expect(
+ assertBulkImport({
+ areaName: 'Test Area',
+ countryCode: 'us',
+ children: [
+ {
+ areaName: 'Test Area 2',
+ children: [
+ {
+ areaName: 'Test Area 3',
+ },
+ ],
+ },
+ ],
+ })
+ ).resolves.toMatchObject({
+ addedAreas: [
+ {
+ area_name: 'Test Area',
+ pathTokens: ['United States of America', 'Test Area'],
+ },
+ {
+ area_name: 'Test Area 2',
+ pathTokens: [
+ 'United States of America',
+ 'Test Area',
+ 'Test Area 2',
+ ],
+ },
+ {
+ area_name: 'Test Area 3',
+ pathTokens: [
+ 'United States of America',
+ 'Test Area',
+ 'Test Area 2',
+ 'Test Area 3',
+ ],
+ },
+ ] as Partial<AreaType>[],
+ });
+ });
+
+ it('should import leaf areas with climbs', async () => {
+ await expect(
+ assertBulkImport({
+ areaName: 'Test Area',
+ countryCode: 'us',
+ climbs: [
+ {
+ name: 'Test Climb',
+ grade: '5.10a',
+ disciplines: {sport: true},
+ },
+ ],
+ })
+ ).resolves.toMatchObject({
+ addedAreas: [
+ {
+ area_name: 'Test Area',
+ gradeContext: 'US',
+ metadata: {
+ leaf: true,
+ isBoulder: false,
+ },
+ climbs: [{
+ name: 'Test Climb',
+ grades: {
+ yds: '5.10a',
+ },
+ }],
+ },
+ ],
+ addedOrUpdatedClimbs: [
+ {
+ name: 'Test Climb',
+ grades: {
+ yds: '5.10a',
+ },
+ },
+ ],
+ });
+ });
+ });
+
+ describe('updating existing areas', () => {
+ let area: AreaType;
+ beforeEach(async () => {
+ const result = await assertBulkImport({
+ areaName: 'Existing Area',
+ countryCode: 'us',
+ });
+ area = result.addedAreas[0] as AreaType;
+ });
+
+ it('should update an existing area', async () => {
+ await expect(
+ assertBulkImport({
+ uuid: area.metadata.area_id,
+ areaName: 'New Name',
+ })
+ ).resolves.toMatchObject({
+ updatedAreas: [{area_name: 'New Name'}],
+ });
+ });
+ });
+});
diff --git a/src/model/__tests__/ChangeLogDS.ts b/src/model/__tests__/ChangeLogDS.ts
index 4edfa5fb..e722867e 100644
--- a/src/model/__tests__/ChangeLogDS.ts
+++ b/src/model/__tests__/ChangeLogDS.ts
@@ -1,20 +1,17 @@
-import mongoose from 'mongoose'
-import { jest } from '@jest/globals'
import muuid from 'uuid-mongodb'
-import { connectDB, getChangeLogModel, getAreaModel } from '../../db/index.js'
+import { getAreaModel, getChangeLogModel } from '../../db/index.js'
import ChangeLogDataSource from '../ChangeLogDataSource.js'
import { OpType } from '../../db/ChangeLogType.js'
import { OperationType } from '../../db/AreaTypes.js'
import { logger } from '../../logger.js'
-
-jest.setTimeout(10000)
+import inMemoryDB from '../../utils/inMemoryDB.js'
describe('Area history', () => {
let changeLog: ChangeLogDataSource
beforeAll(async () => {
- await connectDB()
+ await inMemoryDB.connect()
try {
await getAreaModel().collection.drop()
@@ -23,14 +20,11 @@ describe('Area history', () => {
logger.info('Expected exception')
}
- changeLog = new ChangeLogDataSource(
- mongoose.connection.db.collection(
- getChangeLogModel().modelName)
- )
+ changeLog = ChangeLogDataSource.getInstance()
})
afterAll(async () => {
- await mongoose.connection.close()
+ await inMemoryDB.close()
})
it('should create a change record', async () => {
diff --git a/src/model/__tests__/MediaDataSource.ts b/src/model/__tests__/MediaDataSource.ts
index 83a65c7c..afe34892 100644
--- a/src/model/__tests__/MediaDataSource.ts
+++ b/src/model/__tests__/MediaDataSource.ts
@@ -1,13 +1,21 @@
import mongoose from 'mongoose'
import muuid, { MUUID } from 'uuid-mongodb'
-import MutableMediaDataSource from '../MutableMediaDataSource'
-import AreaDataSource from '../MutableAreaDataSource'
-import ClimbDataSource from '../MutableClimbDataSource'
+import MutableMediaDataSource from '../MutableMediaDataSource.js'
+import AreaDataSource from '../MutableAreaDataSource.js'
+import ClimbDataSource from '../MutableClimbDataSource.js'
-import { connectDB, createIndexes } from '../../db/index.js'
+import { createIndexes } from '../../db/index.js'
import { AreaType } from '../../db/AreaTypes.js'
-import { EntityTag, MediaObject, MediaObjectGQLInput, AddTagEntityInput, UserMediaQueryInput, UserMedia } from '../../db/MediaObjectTypes.js'
+import {
+ AddTagEntityInput,
+ EntityTag,
+ MediaObject,
+ MediaObjectGQLInput,
+ UserMedia,
+ UserMediaQueryInput
+} from '../../db/MediaObjectTypes.js'
import { newSportClimb1 } from './MutableClimbDataSource.js'
+import inMemoryDB from '../../utils/inMemoryDB.js'
const TEST_MEDIA: MediaObjectGQLInput = {
userUuid: 'a2eb6353-65d1-445f-912c-53c6301404bd',
@@ -34,7 +42,7 @@ describe('MediaDataSource', () => {
let testMediaObject: MediaObject
beforeAll(async () => {
- await connectDB()
+ await inMemoryDB.connect()
areas = AreaDataSource.getInstance()
climbs = ClimbDataSource.getInstance()
@@ -85,7 +93,7 @@ describe('MediaDataSource', () => {
})
afterAll(async () => {
- await mongoose.connection.close()
+ await inMemoryDB.close()
})
it('should not tag a nonexistent area', async () => {
diff --git a/src/model/__tests__/MutableClimbDataSource.ts b/src/model/__tests__/MutableClimbDataSource.ts
index 58c73ff1..9fa8abe8 100644
--- a/src/model/__tests__/MutableClimbDataSource.ts
+++ b/src/model/__tests__/MutableClimbDataSource.ts
@@ -1,16 +1,16 @@
-import mongoose from 'mongoose'
import muid from 'uuid-mongodb'
import { ChangeStream } from 'mongodb'
import MutableClimbDataSource from '../MutableClimbDataSource.js'
import MutableAreaDataSource from '../MutableAreaDataSource.js'
-import { connectDB, createIndexes, getAreaModel, getClimbModel } from '../../db/index.js'
+import { createIndexes, getAreaModel, getClimbModel } from '../../db/index.js'
import { logger } from '../../logger.js'
-import { ClimbType, ClimbChangeInputType } from '../../db/ClimbTypes.js'
+import { ClimbChangeInputType, ClimbType } from '../../db/ClimbTypes.js'
import { sanitizeDisciplines } from '../../GradeUtils.js'
import streamListener from '../../db/edit/streamListener.js'
import { changelogDataSource } from '../ChangeLogDataSource.js'
+import inMemoryDB from '../../utils/inMemoryDB.js'
export const newSportClimb1: ClimbChangeInputType = {
name: 'Cool route 1',
@@ -140,7 +140,7 @@ describe('Climb CRUD', () => {
}
beforeAll(async () => {
- await connectDB()
+ await inMemoryDB.connect()
stream = await streamListener()
try {
@@ -161,7 +161,7 @@ describe('Climb CRUD', () => {
afterAll(async () => {
try {
await stream.close()
- await mongoose.disconnect()
+ await inMemoryDB.close()
} catch (e) {
console.log('closing mongoose', e)
}
diff --git a/src/model/__tests__/MutableOrganizationDataSource.ts b/src/model/__tests__/MutableOrganizationDataSource.ts
index e2f06cca..651198b5 100644
--- a/src/model/__tests__/MutableOrganizationDataSource.ts
+++ b/src/model/__tests__/MutableOrganizationDataSource.ts
@@ -1,12 +1,12 @@
-import mongoose from 'mongoose'
import muuid from 'uuid-mongodb'
import MutableOrganizationDataSource from '../MutableOrganizationDataSource.js'
import MutableAreaDataSource from '../MutableAreaDataSource.js'
-import { connectDB, createIndexes, getAreaModel, getOrganizationModel } from '../../db/index.js'
+import { createIndexes, getAreaModel, getOrganizationModel } from '../../db/index.js'
import { OrganizationEditableFieldsType, OrgType } from '../../db/OrganizationTypes.js'
import { AreaType } from '../../db/AreaTypes.js'
import { muuidToString } from '../../utils/helpers.js'
+import inMemoryDB from '../../utils/inMemoryDB.js'
describe('Organization', () => {
let organizations: MutableOrganizationDataSource
@@ -19,7 +19,7 @@ describe('Organization', () => {
const testUser = muuid.v4()
beforeAll(async () => {
- await connectDB()
+ await inMemoryDB.connect()
try { // Use the same fixed areas for testing so no need to drop and re-create on each test.
await getAreaModel().collection.drop()
} catch (e) {
@@ -57,7 +57,7 @@ describe('Organization', () => {
})
afterAll(async () => {
- await mongoose.connection.close()
+ await inMemoryDB.close()
})
it('should successfully create a document when passed valid input', async () => {
@@ -82,7 +82,12 @@ describe('Organization', () => {
it('should retrieve documents based on displayName', async () => {
const newOrg = await organizations.addOrganization(testUser, OrgType.localClimbingOrganization, fullOrg)
// Match should be case-insensitive.
- const displayNameSearchCursor = await organizations.findOrganizationsByFilter({ displayName: { match: 'openbeta', exactMatch: false } })
+ const displayNameSearchCursor = await organizations.findOrganizationsByFilter({
+ displayName: {
+ match: 'openbeta',
+ exactMatch: false
+ }
+ })
const displayNameSearchRes = await displayNameSearchCursor.toArray()
expect(displayNameSearchRes).toHaveLength(1)
expect(displayNameSearchRes[0]._id).toEqual(newOrg._id)
@@ -107,7 +112,9 @@ describe('Organization', () => {
const updatedOrg = await organizations.updateOrganization(testUser, newOrg.orgId, document)
expect(updatedOrg).toBeDefined()
- if (updatedOrg == null) { fail('should not reach here.') }
+ if (updatedOrg == null) {
+ fail('should not reach here.')
+ }
expect(updatedOrg.associatedAreaIds.map(muuidToString).sort())
.toStrictEqual(document?.associatedAreaIds?.map(muuidToString).sort())
expect(updatedOrg.excludedAreaIds.map(muuidToString).sort())
diff --git a/src/model/__tests__/UserDataSource.ts b/src/model/__tests__/UserDataSource.ts
index 2921b247..04ff694a 100644
--- a/src/model/__tests__/UserDataSource.ts
+++ b/src/model/__tests__/UserDataSource.ts
@@ -2,15 +2,16 @@ import mongoose from 'mongoose'
import muuid from 'uuid-mongodb'
import { jest } from '@jest/globals'
-import { connectDB, getUserModel } from '../../db/index.js'
+import { getUserModel } from '../../db/index.js'
import UserDataSource from '../UserDataSource.js'
import { UpdateProfileGQLInput } from '../../db/UserTypes.js'
+import inMemoryDB from '../../utils/inMemoryDB.js'
describe('UserDataSource', () => {
let users: UserDataSource
beforeAll(async () => {
- await connectDB()
+ await inMemoryDB.connect()
const userModel = getUserModel()
try {
await userModel.collection.drop()
@@ -22,7 +23,7 @@ describe('UserDataSource', () => {
})
afterAll(async () => {
- await mongoose.connection.close()
+ await inMemoryDB.close()
})
afterEach(() => {
diff --git a/src/model/__tests__/ticks.ts b/src/model/__tests__/ticks.ts
index 5325f34c..7c934b67 100644
--- a/src/model/__tests__/ticks.ts
+++ b/src/model/__tests__/ticks.ts
@@ -1,11 +1,11 @@
-import mongoose from 'mongoose'
import { produce } from 'immer'
import TickDataSource from '../TickDataSource.js'
-import { connectDB, getTickModel, getUserModel } from '../../db/index.js'
+import { getTickModel, getUserModel } from '../../db/index.js'
import { TickInput } from '../../db/TickTypes.js'
import muuid from 'uuid-mongodb'
import UserDataSource from '../UserDataSource.js'
import { UpdateProfileGQLInput } from '../../db/UserTypes.js'
+import inMemoryDB from '../../utils/inMemoryDB.js'
const userId = muuid.v4()
@@ -51,7 +51,7 @@ describe('Ticks', () => {
beforeAll(async () => {
console.log('#BeforeAll Ticks')
- await connectDB()
+ await inMemoryDB.connect()
try {
await getTickModel().collection.drop()
@@ -65,7 +65,7 @@ describe('Ticks', () => {
})
afterAll(async () => {
- await mongoose.connection.close()
+ await inMemoryDB.close()
})
afterEach(async () => {
diff --git a/src/model/__tests__/updateAreas.ts b/src/model/__tests__/updateAreas.ts
index 82b7cadb..347bc52d 100644
--- a/src/model/__tests__/updateAreas.ts
+++ b/src/model/__tests__/updateAreas.ts
@@ -1,11 +1,11 @@
-import mongoose from 'mongoose'
import muuid from 'uuid-mongodb'
import { geometry } from '@turf/helpers'
import MutableAreaDataSource from '../MutableAreaDataSource.js'
import MutableClimbDataSource from '../MutableClimbDataSource.js'
-import { connectDB, createIndexes, getAreaModel, getClimbModel } from '../../db/index.js'
+import { createIndexes, getAreaModel, getClimbModel } from '../../db/index.js'
import { AreaEditableFieldsType, UpdateSortingOrderType } from '../../db/AreaTypes.js'
+import inMemoryDB from '../../utils/inMemoryDB.js'
describe('Areas', () => {
let areas: MutableAreaDataSource
@@ -13,7 +13,7 @@ describe('Areas', () => {
const testUser = muuid.v4()
beforeAll(async () => {
- await connectDB()
+ await inMemoryDB.connect()
try {
await getAreaModel().collection.drop()
@@ -27,7 +27,7 @@ describe('Areas', () => {
})
afterAll(async () => {
- await mongoose.connection.close()
+ await inMemoryDB.close()
})
it('should create a country by Alpha-3 country code', async () => {
diff --git a/src/server.ts b/src/server.ts
index 7159ec8d..a3548d91 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -1,4 +1,4 @@
-import { ApolloServer } from 'apollo-server'
+import { ApolloServer } from 'apollo-server-express'
import mongoose from 'mongoose'
import { applyMiddleware } from 'graphql-middleware'
import { graphqlSchema } from './graphql/resolvers.js'
@@ -10,7 +10,7 @@ import MutableClimbDataSource from './model/MutableClimbDataSource.js'
import TickDataSource from './model/TickDataSource.js'
import { createContext } from './auth/middleware.js'
import permissions from './auth/permissions.js'
-import { localDevBypassAuthMiddleware } from './auth/local-dev/middleware.js'
+import { localDevBypassAuthContext } from './auth/local-dev/middleware.js'
import localDevBypassAuthPermissions from './auth/local-dev/permissions.js'
import XMediaDataSource from './model/XMediaDataSource.js'
import PostDataSource from './model/PostDataSource.js'
@@ -18,8 +18,11 @@ import MutableOrgDS from './model/MutableOrganizationDataSource.js'
import type { Context } from './types.js'
import type { DataSources } from 'apollo-server-core/dist/graphqlOptions'
import UserDataSource from './model/UserDataSource.js'
+import express from 'express'
+import * as http from 'http'
+import BulkImportDataSource from './model/BulkImportDataSource.js'
-export async function createServer (): Promise {
+export async function createServer (): Promise<{ app: express.Application, server: ApolloServer }> {
const schema = applyMiddleware(
graphqlSchema,
(process.env.LOCAL_DEV_BYPASS_AUTH === 'true' ? localDevBypassAuthPermissions : permissions).generate(graphqlSchema)
@@ -27,6 +30,7 @@ export async function createServer (): Promise {
const dataSources: () => DataSources = () => ({
climbs: MutableClimbDataSource.getInstance(),
areas: MutableAreaDataSource.getInstance(),
+ bulkImport: BulkImportDataSource.getInstance(),
organizations: MutableOrgDS.getInstance(),
ticks: TickDataSource.getInstance(),
history: ChangeLogDataSource.getInstance(),
@@ -40,13 +44,34 @@ export async function createServer (): Promise {
post: new PostDataSource(mongoose.connection.db.collection('post'))
})
+ const app = express()
+
const server = new ApolloServer({
introspection: true,
schema,
- context: process.env.LOCAL_DEV_BYPASS_AUTH === 'true' ? localDevBypassAuthMiddleware : createContext,
+ context: process.env.LOCAL_DEV_BYPASS_AUTH === 'true' ? localDevBypassAuthContext : createContext,
dataSources,
cache: 'bounded'
})
+ // server must be started before applying middleware
+ await server.start()
+ server.applyMiddleware({ app, path: '/' })
+
+ return { app, server }
+}
+
+export async function startServer ({ app, server, port = 4000 }: {
+ app: express.Application
+ server: ApolloServer
+ port?: number
+}): Promise<void> {
+ const httpServer = http.createServer(app)
+
+ httpServer.on('error', (e) => {
+ console.error('Error starting server', e)
+ throw e
+ })
- return server
+ await new Promise((resolve) => httpServer.listen({ port }, resolve))
+ console.log(`🚀 Server ready at http://localhost:${port}${server.graphqlPath}`)
}
diff --git a/src/types.ts b/src/types.ts
index 79bc66b8..4207dc59 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -11,6 +11,7 @@ import XMediaDataSource from './model/XMediaDataSource.js'
import PostDataSource from './model/PostDataSource.js'
import MutableOrganizationDataSource from './model/MutableOrganizationDataSource.js'
import type UserDataSource from './model/UserDataSource.js'
+import BulkImportDataSource from './model/BulkImportDataSource.js'
export enum SortDirection {
ASC = 1,
@@ -67,6 +68,7 @@ export type OrganizationGQLFilter = Partial m.toUUID().toString()
@@ -24,3 +25,14 @@ export const geojsonPointToLatitude = (point?: Point): number | undefined => poi
export const NON_ALPHANUMERIC_REGEX = /[\W_\s]+/g
export const canonicalizeUsername = (username: string): string => username.replaceAll(NON_ALPHANUMERIC_REGEX, '')
+
+// withTransaction() doesn't return the callback result
+// see https://jira.mongodb.org/browse/NODE-2014
+export const withTransaction = async (session: ClientSession, closure: () => Promise): Promise => {
+ let result: T | undefined
+ await session.withTransaction(async () => {
+ result = await closure()
+ return result
+ })
+ return result
+}
diff --git a/src/utils/inMemoryDB.ts b/src/utils/inMemoryDB.ts
index 567bdd46..ed9e6239 100644
--- a/src/utils/inMemoryDB.ts
+++ b/src/utils/inMemoryDB.ts
@@ -1,8 +1,9 @@
import mongoose, { ConnectOptions } from 'mongoose'
-import { ChangeStream, MongoClient } from 'mongodb'
+import { ChangeStream, ChangeStreamDocument, MongoClient } from 'mongodb'
import { MongoMemoryReplSet } from 'mongodb-memory-server'
-import { defaultPostConnect, checkVar } from '../db/index.js'
+import { checkVar, defaultPostConnect } from '../db/index.js'
import { logger } from '../logger.js'
+import { testStreamListener } from '../db/edit/streamListener'
/**
* In-memory Mongo replset used for testing.
@@ -10,12 +11,12 @@ import { logger } from '../logger.js'
* Need a replset to faciliate transactions.
*/
let mongod: MongoMemoryReplSet
-let stream: ChangeStream
+let stream: ChangeStream | undefined
/**
* Connect to the in-memory database.
*/
-const connect = async (): Promise => {
+export const connect = async (onChange?: (change: ChangeStreamDocument) => void): Promise => {
mongod = await MongoMemoryReplSet.create({
// Stream listener listens on DB denoted by 'MONGO_DBNAME' env var.
replSet: { count: 1, storageEngine: 'wiredTiger', dbName: checkVar('MONGO_DBNAME') }
@@ -27,14 +28,14 @@ const connect = async (): Promise => {
}
await mongoose.connect(uri, mongooseOpts)
- stream = await defaultPostConnect()
+ stream = await defaultPostConnect(async () => await testStreamListener(onChange))
}
/**
* Drop database, close the connection and stop mongod.
*/
-const close = async (): Promise => {
- await stream.close()
+export const close = async (): Promise => {
+ await stream?.close()
await mongoose.connection.dropDatabase()
await mongoose.connection.close()
await mongod.stop()
@@ -43,7 +44,7 @@ const close = async (): Promise => {
/**
* Remove all the data for all db collections.
*/
-const clear = async (): Promise => {
+export const clear = async (): Promise => {
const collections = mongoose.connection.collections
for (const key in collections) {
@@ -79,4 +80,4 @@ export interface InMemoryDB {
insertDirectly: (collection: string, documents: any[]) => Promise
}
-export default { connect, close, clear, insertDirectly }
+export default { connect, close, clear, insertDirectly, stream }
diff --git a/src/utils/testUtils.ts b/src/utils/testUtils.ts
index 7013f5a0..f16a2a7d 100644
--- a/src/utils/testUtils.ts
+++ b/src/utils/testUtils.ts
@@ -1,56 +1,75 @@
import jwt from 'jsonwebtoken'
import { jest } from '@jest/globals'
import request from 'supertest'
-import inMemoryDB from './inMemoryDB.js'
import type { InMemoryDB } from './inMemoryDB.js'
+import inMemoryDB from './inMemoryDB.js'
import { createServer } from '../server.js'
-import { ApolloServer } from 'apollo-server'
+import { ApolloServer } from 'apollo-server-express'
+import express from 'express'
const PORT = 4000
-interface QueryAPIProps {
- query: string
+export interface QueryAPIProps {
+ query?: string
operationName?: string
- variables: any
- userUuid: string
+ variables?: any
+ userUuid?: string
roles?: string[]
port?: number
+ endpoint?: string
+ app?: express.Application
+ body?: any
}
/*
* Helper function for querying the locally-served API. It mocks JWT verification
* so we can pretend to have an role we want when calling the API.
*/
-export const queryAPI = async ({ query, operationName, variables, userUuid, roles = [], port = PORT }: QueryAPIProps): Promise => {
+export const queryAPI = async ({
+ query,
+ operationName,
+ variables,
+ userUuid = '',
+ roles = [],
+ app,
+ endpoint = '/',
+ port = PORT
+}: QueryAPIProps): Promise => {
// Avoid needing to pass in actual signed tokens.
const jwtSpy = jest.spyOn(jwt, 'verify')
jwtSpy.mockImplementation(() => {
return {
- // Roles defined at https://manage.auth0.com/dashboard/us/dev-fmjy7n5n/roles
+ // Roles defined at https://manage.auth0.com/dashboard/us/dev-fmjy7n5n/roles
'https://tacos.openbeta.io/roles': roles,
'https://tacos.openbeta.io/uuid': userUuid
}
})
const queryObj = { query, operationName, variables }
- const response = await request(`http://localhost:${port}`)
- .post('/')
+ return await request(app ?? `http://localhost:${port}`)
+ .post(endpoint)
.send(queryObj)
.set('Authorization', 'Bearer placeholder-jwt-see-SpyOn')
-
- return response
}
-interface SetUpServerReturnType {
+export interface SetUpServerReturnType {
server: ApolloServer
+ app: express.Application
inMemoryDB: InMemoryDB
}
+
/*
* Starts Apollo server and has Mongo inMemory replset connect to it.
*/
-export const setUpServer = async (port = PORT): Promise => {
- const server = await createServer()
+export const setUpServer = async (): Promise => {
await inMemoryDB.connect()
- await server.listen({ port })
- return { server, inMemoryDB }
+ const { app, server } = await createServer()
+ return { app, server, inMemoryDB }
}
+
+export const isFulfilled = (
+ p: PromiseSettledResult
+): p is PromiseFulfilledResult => p.status === 'fulfilled'
+export const isRejected = (
+ p: PromiseSettledResult
+): p is PromiseRejectedResult => p.status === 'rejected'
diff --git a/yarn.lock b/yarn.lock
index 3e15720c..a0cfeaad 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -29,6 +29,25 @@
"@types/node" "^10.1.0"
long "^4.0.0"
+"@apollo/protobufjs@1.2.6":
+ version "1.2.6"
+ resolved "https://registry.yarnpkg.com/@apollo/protobufjs/-/protobufjs-1.2.6.tgz#d601e65211e06ae1432bf5993a1a0105f2862f27"
+ integrity sha512-Wqo1oSHNUj/jxmsVp4iR3I480p6qdqHikn38lKrFhfzcDJ7lwd7Ck7cHRl4JE81tWNArl77xhnG/OkZhxKBYOw==
+ dependencies:
+ "@protobufjs/aspromise" "^1.1.2"
+ "@protobufjs/base64" "^1.1.2"
+ "@protobufjs/codegen" "^2.0.4"
+ "@protobufjs/eventemitter" "^1.1.0"
+ "@protobufjs/fetch" "^1.1.0"
+ "@protobufjs/float" "^1.0.2"
+ "@protobufjs/inquire" "^1.1.0"
+ "@protobufjs/path" "^1.1.2"
+ "@protobufjs/pool" "^1.1.0"
+ "@protobufjs/utf8" "^1.1.0"
+ "@types/long" "^4.0.0"
+ "@types/node" "^10.1.0"
+ long "^4.0.0"
+
"@apollo/utils.dropunuseddefinitions@^1.1.0":
version "1.1.0"
resolved "https://registry.npmjs.org/@apollo/utils.dropunuseddefinitions/-/utils.dropunuseddefinitions-1.1.0.tgz"
@@ -1820,7 +1839,16 @@
resolved "https://registry.npmjs.org/@types/cors/-/cors-2.8.12.tgz"
integrity sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==
-"@types/express-serve-static-core@4.17.29", "@types/express-serve-static-core@^4.17.18":
+"@types/express-serve-static-core@4.17.31":
+ version "4.17.31"
+ resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.31.tgz#a1139efeab4e7323834bb0226e62ac019f474b2f"
+ integrity sha512-DxMhY+NAsTwMMFHBTtJFNp5qiHKJ7TeqOo23zVEM9alT1Ml27Q3xcTH0xwxn7Q0BbMcVEJOs/7aQtUWupUQN3Q==
+ dependencies:
+ "@types/node" "*"
+ "@types/qs" "*"
+ "@types/range-parser" "*"
+
+"@types/express-serve-static-core@^4.17.18":
version "4.17.29"
resolved "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.29.tgz"
integrity sha512-uMd++6dMKS32EOuw1Uli3e3BPgdLIXmezcfHv7N4c1s3gkhikBplORPpMq3fuWkxncZN1reb16d5n8yhQ80x7Q==
@@ -1839,7 +1867,17 @@
"@types/range-parser" "*"
"@types/send" "*"
-"@types/express@4.17.13", "@types/express@^4.17.13":
+"@types/express@4.17.14":
+ version "4.17.14"
+ resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.14.tgz#143ea0557249bc1b3b54f15db4c81c3d4eb3569c"
+ integrity sha512-TEbt+vaPFQ+xpxFLFssxUDXj5cWCxZJjIcB7Yg0k0GMHGtgtQgpvx/MUQUeAkNbA9AAGrwkAsoeItdTgS7FMyg==
+ dependencies:
+ "@types/body-parser" "*"
+ "@types/express-serve-static-core" "^4.17.18"
+ "@types/qs" "*"
+ "@types/serve-static" "*"
+
+"@types/express@^4.17.13":
version "4.17.13"
resolved "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz"
integrity sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA==
@@ -2241,6 +2279,13 @@ apollo-reporting-protobuf@^3.3.1:
dependencies:
"@apollo/protobufjs" "1.2.2"
+apollo-reporting-protobuf@^3.4.0:
+ version "3.4.0"
+ resolved "https://registry.yarnpkg.com/apollo-reporting-protobuf/-/apollo-reporting-protobuf-3.4.0.tgz#6edd31f09d4a3704d9e808d1db30eca2229ded26"
+ integrity sha512-h0u3EbC/9RpihWOmcSsvTW2O6RXVaD/mPEjfrPkxRPTEPWqncsgOoRJw+wih4OqfH3PvTJvoEIf4LwKrUaqWog==
+ dependencies:
+ "@apollo/protobufjs" "1.2.6"
+
apollo-server-caching@0.3.1, apollo-server-caching@^0.3.1:
version "0.3.1"
resolved "https://registry.npmjs.org/apollo-server-caching/-/apollo-server-caching-0.3.1.tgz"
@@ -2248,10 +2293,10 @@ apollo-server-caching@0.3.1, apollo-server-caching@^0.3.1:
dependencies:
lru-cache "^5.0.0"
-apollo-server-core@^3.9.0:
- version "3.9.0"
- resolved "https://registry.npmjs.org/apollo-server-core/-/apollo-server-core-3.9.0.tgz"
- integrity sha512-WS54C33cTriDaBIcj7ijWv/fgeJICcrQKlP1Cn6pnZp/eumpMraezLeJ3gFWAXprOuR2E3fZe64lNlup0fMu8w==
+apollo-server-core@^3.13.0:
+ version "3.13.0"
+ resolved "https://registry.yarnpkg.com/apollo-server-core/-/apollo-server-core-3.13.0.tgz#ad6601fbb34cc97eedca27a9fb0b5738d11cd27d"
+ integrity sha512-v/g6DR6KuHn9DYSdtQijz8dLOkP78I5JSVJzPkARhDbhpH74QNwrQ2PP2URAPPEDJ2EeZNQDX8PvbYkAKqg+kg==
dependencies:
"@apollo/utils.keyvaluecache" "^1.0.1"
"@apollo/utils.logger" "^1.0.0"
@@ -2262,18 +2307,19 @@ apollo-server-core@^3.9.0:
"@graphql-tools/schema" "^8.0.0"
"@josephg/resolvable" "^1.0.0"
apollo-datasource "^3.3.2"
- apollo-reporting-protobuf "^3.3.1"
+ apollo-reporting-protobuf "^3.4.0"
apollo-server-env "^4.2.1"
apollo-server-errors "^3.3.1"
- apollo-server-plugin-base "^3.6.1"
- apollo-server-types "^3.6.1"
+ apollo-server-plugin-base "^3.7.2"
+ apollo-server-types "^3.8.0"
async-retry "^1.2.1"
fast-json-stable-stringify "^2.1.0"
graphql-tag "^2.11.0"
loglevel "^1.6.8"
lru-cache "^6.0.0"
+ node-abort-controller "^3.0.1"
sha.js "^2.4.11"
- uuid "^8.0.0"
+ uuid "^9.0.0"
whatwg-mimetype "^3.0.0"
apollo-server-env@2.2.0:
@@ -2301,50 +2347,40 @@ apollo-server-errors@^3.3.1:
resolved "https://registry.npmjs.org/apollo-server-errors/-/apollo-server-errors-3.3.1.tgz"
integrity sha512-xnZJ5QWs6FixHICXHxUfm+ZWqqxrNuPlQ+kj5m6RtEgIpekOPssH/SD9gf2B4HuWV0QozorrygwZnux8POvyPA==
-apollo-server-express@^3.9.0:
- version "3.9.0"
- resolved "https://registry.npmjs.org/apollo-server-express/-/apollo-server-express-3.9.0.tgz"
- integrity sha512-scSeHy9iB7W3OiF3uLQEzad9Jm9tEfDF8ACsJb2P+xX69uqg6zizsrQvj3qRhazCO7FKMcMu9zQFR0hy7zKbUA==
+apollo-server-express@^3.13.0:
+ version "3.13.0"
+ resolved "https://registry.yarnpkg.com/apollo-server-express/-/apollo-server-express-3.13.0.tgz#0d8d9bbba3b8b8264912d215f63fd44e74d5f42a"
+ integrity sha512-iSxICNbDUyebOuM8EKb3xOrpIwOQgKxGbR2diSr4HP3IW8T3njKFOoMce50vr+moOCe1ev8BnLcw9SNbuUtf7g==
dependencies:
"@types/accepts" "^1.3.5"
"@types/body-parser" "1.19.2"
"@types/cors" "2.8.12"
- "@types/express" "4.17.13"
- "@types/express-serve-static-core" "4.17.29"
+ "@types/express" "4.17.14"
+ "@types/express-serve-static-core" "4.17.31"
accepts "^1.3.5"
- apollo-server-core "^3.9.0"
- apollo-server-types "^3.6.1"
+ apollo-server-core "^3.13.0"
+ apollo-server-types "^3.8.0"
body-parser "^1.19.0"
cors "^2.8.5"
parseurl "^1.3.3"
-apollo-server-plugin-base@^3.6.1:
- version "3.6.1"
- resolved "https://registry.npmjs.org/apollo-server-plugin-base/-/apollo-server-plugin-base-3.6.1.tgz"
- integrity sha512-bFpxzWO0LTTtSAkGVBeaAtnQXJ5ZCi8eaLN/eMSju8RByifmF3Kr6gAqcOZhOH/geQEt3Y6G8n3bR0eHTGxljQ==
+apollo-server-plugin-base@^3.7.2:
+ version "3.7.2"
+ resolved "https://registry.yarnpkg.com/apollo-server-plugin-base/-/apollo-server-plugin-base-3.7.2.tgz#c19cd137bc4c993ba2490ba2b571b0f3ce60a0cd"
+ integrity sha512-wE8dwGDvBOGehSsPTRZ8P/33Jan6/PmL0y0aN/1Z5a5GcbFhDaaJCjK5cav6npbbGL2DPKK0r6MPXi3k3N45aw==
dependencies:
- apollo-server-types "^3.6.1"
+ apollo-server-types "^3.8.0"
-apollo-server-types@^3.6.1:
- version "3.6.1"
- resolved "https://registry.npmjs.org/apollo-server-types/-/apollo-server-types-3.6.1.tgz"
- integrity sha512-XOPlBlRdwP00PrG03OffGGWuzyei+J9t1rAnvyHsSdP0JCgQWigHJfvL1N9Bhgi4UTjl9JadKOJh1znLNlqIFQ==
+apollo-server-types@^3.8.0:
+ version "3.8.0"
+ resolved "https://registry.yarnpkg.com/apollo-server-types/-/apollo-server-types-3.8.0.tgz#d976b6967878681f715fe2b9e4dad9ba86b1346f"
+ integrity sha512-ZI/8rTE4ww8BHktsVpb91Sdq7Cb71rdSkXELSwdSR0eXu600/sY+1UXhTWdiJvk+Eq5ljqoHLwLbY2+Clq2b9A==
dependencies:
"@apollo/utils.keyvaluecache" "^1.0.1"
"@apollo/utils.logger" "^1.0.0"
- apollo-reporting-protobuf "^3.3.1"
+ apollo-reporting-protobuf "^3.4.0"
apollo-server-env "^4.2.1"
-apollo-server@^3.9.0:
- version "3.9.0"
- resolved "https://registry.npmjs.org/apollo-server/-/apollo-server-3.9.0.tgz"
- integrity sha512-g80gXDuK8fl2W0fQF/hEyeoO9AU+sO2gBzeJAYUyGLotYc+oL/Y3mTRk5GB8C7cXUXCg5uvWbUj8va0E5UZE7w==
- dependencies:
- "@types/express" "4.17.13"
- apollo-server-core "^3.9.0"
- apollo-server-express "^3.9.0"
- express "^4.17.1"
-
argparse@^1.0.7:
version "1.0.10"
resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
@@ -2615,7 +2651,25 @@ bl@^4.0.3:
inherits "^2.0.4"
readable-stream "^3.4.0"
-body-parser@1.20.0, body-parser@^1.19.0:
+body-parser@1.20.1:
+ version "1.20.1"
+ resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668"
+ integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==
+ dependencies:
+ bytes "3.1.2"
+ content-type "~1.0.4"
+ debug "2.6.9"
+ depd "2.0.0"
+ destroy "1.2.0"
+ http-errors "2.0.0"
+ iconv-lite "0.4.24"
+ on-finished "2.4.1"
+ qs "6.11.0"
+ raw-body "2.5.1"
+ type-is "~1.6.18"
+ unpipe "1.0.0"
+
+body-parser@^1.19.0:
version "1.20.0"
resolved "https://registry.npmjs.org/body-parser/-/body-parser-1.20.0.tgz"
integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg==
@@ -2633,6 +2687,24 @@ body-parser@1.20.0, body-parser@^1.19.0:
type-is "~1.6.18"
unpipe "1.0.0"
+body-parser@^1.20.2:
+ version "1.20.2"
+ resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd"
+ integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==
+ dependencies:
+ bytes "3.1.2"
+ content-type "~1.0.5"
+ debug "2.6.9"
+ depd "2.0.0"
+ destroy "1.2.0"
+ http-errors "2.0.0"
+ iconv-lite "0.4.24"
+ on-finished "2.4.1"
+ qs "6.11.0"
+ raw-body "2.5.2"
+ type-is "~1.6.18"
+ unpipe "1.0.0"
+
bowser@^2.11.0:
version "2.11.0"
resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.11.0.tgz#5ca3c35757a7aa5771500c70a73a9f91ef420a8f"
@@ -2977,6 +3049,11 @@ content-type@~1.0.4:
resolved "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz"
integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==
+content-type@~1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918"
+ integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==
+
convert-source-map@^1.6.0, convert-source-map@^1.7.0:
version "1.9.0"
resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f"
@@ -3752,14 +3829,14 @@ expect@^29.4.2:
jest-message-util "^29.4.2"
jest-util "^29.4.2"
-express@^4.17.1:
- version "4.18.1"
- resolved "https://registry.npmjs.org/express/-/express-4.18.1.tgz"
- integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q==
+express@^4.18.2:
+ version "4.18.2"
+ resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59"
+ integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==
dependencies:
accepts "~1.3.8"
array-flatten "1.1.1"
- body-parser "1.20.0"
+ body-parser "1.20.1"
content-disposition "0.5.4"
content-type "~1.0.4"
cookie "0.5.0"
@@ -3778,7 +3855,7 @@ express@^4.17.1:
parseurl "~1.3.3"
path-to-regexp "0.1.7"
proxy-addr "~2.0.7"
- qs "6.10.3"
+ qs "6.11.0"
range-parser "~1.2.1"
safe-buffer "5.2.1"
send "0.18.0"
@@ -4285,10 +4362,10 @@ graphql-tag@^2.11.0:
dependencies:
tslib "^2.1.0"
-graphql@^16.5.0:
- version "16.5.0"
- resolved "https://registry.npmjs.org/graphql/-/graphql-16.5.0.tgz"
- integrity sha512-qbHgh8Ix+j/qY+a/ZcJnFQ+j8ezakqPiHwPiZhV/3PgGlgf96QMBB5/f2rkiC9sgLoy/xvT6TSiaf2nTHJh5iA==
+graphql@^16.8.1:
+ version "16.8.1"
+ resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.8.1.tgz#1930a965bef1170603702acdb68aedd3f3cf6f07"
+ integrity sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw==
gtoken@^6.1.0:
version "6.1.2"
@@ -5875,6 +5952,11 @@ node-abi@^3.3.0:
dependencies:
semver "^7.3.5"
+node-abort-controller@^3.0.1:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/node-abort-controller/-/node-abort-controller-3.1.1.tgz#a94377e964a9a37ac3976d848cb5c765833b8548"
+ integrity sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ==
+
node-addon-api@^6.0.0:
version "6.1.0"
resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76"
@@ -6473,6 +6555,13 @@ qs@6.10.3:
dependencies:
side-channel "^1.0.4"
+qs@6.11.0:
+ version "6.11.0"
+ resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a"
+ integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==
+ dependencies:
+ side-channel "^1.0.4"
+
qs@^6.10.3:
version "6.11.2"
resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.2.tgz#64bea51f12c1f5da1bc01496f48ffcff7c69d7d9"
@@ -6517,6 +6606,16 @@ raw-body@2.5.1:
iconv-lite "0.4.24"
unpipe "1.0.0"
+raw-body@2.5.2:
+ version "2.5.2"
+ resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a"
+ integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==
+ dependencies:
+ bytes "3.1.2"
+ http-errors "2.0.0"
+ iconv-lite "0.4.24"
+ unpipe "1.0.0"
+
rbush@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/rbush/-/rbush-3.0.1.tgz#5fafa8a79b3b9afdfe5008403a720cc1de882ecf"
@@ -7568,6 +7667,11 @@ vary@^1, vary@~1.1.2:
resolved "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz"
integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==
+wait-for-expect@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/wait-for-expect/-/wait-for-expect-3.0.2.tgz#d2f14b2f7b778c9b82144109c8fa89ceaadaa463"
+ integrity sha512-cfS1+DZxuav1aBYbaO/kE06EOS8yRw7qOFoD3XtjTkYvCvh3zUvNST8DXK/nPaeqIzIv3P3kL3lRJn8iwOiSag==
+
walker@^1.0.8:
version "1.0.8"
resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f"